{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 76394,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013090033248684452,
      "grad_norm": 0.4367656707763672,
      "learning_rate": 1.991273311167544e-05,
      "loss": 1.32,
      "step": 500
    },
    {
      "epoch": 0.026180066497368905,
      "grad_norm": 0.39482298493385315,
      "learning_rate": 1.9825466223350874e-05,
      "loss": 0.0594,
      "step": 1000
    },
    {
      "epoch": 0.039270099746053354,
      "grad_norm": 0.47824403643608093,
      "learning_rate": 1.9738199335026312e-05,
      "loss": 0.0497,
      "step": 1500
    },
    {
      "epoch": 0.05236013299473781,
      "grad_norm": 0.3971627652645111,
      "learning_rate": 1.965093244670175e-05,
      "loss": 0.043,
      "step": 2000
    },
    {
      "epoch": 0.06545016624342226,
      "grad_norm": 0.5000212788581848,
      "learning_rate": 1.9563665558377188e-05,
      "loss": 0.0374,
      "step": 2500
    },
    {
      "epoch": 0.07854019949210671,
      "grad_norm": 0.9078471064567566,
      "learning_rate": 1.9476398670052623e-05,
      "loss": 0.0357,
      "step": 3000
    },
    {
      "epoch": 0.09163023274079116,
      "grad_norm": 0.35205739736557007,
      "learning_rate": 1.938913178172806e-05,
      "loss": 0.0317,
      "step": 3500
    },
    {
      "epoch": 0.10472026598947562,
      "grad_norm": 0.19642357528209686,
      "learning_rate": 1.9301864893403495e-05,
      "loss": 0.0304,
      "step": 4000
    },
    {
      "epoch": 0.11781029923816007,
      "grad_norm": 0.5134682655334473,
      "learning_rate": 1.9214598005078933e-05,
      "loss": 0.0281,
      "step": 4500
    },
    {
      "epoch": 0.13090033248684452,
      "grad_norm": 0.23212824761867523,
      "learning_rate": 1.912733111675437e-05,
      "loss": 0.0274,
      "step": 5000
    },
    {
      "epoch": 0.14399036573552898,
      "grad_norm": 0.4254980981349945,
      "learning_rate": 1.904006422842981e-05,
      "loss": 0.0247,
      "step": 5500
    },
    {
      "epoch": 0.15708039898421342,
      "grad_norm": 0.4024842083454132,
      "learning_rate": 1.8952797340105244e-05,
      "loss": 0.0235,
      "step": 6000
    },
    {
      "epoch": 0.17017043223289788,
      "grad_norm": 0.40947026014328003,
      "learning_rate": 1.8865530451780682e-05,
      "loss": 0.0235,
      "step": 6500
    },
    {
      "epoch": 0.1832604654815823,
      "grad_norm": 0.366158127784729,
      "learning_rate": 1.877826356345612e-05,
      "loss": 0.0225,
      "step": 7000
    },
    {
      "epoch": 0.19635049873026678,
      "grad_norm": 0.4678824841976166,
      "learning_rate": 1.8690996675131558e-05,
      "loss": 0.0219,
      "step": 7500
    },
    {
      "epoch": 0.20944053197895124,
      "grad_norm": 0.21004918217658997,
      "learning_rate": 1.8603729786806996e-05,
      "loss": 0.0198,
      "step": 8000
    },
    {
      "epoch": 0.22253056522763567,
      "grad_norm": 0.2192375510931015,
      "learning_rate": 1.851646289848243e-05,
      "loss": 0.0201,
      "step": 8500
    },
    {
      "epoch": 0.23562059847632014,
      "grad_norm": 0.15161575376987457,
      "learning_rate": 1.842919601015787e-05,
      "loss": 0.0192,
      "step": 9000
    },
    {
      "epoch": 0.24871063172500457,
      "grad_norm": 0.3270639181137085,
      "learning_rate": 1.8341929121833303e-05,
      "loss": 0.0186,
      "step": 9500
    },
    {
      "epoch": 0.26180066497368903,
      "grad_norm": 0.25447696447372437,
      "learning_rate": 1.825466223350874e-05,
      "loss": 0.0182,
      "step": 10000
    },
    {
      "epoch": 0.2748906982223735,
      "grad_norm": 0.1797029674053192,
      "learning_rate": 1.816739534518418e-05,
      "loss": 0.0182,
      "step": 10500
    },
    {
      "epoch": 0.28798073147105796,
      "grad_norm": 0.4422529637813568,
      "learning_rate": 1.8080128456859617e-05,
      "loss": 0.0175,
      "step": 11000
    },
    {
      "epoch": 0.30107076471974237,
      "grad_norm": 0.19019261002540588,
      "learning_rate": 1.7992861568535052e-05,
      "loss": 0.0159,
      "step": 11500
    },
    {
      "epoch": 0.31416079796842683,
      "grad_norm": 0.12262561917304993,
      "learning_rate": 1.790559468021049e-05,
      "loss": 0.0162,
      "step": 12000
    },
    {
      "epoch": 0.3272508312171113,
      "grad_norm": 0.5514086484909058,
      "learning_rate": 1.7818327791885924e-05,
      "loss": 0.0159,
      "step": 12500
    },
    {
      "epoch": 0.34034086446579576,
      "grad_norm": 0.3397659957408905,
      "learning_rate": 1.7731060903561362e-05,
      "loss": 0.0163,
      "step": 13000
    },
    {
      "epoch": 0.3534308977144802,
      "grad_norm": 0.3542526364326477,
      "learning_rate": 1.76437940152368e-05,
      "loss": 0.0161,
      "step": 13500
    },
    {
      "epoch": 0.3665209309631646,
      "grad_norm": 0.22688041627407074,
      "learning_rate": 1.7556527126912238e-05,
      "loss": 0.0155,
      "step": 14000
    },
    {
      "epoch": 0.3796109642118491,
      "grad_norm": 0.20115447044372559,
      "learning_rate": 1.7469260238587673e-05,
      "loss": 0.0148,
      "step": 14500
    },
    {
      "epoch": 0.39270099746053355,
      "grad_norm": 0.09773228317499161,
      "learning_rate": 1.738199335026311e-05,
      "loss": 0.0149,
      "step": 15000
    },
    {
      "epoch": 0.405791030709218,
      "grad_norm": 0.41297146677970886,
      "learning_rate": 1.729472646193855e-05,
      "loss": 0.0146,
      "step": 15500
    },
    {
      "epoch": 0.4188810639579025,
      "grad_norm": 0.6495208740234375,
      "learning_rate": 1.7207459573613983e-05,
      "loss": 0.0136,
      "step": 16000
    },
    {
      "epoch": 0.4319710972065869,
      "grad_norm": 0.35930490493774414,
      "learning_rate": 1.712019268528942e-05,
      "loss": 0.0137,
      "step": 16500
    },
    {
      "epoch": 0.44506113045527135,
      "grad_norm": 0.22953346371650696,
      "learning_rate": 1.703292579696486e-05,
      "loss": 0.0139,
      "step": 17000
    },
    {
      "epoch": 0.4581511637039558,
      "grad_norm": 0.41518378257751465,
      "learning_rate": 1.6945658908640297e-05,
      "loss": 0.0135,
      "step": 17500
    },
    {
      "epoch": 0.4712411969526403,
      "grad_norm": 0.2171572744846344,
      "learning_rate": 1.6858392020315732e-05,
      "loss": 0.0129,
      "step": 18000
    },
    {
      "epoch": 0.48433123020132474,
      "grad_norm": 0.2897777557373047,
      "learning_rate": 1.677112513199117e-05,
      "loss": 0.0122,
      "step": 18500
    },
    {
      "epoch": 0.49742126345000914,
      "grad_norm": 0.4209305942058563,
      "learning_rate": 1.6683858243666608e-05,
      "loss": 0.0128,
      "step": 19000
    },
    {
      "epoch": 0.5105112966986937,
      "grad_norm": 0.08220311999320984,
      "learning_rate": 1.6596591355342046e-05,
      "loss": 0.0123,
      "step": 19500
    },
    {
      "epoch": 0.5236013299473781,
      "grad_norm": 0.09847331047058105,
      "learning_rate": 1.650932446701748e-05,
      "loss": 0.0123,
      "step": 20000
    },
    {
      "epoch": 0.5366913631960625,
      "grad_norm": 0.41798558831214905,
      "learning_rate": 1.642205757869292e-05,
      "loss": 0.0128,
      "step": 20500
    },
    {
      "epoch": 0.549781396444747,
      "grad_norm": 0.2177186757326126,
      "learning_rate": 1.6334790690368353e-05,
      "loss": 0.0124,
      "step": 21000
    },
    {
      "epoch": 0.5628714296934314,
      "grad_norm": 0.16467055678367615,
      "learning_rate": 1.624752380204379e-05,
      "loss": 0.0118,
      "step": 21500
    },
    {
      "epoch": 0.5759614629421159,
      "grad_norm": 0.3392176330089569,
      "learning_rate": 1.616025691371923e-05,
      "loss": 0.0121,
      "step": 22000
    },
    {
      "epoch": 0.5890514961908003,
      "grad_norm": 0.14208835363388062,
      "learning_rate": 1.6072990025394667e-05,
      "loss": 0.0118,
      "step": 22500
    },
    {
      "epoch": 0.6021415294394847,
      "grad_norm": 0.18248602747917175,
      "learning_rate": 1.5985723137070105e-05,
      "loss": 0.0118,
      "step": 23000
    },
    {
      "epoch": 0.6152315626881693,
      "grad_norm": 0.32215237617492676,
      "learning_rate": 1.589845624874554e-05,
      "loss": 0.0111,
      "step": 23500
    },
    {
      "epoch": 0.6283215959368537,
      "grad_norm": 0.35770946741104126,
      "learning_rate": 1.5811189360420978e-05,
      "loss": 0.0111,
      "step": 24000
    },
    {
      "epoch": 0.6414116291855382,
      "grad_norm": 0.21091119945049286,
      "learning_rate": 1.5723922472096412e-05,
      "loss": 0.0115,
      "step": 24500
    },
    {
      "epoch": 0.6545016624342226,
      "grad_norm": 0.215097576379776,
      "learning_rate": 1.563665558377185e-05,
      "loss": 0.0107,
      "step": 25000
    },
    {
      "epoch": 0.667591695682907,
      "grad_norm": 0.15477751195430756,
      "learning_rate": 1.554938869544729e-05,
      "loss": 0.0105,
      "step": 25500
    },
    {
      "epoch": 0.6806817289315915,
      "grad_norm": 0.1576426774263382,
      "learning_rate": 1.5462121807122726e-05,
      "loss": 0.0105,
      "step": 26000
    },
    {
      "epoch": 0.6937717621802759,
      "grad_norm": 0.19675689935684204,
      "learning_rate": 1.537485491879816e-05,
      "loss": 0.0096,
      "step": 26500
    },
    {
      "epoch": 0.7068617954289604,
      "grad_norm": 0.480955570936203,
      "learning_rate": 1.52875880304736e-05,
      "loss": 0.0104,
      "step": 27000
    },
    {
      "epoch": 0.7199518286776448,
      "grad_norm": 0.2651515603065491,
      "learning_rate": 1.5200321142149035e-05,
      "loss": 0.0106,
      "step": 27500
    },
    {
      "epoch": 0.7330418619263293,
      "grad_norm": 0.21407313644886017,
      "learning_rate": 1.5113054253824473e-05,
      "loss": 0.0102,
      "step": 28000
    },
    {
      "epoch": 0.7461318951750138,
      "grad_norm": 0.13915963470935822,
      "learning_rate": 1.502578736549991e-05,
      "loss": 0.0098,
      "step": 28500
    },
    {
      "epoch": 0.7592219284236982,
      "grad_norm": 0.09625498950481415,
      "learning_rate": 1.4938520477175348e-05,
      "loss": 0.0099,
      "step": 29000
    },
    {
      "epoch": 0.7723119616723827,
      "grad_norm": 0.26766207814216614,
      "learning_rate": 1.4851253588850782e-05,
      "loss": 0.01,
      "step": 29500
    },
    {
      "epoch": 0.7854019949210671,
      "grad_norm": 0.03729819133877754,
      "learning_rate": 1.476398670052622e-05,
      "loss": 0.0092,
      "step": 30000
    },
    {
      "epoch": 0.7984920281697515,
      "grad_norm": 0.19327211380004883,
      "learning_rate": 1.4676719812201658e-05,
      "loss": 0.0094,
      "step": 30500
    },
    {
      "epoch": 0.811582061418436,
      "grad_norm": 0.27896180748939514,
      "learning_rate": 1.4589452923877094e-05,
      "loss": 0.01,
      "step": 31000
    },
    {
      "epoch": 0.8246720946671204,
      "grad_norm": 0.1490613967180252,
      "learning_rate": 1.4502186035552532e-05,
      "loss": 0.0092,
      "step": 31500
    },
    {
      "epoch": 0.837762127915805,
      "grad_norm": 0.22722095251083374,
      "learning_rate": 1.4414919147227969e-05,
      "loss": 0.0093,
      "step": 32000
    },
    {
      "epoch": 0.8508521611644894,
      "grad_norm": 0.2942313849925995,
      "learning_rate": 1.4327652258903407e-05,
      "loss": 0.0098,
      "step": 32500
    },
    {
      "epoch": 0.8639421944131738,
      "grad_norm": 0.37654176354408264,
      "learning_rate": 1.4240385370578841e-05,
      "loss": 0.0095,
      "step": 33000
    },
    {
      "epoch": 0.8770322276618583,
      "grad_norm": 0.21543939411640167,
      "learning_rate": 1.415311848225428e-05,
      "loss": 0.0097,
      "step": 33500
    },
    {
      "epoch": 0.8901222609105427,
      "grad_norm": 0.4608267843723297,
      "learning_rate": 1.4065851593929716e-05,
      "loss": 0.0093,
      "step": 34000
    },
    {
      "epoch": 0.9032122941592272,
      "grad_norm": 0.1784515380859375,
      "learning_rate": 1.3978584705605154e-05,
      "loss": 0.009,
      "step": 34500
    },
    {
      "epoch": 0.9163023274079116,
      "grad_norm": 0.3353884518146515,
      "learning_rate": 1.389131781728059e-05,
      "loss": 0.009,
      "step": 35000
    },
    {
      "epoch": 0.929392360656596,
      "grad_norm": 0.2330337017774582,
      "learning_rate": 1.3804050928956028e-05,
      "loss": 0.009,
      "step": 35500
    },
    {
      "epoch": 0.9424823939052805,
      "grad_norm": 0.32975077629089355,
      "learning_rate": 1.3716784040631464e-05,
      "loss": 0.0092,
      "step": 36000
    },
    {
      "epoch": 0.955572427153965,
      "grad_norm": 0.03280099481344223,
      "learning_rate": 1.3629517152306902e-05,
      "loss": 0.0084,
      "step": 36500
    },
    {
      "epoch": 0.9686624604026495,
      "grad_norm": 0.22344884276390076,
      "learning_rate": 1.3542250263982337e-05,
      "loss": 0.0089,
      "step": 37000
    },
    {
      "epoch": 0.9817524936513339,
      "grad_norm": 0.361147403717041,
      "learning_rate": 1.3454983375657775e-05,
      "loss": 0.0084,
      "step": 37500
    },
    {
      "epoch": 0.9948425269000183,
      "grad_norm": 0.2099611908197403,
      "learning_rate": 1.3367716487333213e-05,
      "loss": 0.0088,
      "step": 38000
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.005731215700507164,
      "eval_runtime": 2297.4971,
      "eval_samples_per_second": 133.003,
      "eval_steps_per_second": 16.625,
      "step": 38197
    },
    {
      "epoch": 1.0079325601487028,
      "grad_norm": 0.24136124551296234,
      "learning_rate": 1.3280449599008649e-05,
      "loss": 0.0078,
      "step": 38500
    },
    {
      "epoch": 1.0210225933973873,
      "grad_norm": 0.42886942625045776,
      "learning_rate": 1.3193182710684087e-05,
      "loss": 0.0082,
      "step": 39000
    },
    {
      "epoch": 1.0341126266460716,
      "grad_norm": 0.10831937938928604,
      "learning_rate": 1.3105915822359523e-05,
      "loss": 0.0071,
      "step": 39500
    },
    {
      "epoch": 1.0472026598947561,
      "grad_norm": 0.20537184178829193,
      "learning_rate": 1.3018648934034961e-05,
      "loss": 0.0081,
      "step": 40000
    },
    {
      "epoch": 1.0602926931434407,
      "grad_norm": 0.28797346353530884,
      "learning_rate": 1.2931382045710396e-05,
      "loss": 0.0075,
      "step": 40500
    },
    {
      "epoch": 1.073382726392125,
      "grad_norm": 0.22928300499916077,
      "learning_rate": 1.2844115157385834e-05,
      "loss": 0.0076,
      "step": 41000
    },
    {
      "epoch": 1.0864727596408095,
      "grad_norm": 0.003099123015999794,
      "learning_rate": 1.275684826906127e-05,
      "loss": 0.0079,
      "step": 41500
    },
    {
      "epoch": 1.099562792889494,
      "grad_norm": 0.287396639585495,
      "learning_rate": 1.2669581380736708e-05,
      "loss": 0.0075,
      "step": 42000
    },
    {
      "epoch": 1.1126528261381785,
      "grad_norm": 0.07049620896577835,
      "learning_rate": 1.2582314492412144e-05,
      "loss": 0.007,
      "step": 42500
    },
    {
      "epoch": 1.1257428593868628,
      "grad_norm": 0.2029752880334854,
      "learning_rate": 1.2495047604087582e-05,
      "loss": 0.0076,
      "step": 43000
    },
    {
      "epoch": 1.1388328926355473,
      "grad_norm": 0.04248388856649399,
      "learning_rate": 1.2407780715763019e-05,
      "loss": 0.0072,
      "step": 43500
    },
    {
      "epoch": 1.1519229258842318,
      "grad_norm": 0.4174834191799164,
      "learning_rate": 1.2320513827438457e-05,
      "loss": 0.0077,
      "step": 44000
    },
    {
      "epoch": 1.1650129591329161,
      "grad_norm": 0.25623586773872375,
      "learning_rate": 1.2233246939113891e-05,
      "loss": 0.0069,
      "step": 44500
    },
    {
      "epoch": 1.1781029923816007,
      "grad_norm": 0.12231756001710892,
      "learning_rate": 1.214598005078933e-05,
      "loss": 0.0076,
      "step": 45000
    },
    {
      "epoch": 1.1911930256302852,
      "grad_norm": 0.24928466975688934,
      "learning_rate": 1.2058713162464767e-05,
      "loss": 0.0072,
      "step": 45500
    },
    {
      "epoch": 1.2042830588789695,
      "grad_norm": 0.11469607055187225,
      "learning_rate": 1.1971446274140204e-05,
      "loss": 0.0068,
      "step": 46000
    },
    {
      "epoch": 1.217373092127654,
      "grad_norm": 0.389217734336853,
      "learning_rate": 1.1884179385815642e-05,
      "loss": 0.0072,
      "step": 46500
    },
    {
      "epoch": 1.2304631253763385,
      "grad_norm": 0.1249752938747406,
      "learning_rate": 1.1796912497491078e-05,
      "loss": 0.0065,
      "step": 47000
    },
    {
      "epoch": 1.2435531586250228,
      "grad_norm": 0.2189619392156601,
      "learning_rate": 1.1709645609166516e-05,
      "loss": 0.0073,
      "step": 47500
    },
    {
      "epoch": 1.2566431918737073,
      "grad_norm": 0.3927897810935974,
      "learning_rate": 1.1622378720841952e-05,
      "loss": 0.0069,
      "step": 48000
    },
    {
      "epoch": 1.2697332251223918,
      "grad_norm": 0.07470349222421646,
      "learning_rate": 1.153511183251739e-05,
      "loss": 0.0076,
      "step": 48500
    },
    {
      "epoch": 1.2828232583710761,
      "grad_norm": 0.3911282420158386,
      "learning_rate": 1.1447844944192825e-05,
      "loss": 0.0071,
      "step": 49000
    },
    {
      "epoch": 1.2959132916197607,
      "grad_norm": 0.2361019402742386,
      "learning_rate": 1.1360578055868263e-05,
      "loss": 0.0074,
      "step": 49500
    },
    {
      "epoch": 1.3090033248684452,
      "grad_norm": 0.1682516634464264,
      "learning_rate": 1.1273311167543699e-05,
      "loss": 0.0076,
      "step": 50000
    },
    {
      "epoch": 1.3220933581171297,
      "grad_norm": 0.1294177621603012,
      "learning_rate": 1.1186044279219137e-05,
      "loss": 0.0065,
      "step": 50500
    },
    {
      "epoch": 1.335183391365814,
      "grad_norm": 0.5710951685905457,
      "learning_rate": 1.1098777390894573e-05,
      "loss": 0.0068,
      "step": 51000
    },
    {
      "epoch": 1.3482734246144985,
      "grad_norm": 0.12587948143482208,
      "learning_rate": 1.1011510502570011e-05,
      "loss": 0.0071,
      "step": 51500
    },
    {
      "epoch": 1.361363457863183,
      "grad_norm": 0.37146255373954773,
      "learning_rate": 1.0924243614245446e-05,
      "loss": 0.0069,
      "step": 52000
    },
    {
      "epoch": 1.3744534911118675,
      "grad_norm": 0.29824337363243103,
      "learning_rate": 1.0836976725920884e-05,
      "loss": 0.0069,
      "step": 52500
    },
    {
      "epoch": 1.3875435243605518,
      "grad_norm": 0.10349422693252563,
      "learning_rate": 1.0749709837596322e-05,
      "loss": 0.007,
      "step": 53000
    },
    {
      "epoch": 1.4006335576092364,
      "grad_norm": 0.2823665142059326,
      "learning_rate": 1.0662442949271758e-05,
      "loss": 0.0069,
      "step": 53500
    },
    {
      "epoch": 1.4137235908579209,
      "grad_norm": 0.038343362510204315,
      "learning_rate": 1.0575176060947196e-05,
      "loss": 0.0061,
      "step": 54000
    },
    {
      "epoch": 1.4268136241066052,
      "grad_norm": 0.2581956088542938,
      "learning_rate": 1.0487909172622633e-05,
      "loss": 0.0062,
      "step": 54500
    },
    {
      "epoch": 1.4399036573552897,
      "grad_norm": 0.11447520554065704,
      "learning_rate": 1.040064228429807e-05,
      "loss": 0.0066,
      "step": 55000
    },
    {
      "epoch": 1.4529936906039742,
      "grad_norm": 0.4050372838973999,
      "learning_rate": 1.0313375395973507e-05,
      "loss": 0.007,
      "step": 55500
    },
    {
      "epoch": 1.4660837238526585,
      "grad_norm": 0.022968396544456482,
      "learning_rate": 1.0226108507648945e-05,
      "loss": 0.0066,
      "step": 56000
    },
    {
      "epoch": 1.479173757101343,
      "grad_norm": 0.07500626146793365,
      "learning_rate": 1.013884161932438e-05,
      "loss": 0.0065,
      "step": 56500
    },
    {
      "epoch": 1.4922637903500275,
      "grad_norm": 0.031823791563510895,
      "learning_rate": 1.0051574730999817e-05,
      "loss": 0.0063,
      "step": 57000
    },
    {
      "epoch": 1.5053538235987118,
      "grad_norm": 0.19768255949020386,
      "learning_rate": 9.964307842675255e-06,
      "loss": 0.0068,
      "step": 57500
    },
    {
      "epoch": 1.5184438568473964,
      "grad_norm": 0.210379958152771,
      "learning_rate": 9.877040954350692e-06,
      "loss": 0.0064,
      "step": 58000
    },
    {
      "epoch": 1.5315338900960809,
      "grad_norm": 0.14373145997524261,
      "learning_rate": 9.789774066026128e-06,
      "loss": 0.007,
      "step": 58500
    },
    {
      "epoch": 1.5446239233447652,
      "grad_norm": 0.2256031185388565,
      "learning_rate": 9.702507177701566e-06,
      "loss": 0.0063,
      "step": 59000
    },
    {
      "epoch": 1.55771395659345,
      "grad_norm": 0.11486474424600601,
      "learning_rate": 9.615240289377002e-06,
      "loss": 0.0062,
      "step": 59500
    },
    {
      "epoch": 1.5708039898421342,
      "grad_norm": 0.20883575081825256,
      "learning_rate": 9.527973401052439e-06,
      "loss": 0.0061,
      "step": 60000
    },
    {
      "epoch": 1.5838940230908185,
      "grad_norm": 0.15535525977611542,
      "learning_rate": 9.440706512727877e-06,
      "loss": 0.006,
      "step": 60500
    },
    {
      "epoch": 1.5969840563395032,
      "grad_norm": 0.2790842652320862,
      "learning_rate": 9.353439624403313e-06,
      "loss": 0.0065,
      "step": 61000
    },
    {
      "epoch": 1.6100740895881875,
      "grad_norm": 0.338480681180954,
      "learning_rate": 9.266172736078751e-06,
      "loss": 0.0066,
      "step": 61500
    },
    {
      "epoch": 1.623164122836872,
      "grad_norm": 0.3213784098625183,
      "learning_rate": 9.178905847754187e-06,
      "loss": 0.0064,
      "step": 62000
    },
    {
      "epoch": 1.6362541560855566,
      "grad_norm": 0.22697031497955322,
      "learning_rate": 9.091638959429623e-06,
      "loss": 0.0066,
      "step": 62500
    },
    {
      "epoch": 1.6493441893342409,
      "grad_norm": 0.12834736704826355,
      "learning_rate": 9.004372071105061e-06,
      "loss": 0.0063,
      "step": 63000
    },
    {
      "epoch": 1.6624342225829254,
      "grad_norm": 0.08631685376167297,
      "learning_rate": 8.9171051827805e-06,
      "loss": 0.0058,
      "step": 63500
    },
    {
      "epoch": 1.67552425583161,
      "grad_norm": 0.12295836955308914,
      "learning_rate": 8.829838294455936e-06,
      "loss": 0.0054,
      "step": 64000
    },
    {
      "epoch": 1.6886142890802942,
      "grad_norm": 0.3893487751483917,
      "learning_rate": 8.742571406131372e-06,
      "loss": 0.0062,
      "step": 64500
    },
    {
      "epoch": 1.7017043223289787,
      "grad_norm": 0.14583726227283478,
      "learning_rate": 8.65530451780681e-06,
      "loss": 0.0063,
      "step": 65000
    },
    {
      "epoch": 1.7147943555776632,
      "grad_norm": 0.3584669232368469,
      "learning_rate": 8.568037629482246e-06,
      "loss": 0.0064,
      "step": 65500
    },
    {
      "epoch": 1.7278843888263475,
      "grad_norm": 0.05680006742477417,
      "learning_rate": 8.480770741157683e-06,
      "loss": 0.0061,
      "step": 66000
    },
    {
      "epoch": 1.740974422075032,
      "grad_norm": 0.15972712635993958,
      "learning_rate": 8.39350385283312e-06,
      "loss": 0.0061,
      "step": 66500
    },
    {
      "epoch": 1.7540644553237166,
      "grad_norm": 0.3504839837551117,
      "learning_rate": 8.306236964508557e-06,
      "loss": 0.0056,
      "step": 67000
    },
    {
      "epoch": 1.7671544885724009,
      "grad_norm": 0.2058769315481186,
      "learning_rate": 8.218970076183995e-06,
      "loss": 0.0062,
      "step": 67500
    },
    {
      "epoch": 1.7802445218210854,
      "grad_norm": 0.2028743475675583,
      "learning_rate": 8.131703187859431e-06,
      "loss": 0.0062,
      "step": 68000
    },
    {
      "epoch": 1.79333455506977,
      "grad_norm": 0.11009418964385986,
      "learning_rate": 8.044436299534868e-06,
      "loss": 0.0062,
      "step": 68500
    },
    {
      "epoch": 1.8064245883184542,
      "grad_norm": 0.0618484802544117,
      "learning_rate": 7.957169411210305e-06,
      "loss": 0.0062,
      "step": 69000
    },
    {
      "epoch": 1.819514621567139,
      "grad_norm": 0.08521759510040283,
      "learning_rate": 7.869902522885742e-06,
      "loss": 0.0059,
      "step": 69500
    },
    {
      "epoch": 1.8326046548158232,
      "grad_norm": 0.3029402196407318,
      "learning_rate": 7.782635634561178e-06,
      "loss": 0.0059,
      "step": 70000
    },
    {
      "epoch": 1.8456946880645075,
      "grad_norm": 0.23642723262310028,
      "learning_rate": 7.695368746236616e-06,
      "loss": 0.006,
      "step": 70500
    },
    {
      "epoch": 1.8587847213131923,
      "grad_norm": 0.14888063073158264,
      "learning_rate": 7.608101857912053e-06,
      "loss": 0.006,
      "step": 71000
    },
    {
      "epoch": 1.8718747545618766,
      "grad_norm": 0.10285039246082306,
      "learning_rate": 7.52083496958749e-06,
      "loss": 0.0054,
      "step": 71500
    },
    {
      "epoch": 1.884964787810561,
      "grad_norm": 0.1975327879190445,
      "learning_rate": 7.4335680812629275e-06,
      "loss": 0.0059,
      "step": 72000
    },
    {
      "epoch": 1.8980548210592456,
      "grad_norm": 0.24295471608638763,
      "learning_rate": 7.346301192938365e-06,
      "loss": 0.0058,
      "step": 72500
    },
    {
      "epoch": 1.91114485430793,
      "grad_norm": 0.03384074568748474,
      "learning_rate": 7.259034304613801e-06,
      "loss": 0.0058,
      "step": 73000
    },
    {
      "epoch": 1.9242348875566144,
      "grad_norm": 0.18082177639007568,
      "learning_rate": 7.171767416289238e-06,
      "loss": 0.0058,
      "step": 73500
    },
    {
      "epoch": 1.937324920805299,
      "grad_norm": 0.33321407437324524,
      "learning_rate": 7.084500527964675e-06,
      "loss": 0.0057,
      "step": 74000
    },
    {
      "epoch": 1.9504149540539832,
      "grad_norm": 0.0694960206747055,
      "learning_rate": 6.9972336396401115e-06,
      "loss": 0.0058,
      "step": 74500
    },
    {
      "epoch": 1.9635049873026678,
      "grad_norm": 0.11903239041566849,
      "learning_rate": 6.909966751315549e-06,
      "loss": 0.0057,
      "step": 75000
    },
    {
      "epoch": 1.9765950205513523,
      "grad_norm": 0.28279566764831543,
      "learning_rate": 6.822699862990986e-06,
      "loss": 0.0054,
      "step": 75500
    },
    {
      "epoch": 1.9896850538000366,
      "grad_norm": 0.12491460144519806,
      "learning_rate": 6.735432974666423e-06,
      "loss": 0.0055,
      "step": 76000
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.0037132962606847286,
      "eval_runtime": 2298.3508,
      "eval_samples_per_second": 132.953,
      "eval_steps_per_second": 16.619,
      "step": 76394
    }
  ],
  "logging_steps": 500,
  "max_steps": 114591,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.657974968844288e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}