{ "best_metric": null, "best_model_checkpoint": null, "epoch": 3.97561622051418, "eval_steps": 0, "global_step": 90000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.02, "grad_norm": 0.010943206027150154, "learning_rate": 1.6563604240282684e-06, "loss": 0.0168, "step": 500 }, { "epoch": 0.04, "grad_norm": 0.010196206159889698, "learning_rate": 3.312720848056537e-06, "loss": 0.0139, "step": 1000 }, { "epoch": 0.07, "grad_norm": 0.009697528555989265, "learning_rate": 4.969081272084806e-06, "loss": 0.0142, "step": 1500 }, { "epoch": 0.09, "grad_norm": 0.011516646482050419, "learning_rate": 6.625441696113074e-06, "loss": 0.0139, "step": 2000 }, { "epoch": 0.11, "grad_norm": 0.01910693757236004, "learning_rate": 8.281802120141344e-06, "loss": 0.0137, "step": 2500 }, { "epoch": 0.13, "grad_norm": 0.004923399072140455, "learning_rate": 9.938162544169612e-06, "loss": 0.0139, "step": 3000 }, { "epoch": 0.15, "grad_norm": 0.007313380483537912, "learning_rate": 1.159452296819788e-05, "loss": 0.0137, "step": 3500 }, { "epoch": 0.18, "grad_norm": 0.003835548646748066, "learning_rate": 1.3250883392226147e-05, "loss": 0.0139, "step": 4000 }, { "epoch": 0.2, "grad_norm": 0.007223702035844326, "learning_rate": 1.4907243816254417e-05, "loss": 0.0136, "step": 4500 }, { "epoch": 0.22, "grad_norm": 0.010532427579164505, "learning_rate": 1.6563604240282687e-05, "loss": 0.0135, "step": 5000 }, { "epoch": 0.24, "grad_norm": 0.0022224171552807093, "learning_rate": 1.8219964664310956e-05, "loss": 0.0137, "step": 5500 }, { "epoch": 0.27, "grad_norm": 0.0046217963099479675, "learning_rate": 1.9876325088339224e-05, "loss": 0.0138, "step": 6000 }, { "epoch": 0.29, "grad_norm": 0.007814540527760983, "learning_rate": 2.1532685512367493e-05, "loss": 0.0136, "step": 6500 }, { "epoch": 0.31, "grad_norm": 0.011144978925585747, "learning_rate": 2.318904593639576e-05, "loss": 0.0137, "step": 7000 }, { "epoch": 0.33, "grad_norm": 0.0014108135364949703, "learning_rate": 2.484540636042403e-05, "loss": 0.0138, "step": 7500 }, { "epoch": 0.35, "grad_norm": 0.0039382693357765675, "learning_rate": 2.6501766784452294e-05, "loss": 0.0135, "step": 8000 }, { "epoch": 0.38, "grad_norm": 0.014407187700271606, "learning_rate": 2.8158127208480566e-05, "loss": 0.0138, "step": 8500 }, { "epoch": 0.4, "grad_norm": 0.003938812296837568, "learning_rate": 2.9814487632508834e-05, "loss": 0.0138, "step": 9000 }, { "epoch": 0.42, "grad_norm": 0.0075125317089259624, "learning_rate": 2.983655639540591e-05, "loss": 0.0141, "step": 9500 }, { "epoch": 0.44, "grad_norm": 0.004186388570815325, "learning_rate": 2.9652498282124278e-05, "loss": 0.0139, "step": 10000 }, { "epoch": 0.46, "grad_norm": 0.004882327280938625, "learning_rate": 2.9468440168842645e-05, "loss": 0.0139, "step": 10500 }, { "epoch": 0.49, "grad_norm": 0.002360760699957609, "learning_rate": 2.9284382055561012e-05, "loss": 0.0138, "step": 11000 }, { "epoch": 0.51, "grad_norm": 0.003932056948542595, "learning_rate": 2.910032394227938e-05, "loss": 0.0141, "step": 11500 }, { "epoch": 0.53, "grad_norm": 0.015781084075570107, "learning_rate": 2.8916265828997743e-05, "loss": 0.0138, "step": 12000 }, { "epoch": 0.55, "grad_norm": 0.002987222746014595, "learning_rate": 2.873220771571611e-05, "loss": 0.0138, "step": 12500 }, { "epoch": 0.57, "grad_norm": 0.002478894544765353, "learning_rate": 2.8548149602434473e-05, "loss": 0.0138, "step": 13000 }, { "epoch": 0.6, "grad_norm": 0.00281110149808228, "learning_rate": 
2.836409148915284e-05, "loss": 0.0138, "step": 13500 }, { "epoch": 0.62, "grad_norm": 0.0016416048165410757, "learning_rate": 2.8180033375871207e-05, "loss": 0.0136, "step": 14000 }, { "epoch": 0.64, "grad_norm": 0.003159256186336279, "learning_rate": 2.7995975262589574e-05, "loss": 0.0139, "step": 14500 }, { "epoch": 0.66, "grad_norm": 0.03868310526013374, "learning_rate": 2.781191714930794e-05, "loss": 0.0151, "step": 15000 }, { "epoch": 0.68, "grad_norm": 0.026466334238648415, "learning_rate": 2.7627859036026308e-05, "loss": 0.019, "step": 15500 }, { "epoch": 0.71, "grad_norm": 0.014158655889332294, "learning_rate": 2.7443800922744675e-05, "loss": 0.0184, "step": 16000 }, { "epoch": 0.73, "grad_norm": 0.0531819723546505, "learning_rate": 2.7259742809463042e-05, "loss": 0.018, "step": 16500 }, { "epoch": 0.75, "grad_norm": 0.031152892857789993, "learning_rate": 2.707568469618141e-05, "loss": 0.0163, "step": 17000 }, { "epoch": 0.77, "grad_norm": 0.045258529484272, "learning_rate": 2.6891626582899776e-05, "loss": 0.0164, "step": 17500 }, { "epoch": 0.8, "grad_norm": 0.08072955161333084, "learning_rate": 2.6707568469618143e-05, "loss": 0.0158, "step": 18000 }, { "epoch": 0.82, "grad_norm": 0.06747995316982269, "learning_rate": 2.652351035633651e-05, "loss": 0.0155, "step": 18500 }, { "epoch": 0.84, "grad_norm": 0.029322072863578796, "learning_rate": 2.6339452243054877e-05, "loss": 0.0151, "step": 19000 }, { "epoch": 0.86, "grad_norm": 0.03149860352277756, "learning_rate": 2.615539412977324e-05, "loss": 0.0151, "step": 19500 }, { "epoch": 0.88, "grad_norm": 0.03845517709851265, "learning_rate": 2.5971336016491608e-05, "loss": 0.0152, "step": 20000 }, { "epoch": 0.91, "grad_norm": 0.02114521712064743, "learning_rate": 2.578727790320997e-05, "loss": 0.0152, "step": 20500 }, { "epoch": 0.93, "grad_norm": 0.04469970241189003, "learning_rate": 2.560321978992834e-05, "loss": 0.0151, "step": 21000 }, { "epoch": 0.95, "grad_norm": 0.03439483791589737, "learning_rate": 2.5419161676646705e-05, "loss": 0.0148, "step": 21500 }, { "epoch": 0.97, "grad_norm": 0.016261784359812737, "learning_rate": 2.5235103563365072e-05, "loss": 0.015, "step": 22000 }, { "epoch": 0.99, "grad_norm": 0.04864068329334259, "learning_rate": 2.505104545008344e-05, "loss": 0.0147, "step": 22500 }, { "epoch": 1.02, "grad_norm": 0.024570118635892868, "learning_rate": 2.4866987336801806e-05, "loss": 0.0149, "step": 23000 }, { "epoch": 1.04, "grad_norm": 0.015043354593217373, "learning_rate": 2.4682929223520173e-05, "loss": 0.0151, "step": 23500 }, { "epoch": 1.06, "grad_norm": 0.038648445159196854, "learning_rate": 2.449887111023854e-05, "loss": 0.015, "step": 24000 }, { "epoch": 1.08, "grad_norm": 0.2623123824596405, "learning_rate": 2.4314812996956907e-05, "loss": 0.0148, "step": 24500 }, { "epoch": 1.1, "grad_norm": 0.02235906571149826, "learning_rate": 2.4130754883675274e-05, "loss": 0.0147, "step": 25000 }, { "epoch": 1.13, "grad_norm": 0.005854467861354351, "learning_rate": 2.394669677039364e-05, "loss": 0.0149, "step": 25500 }, { "epoch": 1.15, "grad_norm": 0.011547247879207134, "learning_rate": 2.376263865711201e-05, "loss": 0.0147, "step": 26000 }, { "epoch": 1.17, "grad_norm": 0.03119933046400547, "learning_rate": 2.3578580543830375e-05, "loss": 0.015, "step": 26500 }, { "epoch": 1.19, "grad_norm": 0.047728102654218674, "learning_rate": 2.339452243054874e-05, "loss": 0.0146, "step": 27000 }, { "epoch": 1.21, "grad_norm": 0.04931659996509552, "learning_rate": 2.3210464317267106e-05, "loss": 0.0145, "step": 27500 }, { 
"epoch": 1.24, "grad_norm": 0.22793345153331757, "learning_rate": 2.3026406203985473e-05, "loss": 0.0147, "step": 28000 }, { "epoch": 1.26, "grad_norm": 0.046943288296461105, "learning_rate": 2.2842348090703837e-05, "loss": 0.0149, "step": 28500 }, { "epoch": 1.28, "grad_norm": 0.07938718795776367, "learning_rate": 2.2658289977422203e-05, "loss": 0.0147, "step": 29000 }, { "epoch": 1.3, "grad_norm": 0.02574564516544342, "learning_rate": 2.247423186414057e-05, "loss": 0.0144, "step": 29500 }, { "epoch": 1.33, "grad_norm": 0.011776907369494438, "learning_rate": 2.2290173750858937e-05, "loss": 0.0147, "step": 30000 }, { "epoch": 1.35, "grad_norm": 0.0066869258880615234, "learning_rate": 2.2106115637577304e-05, "loss": 0.0147, "step": 30500 }, { "epoch": 1.37, "grad_norm": 0.010923570953309536, "learning_rate": 2.192205752429567e-05, "loss": 0.0145, "step": 31000 }, { "epoch": 1.39, "grad_norm": 0.03816843405365944, "learning_rate": 2.173799941101404e-05, "loss": 0.0149, "step": 31500 }, { "epoch": 1.41, "grad_norm": 0.04863005876541138, "learning_rate": 2.1553941297732405e-05, "loss": 0.0147, "step": 32000 }, { "epoch": 1.44, "grad_norm": 0.09232094883918762, "learning_rate": 2.1369883184450772e-05, "loss": 0.0148, "step": 32500 }, { "epoch": 1.46, "grad_norm": 0.006221645046025515, "learning_rate": 2.118582507116914e-05, "loss": 0.0148, "step": 33000 }, { "epoch": 1.48, "grad_norm": 0.008674496784806252, "learning_rate": 2.1001766957887506e-05, "loss": 0.0145, "step": 33500 }, { "epoch": 1.5, "grad_norm": 0.013610797002911568, "learning_rate": 2.081770884460587e-05, "loss": 0.0149, "step": 34000 }, { "epoch": 1.52, "grad_norm": 0.020487351343035698, "learning_rate": 2.0633650731324237e-05, "loss": 0.0147, "step": 34500 }, { "epoch": 1.55, "grad_norm": 0.004388992674648762, "learning_rate": 2.0449592618042604e-05, "loss": 0.0146, "step": 35000 }, { "epoch": 1.57, "grad_norm": 0.029407095164060593, "learning_rate": 2.026553450476097e-05, "loss": 0.0144, "step": 35500 }, { "epoch": 1.59, "grad_norm": 0.04079248011112213, "learning_rate": 2.0081476391479335e-05, "loss": 0.0146, "step": 36000 }, { "epoch": 1.61, "grad_norm": 0.033315982669591904, "learning_rate": 1.98974182781977e-05, "loss": 0.0143, "step": 36500 }, { "epoch": 1.63, "grad_norm": 0.00864444486796856, "learning_rate": 1.971336016491607e-05, "loss": 0.0145, "step": 37000 }, { "epoch": 1.66, "grad_norm": 0.03435393422842026, "learning_rate": 1.9529302051634436e-05, "loss": 0.0145, "step": 37500 }, { "epoch": 1.68, "grad_norm": 0.008053929544985294, "learning_rate": 1.9345243938352803e-05, "loss": 0.0146, "step": 38000 }, { "epoch": 1.7, "grad_norm": 0.004771470092236996, "learning_rate": 1.916118582507117e-05, "loss": 0.0143, "step": 38500 }, { "epoch": 1.72, "grad_norm": 0.016594666987657547, "learning_rate": 1.8977127711789537e-05, "loss": 0.0149, "step": 39000 }, { "epoch": 1.74, "grad_norm": 0.01181800477206707, "learning_rate": 1.8793069598507904e-05, "loss": 0.0143, "step": 39500 }, { "epoch": 1.77, "grad_norm": 0.03508065640926361, "learning_rate": 1.860901148522627e-05, "loss": 0.0146, "step": 40000 }, { "epoch": 1.79, "grad_norm": 0.028093870729207993, "learning_rate": 1.8424953371944638e-05, "loss": 0.0146, "step": 40500 }, { "epoch": 1.81, "grad_norm": 0.05029403790831566, "learning_rate": 1.8240895258663005e-05, "loss": 0.0146, "step": 41000 }, { "epoch": 1.83, "grad_norm": 0.006528925616294146, "learning_rate": 1.8056837145381368e-05, "loss": 0.0142, "step": 41500 }, { "epoch": 1.86, "grad_norm": 0.02557162009179592, 
"learning_rate": 1.7872779032099735e-05, "loss": 0.0144, "step": 42000 }, { "epoch": 1.88, "grad_norm": 0.025360217317938805, "learning_rate": 1.7688720918818102e-05, "loss": 0.0146, "step": 42500 }, { "epoch": 1.9, "grad_norm": 0.04788580909371376, "learning_rate": 1.750466280553647e-05, "loss": 0.0147, "step": 43000 }, { "epoch": 1.92, "grad_norm": 0.02906920574605465, "learning_rate": 1.7320604692254836e-05, "loss": 0.0144, "step": 43500 }, { "epoch": 1.94, "grad_norm": 0.012823808006942272, "learning_rate": 1.71365465789732e-05, "loss": 0.0145, "step": 44000 }, { "epoch": 1.97, "grad_norm": 0.008996455930173397, "learning_rate": 1.6952488465691567e-05, "loss": 0.0143, "step": 44500 }, { "epoch": 1.99, "grad_norm": 0.010119748301804066, "learning_rate": 1.6768430352409934e-05, "loss": 0.0146, "step": 45000 }, { "epoch": 2.01, "grad_norm": 0.02591855265200138, "learning_rate": 1.65843722391283e-05, "loss": 0.0143, "step": 45500 }, { "epoch": 2.03, "grad_norm": 0.013729671947658062, "learning_rate": 1.6400314125846668e-05, "loss": 0.0147, "step": 46000 }, { "epoch": 2.05, "grad_norm": 0.0771203562617302, "learning_rate": 1.6216256012565035e-05, "loss": 0.0146, "step": 46500 }, { "epoch": 2.08, "grad_norm": 0.04501279070973396, "learning_rate": 1.60321978992834e-05, "loss": 0.0144, "step": 47000 }, { "epoch": 2.1, "grad_norm": 0.03493111953139305, "learning_rate": 1.584813978600177e-05, "loss": 0.0144, "step": 47500 }, { "epoch": 2.12, "grad_norm": 0.01472916454076767, "learning_rate": 1.5664081672720136e-05, "loss": 0.0144, "step": 48000 }, { "epoch": 2.14, "grad_norm": 0.04763146862387657, "learning_rate": 1.54800235594385e-05, "loss": 0.0145, "step": 48500 }, { "epoch": 2.16, "grad_norm": 0.024467509239912033, "learning_rate": 1.5295965446156866e-05, "loss": 0.0144, "step": 49000 }, { "epoch": 2.19, "grad_norm": 0.01768341101706028, "learning_rate": 1.5111907332875235e-05, "loss": 0.0144, "step": 49500 }, { "epoch": 2.21, "grad_norm": 0.06102894991636276, "learning_rate": 1.49278492195936e-05, "loss": 0.0141, "step": 50000 }, { "epoch": 2.23, "grad_norm": 0.01851697266101837, "learning_rate": 1.4743791106311966e-05, "loss": 0.0142, "step": 50500 }, { "epoch": 2.25, "grad_norm": 0.015444310382008553, "learning_rate": 1.4559732993030333e-05, "loss": 0.0145, "step": 51000 }, { "epoch": 2.27, "grad_norm": 0.013120009563863277, "learning_rate": 1.43756748797487e-05, "loss": 0.0143, "step": 51500 }, { "epoch": 2.3, "grad_norm": 0.01589464209973812, "learning_rate": 1.4191616766467067e-05, "loss": 0.0141, "step": 52000 }, { "epoch": 2.32, "grad_norm": 0.040490709245204926, "learning_rate": 1.4007558653185433e-05, "loss": 0.0144, "step": 52500 }, { "epoch": 2.34, "grad_norm": 0.025874989107251167, "learning_rate": 1.38235005399038e-05, "loss": 0.0143, "step": 53000 }, { "epoch": 2.36, "grad_norm": 0.022394156083464622, "learning_rate": 1.3639442426622166e-05, "loss": 0.0144, "step": 53500 }, { "epoch": 2.39, "grad_norm": 0.010273805819451809, "learning_rate": 1.3455384313340531e-05, "loss": 0.0144, "step": 54000 }, { "epoch": 2.41, "grad_norm": 0.028374383226037025, "learning_rate": 1.3271326200058898e-05, "loss": 0.0144, "step": 54500 }, { "epoch": 2.43, "grad_norm": 0.018441613763570786, "learning_rate": 1.3087268086777265e-05, "loss": 0.0145, "step": 55000 }, { "epoch": 2.45, "grad_norm": 0.006460436619818211, "learning_rate": 1.2903209973495632e-05, "loss": 0.0145, "step": 55500 }, { "epoch": 2.47, "grad_norm": 0.00770485308021307, "learning_rate": 1.2719151860213999e-05, "loss": 0.0144, 
"step": 56000 }, { "epoch": 2.5, "grad_norm": 0.00849447026848793, "learning_rate": 1.2535093746932366e-05, "loss": 0.0147, "step": 56500 }, { "epoch": 2.52, "grad_norm": 0.035621609538793564, "learning_rate": 1.2351035633650733e-05, "loss": 0.0145, "step": 57000 }, { "epoch": 2.54, "grad_norm": 0.008923010900616646, "learning_rate": 1.2166977520369098e-05, "loss": 0.0144, "step": 57500 }, { "epoch": 2.56, "grad_norm": 0.0056849876418709755, "learning_rate": 1.1982919407087464e-05, "loss": 0.0143, "step": 58000 }, { "epoch": 2.58, "grad_norm": 0.0071659935638308525, "learning_rate": 1.179886129380583e-05, "loss": 0.0144, "step": 58500 }, { "epoch": 2.61, "grad_norm": 0.021617043763399124, "learning_rate": 1.1614803180524198e-05, "loss": 0.0143, "step": 59000 }, { "epoch": 2.63, "grad_norm": 0.011144719086587429, "learning_rate": 1.1430745067242565e-05, "loss": 0.0142, "step": 59500 }, { "epoch": 2.65, "grad_norm": 0.010943782515823841, "learning_rate": 1.1246686953960932e-05, "loss": 0.0143, "step": 60000 }, { "epoch": 2.67, "grad_norm": 0.010286700911819935, "learning_rate": 1.1062628840679299e-05, "loss": 0.0143, "step": 60500 }, { "epoch": 2.69, "grad_norm": 0.010169615969061852, "learning_rate": 1.0878570727397666e-05, "loss": 0.0143, "step": 61000 }, { "epoch": 2.72, "grad_norm": 0.032067082822322845, "learning_rate": 1.069451261411603e-05, "loss": 0.0144, "step": 61500 }, { "epoch": 2.74, "grad_norm": 0.008680183440446854, "learning_rate": 1.0510454500834396e-05, "loss": 0.0143, "step": 62000 }, { "epoch": 2.76, "grad_norm": 0.01648719422519207, "learning_rate": 1.0326396387552763e-05, "loss": 0.0143, "step": 62500 }, { "epoch": 2.78, "grad_norm": 0.0210120789706707, "learning_rate": 1.014233827427113e-05, "loss": 0.0146, "step": 63000 }, { "epoch": 2.81, "grad_norm": 0.034336596727371216, "learning_rate": 9.958280160989497e-06, "loss": 0.0144, "step": 63500 }, { "epoch": 2.83, "grad_norm": 0.03138417750597, "learning_rate": 9.774222047707864e-06, "loss": 0.0141, "step": 64000 }, { "epoch": 2.85, "grad_norm": 0.01799875684082508, "learning_rate": 9.590163934426231e-06, "loss": 0.0142, "step": 64500 }, { "epoch": 2.87, "grad_norm": 0.02960127592086792, "learning_rate": 9.406105821144595e-06, "loss": 0.0143, "step": 65000 }, { "epoch": 2.89, "grad_norm": 0.012712860479950905, "learning_rate": 9.222047707862962e-06, "loss": 0.0146, "step": 65500 }, { "epoch": 2.92, "grad_norm": 0.009180006571114063, "learning_rate": 9.037989594581329e-06, "loss": 0.0143, "step": 66000 }, { "epoch": 2.94, "grad_norm": 0.0106426402926445, "learning_rate": 8.853931481299696e-06, "loss": 0.0143, "step": 66500 }, { "epoch": 2.96, "grad_norm": 0.03638075664639473, "learning_rate": 8.669873368018063e-06, "loss": 0.0141, "step": 67000 }, { "epoch": 2.98, "grad_norm": 0.02028089202940464, "learning_rate": 8.48581525473643e-06, "loss": 0.0144, "step": 67500 }, { "epoch": 3.0, "grad_norm": 0.004987742286175489, "learning_rate": 8.301757141454797e-06, "loss": 0.0143, "step": 68000 }, { "epoch": 3.03, "grad_norm": 0.012421207502484322, "learning_rate": 8.117699028173162e-06, "loss": 0.0145, "step": 68500 }, { "epoch": 3.05, "grad_norm": 0.05489884316921234, "learning_rate": 7.933640914891527e-06, "loss": 0.0142, "step": 69000 }, { "epoch": 3.07, "grad_norm": 0.007833471521735191, "learning_rate": 7.749582801609894e-06, "loss": 0.0145, "step": 69500 }, { "epoch": 3.09, "grad_norm": 0.014776123687624931, "learning_rate": 7.565524688328261e-06, "loss": 0.0142, "step": 70000 }, { "epoch": 3.11, "grad_norm": 
0.015590249560773373, "learning_rate": 7.381466575046628e-06, "loss": 0.0143, "step": 70500 }, { "epoch": 3.14, "grad_norm": 0.018214261159300804, "learning_rate": 7.197408461764995e-06, "loss": 0.0142, "step": 71000 }, { "epoch": 3.16, "grad_norm": 0.029773008078336716, "learning_rate": 7.013350348483361e-06, "loss": 0.0143, "step": 71500 }, { "epoch": 3.18, "grad_norm": 0.028139958158135414, "learning_rate": 6.8292922352017276e-06, "loss": 0.0143, "step": 72000 }, { "epoch": 3.2, "grad_norm": 0.024558302015066147, "learning_rate": 6.6452341219200945e-06, "loss": 0.014, "step": 72500 }, { "epoch": 3.22, "grad_norm": 0.05188705772161484, "learning_rate": 6.461176008638461e-06, "loss": 0.0141, "step": 73000 }, { "epoch": 3.25, "grad_norm": 0.02240253984928131, "learning_rate": 6.277117895356828e-06, "loss": 0.0142, "step": 73500 }, { "epoch": 3.27, "grad_norm": 0.015994379296898842, "learning_rate": 6.093059782075194e-06, "loss": 0.0143, "step": 74000 }, { "epoch": 3.29, "grad_norm": 0.014095323160290718, "learning_rate": 5.909001668793561e-06, "loss": 0.0141, "step": 74500 }, { "epoch": 3.31, "grad_norm": 0.0076615894213318825, "learning_rate": 5.724943555511927e-06, "loss": 0.0141, "step": 75000 }, { "epoch": 3.34, "grad_norm": 0.023330098018050194, "learning_rate": 5.540885442230294e-06, "loss": 0.0143, "step": 75500 }, { "epoch": 3.36, "grad_norm": 0.022397508844733238, "learning_rate": 5.35682732894866e-06, "loss": 0.0141, "step": 76000 }, { "epoch": 3.38, "grad_norm": 0.01998765394091606, "learning_rate": 5.172769215667027e-06, "loss": 0.0143, "step": 76500 }, { "epoch": 3.4, "grad_norm": 0.07193479686975479, "learning_rate": 4.988711102385393e-06, "loss": 0.0143, "step": 77000 }, { "epoch": 3.42, "grad_norm": 0.030124777927994728, "learning_rate": 4.80465298910376e-06, "loss": 0.0146, "step": 77500 }, { "epoch": 3.45, "grad_norm": 0.0762249082326889, "learning_rate": 4.620594875822126e-06, "loss": 0.0144, "step": 78000 }, { "epoch": 3.47, "grad_norm": 0.030013220384716988, "learning_rate": 4.4365367625404925e-06, "loss": 0.0143, "step": 78500 }, { "epoch": 3.49, "grad_norm": 0.013210024684667587, "learning_rate": 4.2524786492588595e-06, "loss": 0.0144, "step": 79000 }, { "epoch": 3.51, "grad_norm": 0.021476522088050842, "learning_rate": 4.068420535977226e-06, "loss": 0.0145, "step": 79500 }, { "epoch": 3.53, "grad_norm": 0.005120801739394665, "learning_rate": 3.884362422695593e-06, "loss": 0.0142, "step": 80000 }, { "epoch": 3.56, "grad_norm": 0.03378542512655258, "learning_rate": 3.7003043094139592e-06, "loss": 0.0144, "step": 80500 }, { "epoch": 3.58, "grad_norm": 0.004559422377496958, "learning_rate": 3.5162461961323254e-06, "loss": 0.0143, "step": 81000 }, { "epoch": 3.6, "grad_norm": 0.022087154909968376, "learning_rate": 3.3321880828506924e-06, "loss": 0.0142, "step": 81500 }, { "epoch": 3.62, "grad_norm": 0.027302134782075882, "learning_rate": 3.1481299695690585e-06, "loss": 0.0142, "step": 82000 }, { "epoch": 3.64, "grad_norm": 0.0070667564868927, "learning_rate": 2.964071856287425e-06, "loss": 0.0142, "step": 82500 }, { "epoch": 3.67, "grad_norm": 0.004392644390463829, "learning_rate": 2.7800137430057916e-06, "loss": 0.014, "step": 83000 }, { "epoch": 3.69, "grad_norm": 0.004756265785545111, "learning_rate": 2.595955629724158e-06, "loss": 0.0144, "step": 83500 }, { "epoch": 3.71, "grad_norm": 0.028167065232992172, "learning_rate": 2.4118975164425248e-06, "loss": 0.0141, "step": 84000 }, { "epoch": 3.73, "grad_norm": 0.04241223633289337, "learning_rate": 
2.2278394031608913e-06, "loss": 0.0143, "step": 84500 }, { "epoch": 3.75, "grad_norm": 0.0073333000764250755, "learning_rate": 2.043781289879258e-06, "loss": 0.014, "step": 85000 }, { "epoch": 3.78, "grad_norm": 0.022960776463150978, "learning_rate": 1.8597231765976245e-06, "loss": 0.0146, "step": 85500 }, { "epoch": 3.8, "grad_norm": 0.009491208009421825, "learning_rate": 1.675665063315991e-06, "loss": 0.0143, "step": 86000 }, { "epoch": 3.82, "grad_norm": 0.04244249686598778, "learning_rate": 1.4916069500343576e-06, "loss": 0.0142, "step": 86500 }, { "epoch": 3.84, "grad_norm": 0.0845978856086731, "learning_rate": 1.307548836752724e-06, "loss": 0.0139, "step": 87000 }, { "epoch": 3.87, "grad_norm": 0.012684383429586887, "learning_rate": 1.1234907234710905e-06, "loss": 0.0143, "step": 87500 }, { "epoch": 3.89, "grad_norm": 0.010290221311151981, "learning_rate": 9.394326101894571e-07, "loss": 0.0144, "step": 88000 }, { "epoch": 3.91, "grad_norm": 0.009122644551098347, "learning_rate": 7.553744969078238e-07, "loss": 0.0143, "step": 88500 }, { "epoch": 3.93, "grad_norm": 0.02026693895459175, "learning_rate": 5.713163836261902e-07, "loss": 0.0142, "step": 89000 }, { "epoch": 3.95, "grad_norm": 0.03241865336894989, "learning_rate": 3.8725827034455676e-07, "loss": 0.0142, "step": 89500 }, { "epoch": 3.98, "grad_norm": 0.04753628000617027, "learning_rate": 2.0320015706292333e-07, "loss": 0.0142, "step": 90000 } ], "logging_steps": 500, "max_steps": 90552, "num_input_tokens_seen": 0, "num_train_epochs": 4, "save_steps": 10000, "total_flos": 0.0, "train_batch_size": 32, "trial_name": null, "trial_params": null }
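The JSON above is the trainer_state.json that transformers.Trainer writes alongside a checkpoint; this run logged every 500 steps and stopped at step 90,000 of 90,552 (about 3.98 of 4 epochs). A minimal sketch for inspecting the logged curves, assuming the file is saved as trainer_state.json next to this note (the path and the optional matplotlib dependency are assumptions, not part of the original artifact):

import json

# Load the trainer state written by transformers.Trainer (file path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Each training entry in log_history carries step, epoch, loss, learning_rate, grad_norm;
# filter on "loss" in case eval-only entries are present.
history = [h for h in state["log_history"] if "loss" in h]
steps = [h["step"] for h in history]
losses = [h["loss"] for h in history]
lrs = [h["learning_rate"] for h in history]

print(f"logged points: {len(steps)}")
print(f"final step {steps[-1]}: loss={losses[-1]}, lr={lrs[-1]:.3e}")

# Optional plot of loss and learning rate vs. step; requires matplotlib (assumed extra dependency).
try:
    import matplotlib.pyplot as plt
    fig, ax1 = plt.subplots()
    ax1.plot(steps, losses, label="training loss")
    ax1.set_xlabel("global step")
    ax1.set_ylabel("loss")
    ax2 = ax1.twinx()
    ax2.plot(steps, lrs, color="tab:orange", label="learning rate")
    ax2.set_ylabel("learning rate")
    fig.tight_layout()
    fig.savefig("loss_curve.png")
except ImportError:
    pass

Read against the data, the learning rate warms up linearly to roughly 2.98e-5 by step 9,000 and then decays linearly toward zero, while the training loss stays in the 0.014-0.019 range for the remainder of the run.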