diff --git "a/trainer_state.json" "b/trainer_state.json"
new file mode 100644
--- /dev/null
+++ "b/trainer_state.json"
@@ -0,0 +1,36409 @@
+{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.9998737411693973, + "eval_steps": 500, + "global_step": 5197, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00019239440853750187, + "grad_norm": 2.7525458471593183, + "learning_rate": 1.282051282051282e-06, + "loss": 2.0372, + "step": 1 + }, + { + "epoch": 0.00038478881707500374, + "grad_norm": 3.3321528814620067, + "learning_rate": 2.564102564102564e-06, + "loss": 2.1372, + "step": 2 + }, + { + "epoch": 0.0005771832256125057, + "grad_norm": 3.944387374020291, + "learning_rate": 3.846153846153847e-06, + "loss": 2.1234, + "step": 3 + }, + { + "epoch": 0.0007695776341500075, + "grad_norm": 3.229562266204187, + "learning_rate": 5.128205128205128e-06, + "loss": 2.0367, + "step": 4 + }, + { + "epoch": 0.0009619720426875094, + "grad_norm": 2.8909143591476454, + "learning_rate": 6.41025641025641e-06, + "loss": 1.9597, + "step": 5 + }, + { + "epoch": 0.0011543664512250113, + "grad_norm": 3.410435117906883, + "learning_rate": 7.692307692307694e-06, + "loss": 1.9149, + "step": 6 + }, + { + "epoch": 0.0013467608597625132, + "grad_norm": 2.300554810079252, + "learning_rate": 8.974358974358976e-06, + "loss": 1.8909, + "step": 7 + }, + { + "epoch": 0.001539155268300015, + "grad_norm": 3.090665861126571, + "learning_rate": 1.0256410256410256e-05, + "loss": 2.0468, + "step": 8 + }, + { + "epoch": 0.001731549676837517, + "grad_norm": 2.0948227536865542, + "learning_rate": 1.153846153846154e-05, + "loss": 1.8387, + "step": 9 + }, + { + "epoch": 0.0019239440853750188, + "grad_norm": 2.057968488640536, + "learning_rate": 1.282051282051282e-05, + "loss": 1.8397, + "step": 10 + }, + { + "epoch": 0.002116338493912521, + "grad_norm": 1.5223082417883718, + "learning_rate": 1.4102564102564104e-05, + "loss": 1.8193, + "step": 11 + }, + { + "epoch": 0.0023087329024500227, + "grad_norm": 1.1935463000877207, + "learning_rate": 1.5384615384615387e-05, + "loss": 1.8624, + "step": 12 + }, + { + "epoch": 0.0025011273109875245, + "grad_norm": 1.1665543351628567, + "learning_rate": 1.6666666666666667e-05, + "loss": 1.7492, + "step": 13 + }, + { + "epoch": 0.0026935217195250263, + "grad_norm": 1.2934871817264082, + "learning_rate": 1.794871794871795e-05, + "loss": 1.8863, + "step": 14 + }, + { + "epoch": 0.002885916128062528, + "grad_norm": 0.9235693583370461, + "learning_rate": 1.923076923076923e-05, + "loss": 1.7406, + "step": 15 + }, + { + "epoch": 0.00307831053660003, + "grad_norm": 0.971049821770051, + "learning_rate": 2.0512820512820512e-05, + "loss": 1.7381, + "step": 16 + }, + { + "epoch": 0.0032707049451375318, + "grad_norm": 0.8293228609690957, + "learning_rate": 2.1794871794871795e-05, + "loss": 1.7429, + "step": 17 + }, + { + "epoch": 0.003463099353675034, + "grad_norm": 0.9500386578091011, + "learning_rate": 2.307692307692308e-05, + "loss": 1.6875, + "step": 18 + }, + { + "epoch": 0.003655493762212536, + "grad_norm": 0.6974359236292831, + "learning_rate": 2.435897435897436e-05, + "loss": 1.6986, + "step": 19 + }, + { + "epoch": 0.0038478881707500376, + "grad_norm": 0.909510623437065, + "learning_rate": 2.564102564102564e-05, + "loss": 1.717, + "step": 20 + }, + { + "epoch": 0.00404028257928754, + "grad_norm": 0.7378054455865029, + "learning_rate": 2.6923076923076923e-05, + "loss": 1.6046, + "step": 21 + }, + { + 
"epoch": 0.004232676987825042, + "grad_norm": 0.6573768657794542, + "learning_rate": 2.8205128205128207e-05, + "loss": 1.5586, + "step": 22 + }, + { + "epoch": 0.0044250713963625435, + "grad_norm": 0.591424063753591, + "learning_rate": 2.948717948717949e-05, + "loss": 1.6279, + "step": 23 + }, + { + "epoch": 0.004617465804900045, + "grad_norm": 0.6329184676844749, + "learning_rate": 3.0769230769230774e-05, + "loss": 1.4297, + "step": 24 + }, + { + "epoch": 0.004809860213437547, + "grad_norm": 0.6101505751917261, + "learning_rate": 3.205128205128206e-05, + "loss": 1.5585, + "step": 25 + }, + { + "epoch": 0.005002254621975049, + "grad_norm": 0.5587381379284022, + "learning_rate": 3.3333333333333335e-05, + "loss": 1.4957, + "step": 26 + }, + { + "epoch": 0.005194649030512551, + "grad_norm": 0.5858283225543386, + "learning_rate": 3.461538461538462e-05, + "loss": 1.523, + "step": 27 + }, + { + "epoch": 0.005387043439050053, + "grad_norm": 0.5305749427400327, + "learning_rate": 3.58974358974359e-05, + "loss": 1.5648, + "step": 28 + }, + { + "epoch": 0.005579437847587554, + "grad_norm": 0.49831555031417485, + "learning_rate": 3.717948717948718e-05, + "loss": 1.6035, + "step": 29 + }, + { + "epoch": 0.005771832256125056, + "grad_norm": 0.49498666201043756, + "learning_rate": 3.846153846153846e-05, + "loss": 1.4841, + "step": 30 + }, + { + "epoch": 0.005964226664662558, + "grad_norm": 0.5306126185222823, + "learning_rate": 3.974358974358974e-05, + "loss": 1.5928, + "step": 31 + }, + { + "epoch": 0.00615662107320006, + "grad_norm": 0.4337095829200314, + "learning_rate": 4.1025641025641023e-05, + "loss": 1.5603, + "step": 32 + }, + { + "epoch": 0.006349015481737562, + "grad_norm": 0.43514674800870196, + "learning_rate": 4.230769230769231e-05, + "loss": 1.6523, + "step": 33 + }, + { + "epoch": 0.0065414098902750635, + "grad_norm": 0.5963298979323705, + "learning_rate": 4.358974358974359e-05, + "loss": 1.4926, + "step": 34 + }, + { + "epoch": 0.006733804298812566, + "grad_norm": 0.44531715285780604, + "learning_rate": 4.4871794871794874e-05, + "loss": 1.5325, + "step": 35 + }, + { + "epoch": 0.006926198707350068, + "grad_norm": 0.3473040471350792, + "learning_rate": 4.615384615384616e-05, + "loss": 1.4282, + "step": 36 + }, + { + "epoch": 0.00711859311588757, + "grad_norm": 0.443491003274745, + "learning_rate": 4.7435897435897435e-05, + "loss": 1.4698, + "step": 37 + }, + { + "epoch": 0.007310987524425072, + "grad_norm": 0.44814406853655325, + "learning_rate": 4.871794871794872e-05, + "loss": 1.528, + "step": 38 + }, + { + "epoch": 0.0075033819329625735, + "grad_norm": 0.5391192848549605, + "learning_rate": 5e-05, + "loss": 1.5508, + "step": 39 + }, + { + "epoch": 0.007695776341500075, + "grad_norm": 0.54926588050491, + "learning_rate": 5.128205128205128e-05, + "loss": 1.4781, + "step": 40 + }, + { + "epoch": 0.007888170750037577, + "grad_norm": 0.46443543207168564, + "learning_rate": 5.256410256410257e-05, + "loss": 1.4432, + "step": 41 + }, + { + "epoch": 0.00808056515857508, + "grad_norm": 0.44321460008702634, + "learning_rate": 5.384615384615385e-05, + "loss": 1.4928, + "step": 42 + }, + { + "epoch": 0.00827295956711258, + "grad_norm": 0.4632820875245947, + "learning_rate": 5.512820512820514e-05, + "loss": 1.4281, + "step": 43 + }, + { + "epoch": 0.008465353975650083, + "grad_norm": 0.42181714931408915, + "learning_rate": 5.6410256410256414e-05, + "loss": 1.4614, + "step": 44 + }, + { + "epoch": 0.008657748384187584, + "grad_norm": 0.45202808445955417, + "learning_rate": 5.769230769230769e-05, + 
"loss": 1.512, + "step": 45 + }, + { + "epoch": 0.008850142792725087, + "grad_norm": 0.39129615556085906, + "learning_rate": 5.897435897435898e-05, + "loss": 1.3329, + "step": 46 + }, + { + "epoch": 0.009042537201262588, + "grad_norm": 0.44025007115595804, + "learning_rate": 6.025641025641026e-05, + "loss": 1.4993, + "step": 47 + }, + { + "epoch": 0.00923493160980009, + "grad_norm": 0.34779913994346245, + "learning_rate": 6.153846153846155e-05, + "loss": 1.411, + "step": 48 + }, + { + "epoch": 0.009427326018337592, + "grad_norm": 0.3585120048114127, + "learning_rate": 6.282051282051282e-05, + "loss": 1.4531, + "step": 49 + }, + { + "epoch": 0.009619720426875094, + "grad_norm": 0.3955562532849728, + "learning_rate": 6.410256410256412e-05, + "loss": 1.4165, + "step": 50 + }, + { + "epoch": 0.009812114835412595, + "grad_norm": 0.39043156648418725, + "learning_rate": 6.538461538461539e-05, + "loss": 1.442, + "step": 51 + }, + { + "epoch": 0.010004509243950098, + "grad_norm": 0.33706432808884385, + "learning_rate": 6.666666666666667e-05, + "loss": 1.4472, + "step": 52 + }, + { + "epoch": 0.010196903652487599, + "grad_norm": 0.3742128066725855, + "learning_rate": 6.794871794871795e-05, + "loss": 1.3246, + "step": 53 + }, + { + "epoch": 0.010389298061025102, + "grad_norm": 0.3929624766913999, + "learning_rate": 6.923076923076924e-05, + "loss": 1.3529, + "step": 54 + }, + { + "epoch": 0.010581692469562603, + "grad_norm": 0.5610802334436438, + "learning_rate": 7.051282051282052e-05, + "loss": 1.5152, + "step": 55 + }, + { + "epoch": 0.010774086878100105, + "grad_norm": 0.5250016533473014, + "learning_rate": 7.17948717948718e-05, + "loss": 1.3199, + "step": 56 + }, + { + "epoch": 0.010966481286637608, + "grad_norm": 0.42322353397161805, + "learning_rate": 7.307692307692307e-05, + "loss": 1.4614, + "step": 57 + }, + { + "epoch": 0.011158875695175109, + "grad_norm": 0.4845674596386445, + "learning_rate": 7.435897435897436e-05, + "loss": 1.4659, + "step": 58 + }, + { + "epoch": 0.011351270103712612, + "grad_norm": 0.34749048932750803, + "learning_rate": 7.564102564102564e-05, + "loss": 1.3888, + "step": 59 + }, + { + "epoch": 0.011543664512250112, + "grad_norm": 0.4017986834980663, + "learning_rate": 7.692307692307693e-05, + "loss": 1.4199, + "step": 60 + }, + { + "epoch": 0.011736058920787615, + "grad_norm": 0.5027208677679172, + "learning_rate": 7.820512820512821e-05, + "loss": 1.3256, + "step": 61 + }, + { + "epoch": 0.011928453329325116, + "grad_norm": 0.5527999152289834, + "learning_rate": 7.948717948717948e-05, + "loss": 1.3996, + "step": 62 + }, + { + "epoch": 0.012120847737862619, + "grad_norm": 0.4462731246263686, + "learning_rate": 8.076923076923078e-05, + "loss": 1.3843, + "step": 63 + }, + { + "epoch": 0.01231324214640012, + "grad_norm": 0.359874528713232, + "learning_rate": 8.205128205128205e-05, + "loss": 1.2931, + "step": 64 + }, + { + "epoch": 0.012505636554937622, + "grad_norm": 0.38360876148067313, + "learning_rate": 8.333333333333334e-05, + "loss": 1.3122, + "step": 65 + }, + { + "epoch": 0.012698030963475123, + "grad_norm": 0.4749001100500843, + "learning_rate": 8.461538461538461e-05, + "loss": 1.3386, + "step": 66 + }, + { + "epoch": 0.012890425372012626, + "grad_norm": 0.568153688337833, + "learning_rate": 8.58974358974359e-05, + "loss": 1.3523, + "step": 67 + }, + { + "epoch": 0.013082819780550127, + "grad_norm": 0.3376201695261382, + "learning_rate": 8.717948717948718e-05, + "loss": 1.3455, + "step": 68 + }, + { + "epoch": 0.01327521418908763, + "grad_norm": 0.41665092992914415, 
+ "learning_rate": 8.846153846153847e-05, + "loss": 1.3805, + "step": 69 + }, + { + "epoch": 0.013467608597625132, + "grad_norm": 0.3775378273997053, + "learning_rate": 8.974358974358975e-05, + "loss": 1.282, + "step": 70 + }, + { + "epoch": 0.013660003006162633, + "grad_norm": 0.4836285793376228, + "learning_rate": 9.102564102564103e-05, + "loss": 1.3999, + "step": 71 + }, + { + "epoch": 0.013852397414700136, + "grad_norm": 0.484212509769501, + "learning_rate": 9.230769230769232e-05, + "loss": 1.3214, + "step": 72 + }, + { + "epoch": 0.014044791823237637, + "grad_norm": 0.5377956432211242, + "learning_rate": 9.35897435897436e-05, + "loss": 1.3792, + "step": 73 + }, + { + "epoch": 0.01423718623177514, + "grad_norm": 0.45721867790444926, + "learning_rate": 9.487179487179487e-05, + "loss": 1.263, + "step": 74 + }, + { + "epoch": 0.01442958064031264, + "grad_norm": 0.4104047180613727, + "learning_rate": 9.615384615384617e-05, + "loss": 1.3201, + "step": 75 + }, + { + "epoch": 0.014621975048850143, + "grad_norm": 0.5907839436204101, + "learning_rate": 9.743589743589744e-05, + "loss": 1.4332, + "step": 76 + }, + { + "epoch": 0.014814369457387644, + "grad_norm": 0.4254853829097721, + "learning_rate": 9.871794871794872e-05, + "loss": 1.3182, + "step": 77 + }, + { + "epoch": 0.015006763865925147, + "grad_norm": 0.5329864075221257, + "learning_rate": 0.0001, + "loss": 1.3, + "step": 78 + }, + { + "epoch": 0.015199158274462648, + "grad_norm": 0.4324036261474306, + "learning_rate": 0.00010128205128205129, + "loss": 1.3243, + "step": 79 + }, + { + "epoch": 0.01539155268300015, + "grad_norm": 0.4290409758966646, + "learning_rate": 0.00010256410256410256, + "loss": 1.3799, + "step": 80 + }, + { + "epoch": 0.015583947091537651, + "grad_norm": 0.3391541691436233, + "learning_rate": 0.00010384615384615386, + "loss": 1.3717, + "step": 81 + }, + { + "epoch": 0.015776341500075154, + "grad_norm": 0.37373787707783, + "learning_rate": 0.00010512820512820514, + "loss": 1.3731, + "step": 82 + }, + { + "epoch": 0.015968735908612655, + "grad_norm": 0.31215329512195983, + "learning_rate": 0.00010641025641025641, + "loss": 1.3579, + "step": 83 + }, + { + "epoch": 0.01616113031715016, + "grad_norm": 0.4288427945434432, + "learning_rate": 0.0001076923076923077, + "loss": 1.3237, + "step": 84 + }, + { + "epoch": 0.01635352472568766, + "grad_norm": 0.5160078251857673, + "learning_rate": 0.00010897435897435896, + "loss": 1.2972, + "step": 85 + }, + { + "epoch": 0.01654591913422516, + "grad_norm": 0.3993394373534561, + "learning_rate": 0.00011025641025641027, + "loss": 1.3384, + "step": 86 + }, + { + "epoch": 0.016738313542762662, + "grad_norm": 0.4366076718871945, + "learning_rate": 0.00011153846153846154, + "loss": 1.3148, + "step": 87 + }, + { + "epoch": 0.016930707951300167, + "grad_norm": 0.4733218205937479, + "learning_rate": 0.00011282051282051283, + "loss": 1.2973, + "step": 88 + }, + { + "epoch": 0.017123102359837668, + "grad_norm": 0.4087246684407547, + "learning_rate": 0.0001141025641025641, + "loss": 1.3423, + "step": 89 + }, + { + "epoch": 0.01731549676837517, + "grad_norm": 0.3992625790555533, + "learning_rate": 0.00011538461538461538, + "loss": 1.3228, + "step": 90 + }, + { + "epoch": 0.01750789117691267, + "grad_norm": 0.5150054008924904, + "learning_rate": 0.00011666666666666668, + "loss": 1.2881, + "step": 91 + }, + { + "epoch": 0.017700285585450174, + "grad_norm": 0.5876754273805255, + "learning_rate": 0.00011794871794871796, + "loss": 1.3594, + "step": 92 + }, + { + "epoch": 0.017892679993987675, + 
"grad_norm": 0.42631406042289577, + "learning_rate": 0.00011923076923076923, + "loss": 1.4834, + "step": 93 + }, + { + "epoch": 0.018085074402525176, + "grad_norm": 0.3838034281944992, + "learning_rate": 0.00012051282051282052, + "loss": 1.2706, + "step": 94 + }, + { + "epoch": 0.018277468811062677, + "grad_norm": 0.4301521685881343, + "learning_rate": 0.00012179487179487179, + "loss": 1.2317, + "step": 95 + }, + { + "epoch": 0.01846986321960018, + "grad_norm": 0.3865391962403398, + "learning_rate": 0.0001230769230769231, + "loss": 1.2702, + "step": 96 + }, + { + "epoch": 0.018662257628137682, + "grad_norm": 0.46676939700542575, + "learning_rate": 0.00012435897435897437, + "loss": 1.2659, + "step": 97 + }, + { + "epoch": 0.018854652036675183, + "grad_norm": 0.3282694769605565, + "learning_rate": 0.00012564102564102564, + "loss": 1.3712, + "step": 98 + }, + { + "epoch": 0.019047046445212688, + "grad_norm": 0.41323205438808747, + "learning_rate": 0.00012692307692307693, + "loss": 1.3105, + "step": 99 + }, + { + "epoch": 0.01923944085375019, + "grad_norm": 0.39942104467779616, + "learning_rate": 0.00012820512820512823, + "loss": 1.3306, + "step": 100 + }, + { + "epoch": 0.01943183526228769, + "grad_norm": 0.39527401905640397, + "learning_rate": 0.0001294871794871795, + "loss": 1.3257, + "step": 101 + }, + { + "epoch": 0.01962422967082519, + "grad_norm": 0.45162116871270536, + "learning_rate": 0.00013076923076923077, + "loss": 1.3487, + "step": 102 + }, + { + "epoch": 0.019816624079362695, + "grad_norm": 0.45123939487831205, + "learning_rate": 0.00013205128205128204, + "loss": 1.3389, + "step": 103 + }, + { + "epoch": 0.020009018487900196, + "grad_norm": 0.3141520566672659, + "learning_rate": 0.00013333333333333334, + "loss": 1.2622, + "step": 104 + }, + { + "epoch": 0.020201412896437697, + "grad_norm": 0.4351649493507871, + "learning_rate": 0.00013461538461538464, + "loss": 1.2419, + "step": 105 + }, + { + "epoch": 0.020393807304975198, + "grad_norm": 0.39794413282752655, + "learning_rate": 0.0001358974358974359, + "loss": 1.2614, + "step": 106 + }, + { + "epoch": 0.020586201713512702, + "grad_norm": 0.5243932516778702, + "learning_rate": 0.00013717948717948718, + "loss": 1.2405, + "step": 107 + }, + { + "epoch": 0.020778596122050203, + "grad_norm": 0.3886407628185571, + "learning_rate": 0.00013846153846153847, + "loss": 1.3281, + "step": 108 + }, + { + "epoch": 0.020970990530587704, + "grad_norm": 0.42235960401541284, + "learning_rate": 0.00013974358974358974, + "loss": 1.3742, + "step": 109 + }, + { + "epoch": 0.021163384939125205, + "grad_norm": 0.38868927854775726, + "learning_rate": 0.00014102564102564104, + "loss": 1.185, + "step": 110 + }, + { + "epoch": 0.02135577934766271, + "grad_norm": 0.4545771910029529, + "learning_rate": 0.0001423076923076923, + "loss": 1.2502, + "step": 111 + }, + { + "epoch": 0.02154817375620021, + "grad_norm": 0.42840503963264065, + "learning_rate": 0.0001435897435897436, + "loss": 1.3541, + "step": 112 + }, + { + "epoch": 0.02174056816473771, + "grad_norm": 0.5606903363021304, + "learning_rate": 0.00014487179487179488, + "loss": 1.2254, + "step": 113 + }, + { + "epoch": 0.021932962573275216, + "grad_norm": 0.3710188315891035, + "learning_rate": 0.00014615384615384615, + "loss": 1.213, + "step": 114 + }, + { + "epoch": 0.022125356981812717, + "grad_norm": 0.49119061421727545, + "learning_rate": 0.00014743589743589745, + "loss": 1.2475, + "step": 115 + }, + { + "epoch": 0.022317751390350218, + "grad_norm": 0.4781056177708388, + "learning_rate": 
0.00014871794871794872, + "loss": 1.2206, + "step": 116 + }, + { + "epoch": 0.02251014579888772, + "grad_norm": 0.42953663682993126, + "learning_rate": 0.00015000000000000001, + "loss": 1.4172, + "step": 117 + }, + { + "epoch": 0.022702540207425223, + "grad_norm": 0.5224096844586757, + "learning_rate": 0.00015128205128205128, + "loss": 1.3066, + "step": 118 + }, + { + "epoch": 0.022894934615962724, + "grad_norm": 0.3840882540830783, + "learning_rate": 0.00015256410256410255, + "loss": 1.175, + "step": 119 + }, + { + "epoch": 0.023087329024500225, + "grad_norm": 0.37664235360882764, + "learning_rate": 0.00015384615384615385, + "loss": 1.3042, + "step": 120 + }, + { + "epoch": 0.023279723433037726, + "grad_norm": 0.44660253558315144, + "learning_rate": 0.00015512820512820515, + "loss": 1.3376, + "step": 121 + }, + { + "epoch": 0.02347211784157523, + "grad_norm": 0.45240697967438576, + "learning_rate": 0.00015641025641025642, + "loss": 1.3129, + "step": 122 + }, + { + "epoch": 0.02366451225011273, + "grad_norm": 0.5488188281205489, + "learning_rate": 0.0001576923076923077, + "loss": 1.3034, + "step": 123 + }, + { + "epoch": 0.023856906658650232, + "grad_norm": 0.4093560819591969, + "learning_rate": 0.00015897435897435896, + "loss": 1.3536, + "step": 124 + }, + { + "epoch": 0.024049301067187733, + "grad_norm": 0.4181518968444485, + "learning_rate": 0.00016025641025641028, + "loss": 1.2972, + "step": 125 + }, + { + "epoch": 0.024241695475725238, + "grad_norm": 0.47822260626271096, + "learning_rate": 0.00016153846153846155, + "loss": 1.2461, + "step": 126 + }, + { + "epoch": 0.02443408988426274, + "grad_norm": 0.3658429455505507, + "learning_rate": 0.00016282051282051282, + "loss": 1.2409, + "step": 127 + }, + { + "epoch": 0.02462648429280024, + "grad_norm": 0.39272623832743203, + "learning_rate": 0.0001641025641025641, + "loss": 1.2174, + "step": 128 + }, + { + "epoch": 0.024818878701337744, + "grad_norm": 0.39762228575064995, + "learning_rate": 0.0001653846153846154, + "loss": 1.284, + "step": 129 + }, + { + "epoch": 0.025011273109875245, + "grad_norm": 0.47852070426660737, + "learning_rate": 0.0001666666666666667, + "loss": 1.2164, + "step": 130 + }, + { + "epoch": 0.025203667518412746, + "grad_norm": 0.37689479811805965, + "learning_rate": 0.00016794871794871796, + "loss": 1.3237, + "step": 131 + }, + { + "epoch": 0.025396061926950247, + "grad_norm": 0.39110709401209226, + "learning_rate": 0.00016923076923076923, + "loss": 1.2545, + "step": 132 + }, + { + "epoch": 0.02558845633548775, + "grad_norm": 0.4310773405987121, + "learning_rate": 0.00017051282051282053, + "loss": 1.2216, + "step": 133 + }, + { + "epoch": 0.025780850744025252, + "grad_norm": 0.3468023433804279, + "learning_rate": 0.0001717948717948718, + "loss": 1.2678, + "step": 134 + }, + { + "epoch": 0.025973245152562753, + "grad_norm": 0.5085016913769101, + "learning_rate": 0.0001730769230769231, + "loss": 1.2955, + "step": 135 + }, + { + "epoch": 0.026165639561100254, + "grad_norm": 0.49287583142064645, + "learning_rate": 0.00017435897435897436, + "loss": 1.358, + "step": 136 + }, + { + "epoch": 0.02635803396963776, + "grad_norm": 0.33228001088489983, + "learning_rate": 0.00017564102564102566, + "loss": 1.3327, + "step": 137 + }, + { + "epoch": 0.02655042837817526, + "grad_norm": 0.3503824504467012, + "learning_rate": 0.00017692307692307693, + "loss": 1.1518, + "step": 138 + }, + { + "epoch": 0.02674282278671276, + "grad_norm": 0.37532956999924594, + "learning_rate": 0.00017820512820512823, + "loss": 1.2865, + "step": 139 + }, + 
{ + "epoch": 0.026935217195250265, + "grad_norm": 0.38406220033381544, + "learning_rate": 0.0001794871794871795, + "loss": 1.2418, + "step": 140 + }, + { + "epoch": 0.027127611603787766, + "grad_norm": 0.39031936695061314, + "learning_rate": 0.00018076923076923077, + "loss": 1.228, + "step": 141 + }, + { + "epoch": 0.027320006012325267, + "grad_norm": 0.34195813274578396, + "learning_rate": 0.00018205128205128207, + "loss": 1.2056, + "step": 142 + }, + { + "epoch": 0.027512400420862768, + "grad_norm": 0.4248888854316749, + "learning_rate": 0.00018333333333333334, + "loss": 1.3257, + "step": 143 + }, + { + "epoch": 0.027704794829400272, + "grad_norm": 0.4210928393092026, + "learning_rate": 0.00018461538461538463, + "loss": 1.2242, + "step": 144 + }, + { + "epoch": 0.027897189237937773, + "grad_norm": 0.4352511878415626, + "learning_rate": 0.0001858974358974359, + "loss": 1.3441, + "step": 145 + }, + { + "epoch": 0.028089583646475274, + "grad_norm": 0.4217866234218403, + "learning_rate": 0.0001871794871794872, + "loss": 1.2583, + "step": 146 + }, + { + "epoch": 0.028281978055012775, + "grad_norm": 0.3974517630815656, + "learning_rate": 0.00018846153846153847, + "loss": 1.269, + "step": 147 + }, + { + "epoch": 0.02847437246355028, + "grad_norm": 0.4216992528085191, + "learning_rate": 0.00018974358974358974, + "loss": 1.2464, + "step": 148 + }, + { + "epoch": 0.02866676687208778, + "grad_norm": 0.33500073758371224, + "learning_rate": 0.00019102564102564104, + "loss": 1.4107, + "step": 149 + }, + { + "epoch": 0.02885916128062528, + "grad_norm": 0.3930034995291226, + "learning_rate": 0.00019230769230769233, + "loss": 1.2547, + "step": 150 + }, + { + "epoch": 0.029051555689162782, + "grad_norm": 0.39243830006177954, + "learning_rate": 0.0001935897435897436, + "loss": 1.3772, + "step": 151 + }, + { + "epoch": 0.029243950097700287, + "grad_norm": 0.43922584612423615, + "learning_rate": 0.00019487179487179487, + "loss": 1.2391, + "step": 152 + }, + { + "epoch": 0.029436344506237788, + "grad_norm": 0.3444854965075257, + "learning_rate": 0.00019615384615384615, + "loss": 1.3353, + "step": 153 + }, + { + "epoch": 0.02962873891477529, + "grad_norm": 0.359517893965557, + "learning_rate": 0.00019743589743589744, + "loss": 1.2569, + "step": 154 + }, + { + "epoch": 0.029821133323312793, + "grad_norm": 0.3968742594316916, + "learning_rate": 0.00019871794871794874, + "loss": 1.3355, + "step": 155 + }, + { + "epoch": 0.030013527731850294, + "grad_norm": 0.34182297551625235, + "learning_rate": 0.0002, + "loss": 1.2427, + "step": 156 + }, + { + "epoch": 0.030205922140387795, + "grad_norm": 0.44423820785007373, + "learning_rate": 0.00019999998058057615, + "loss": 1.2839, + "step": 157 + }, + { + "epoch": 0.030398316548925296, + "grad_norm": 0.42072857308135675, + "learning_rate": 0.00019999992232231216, + "loss": 1.2678, + "step": 158 + }, + { + "epoch": 0.0305907109574628, + "grad_norm": 0.48936094118847634, + "learning_rate": 0.0001999998252252306, + "loss": 1.2668, + "step": 159 + }, + { + "epoch": 0.0307831053660003, + "grad_norm": 0.3442662491489396, + "learning_rate": 0.00019999968928936926, + "loss": 1.2838, + "step": 160 + }, + { + "epoch": 0.030975499774537802, + "grad_norm": 0.3920464401403197, + "learning_rate": 0.00019999951451478087, + "loss": 1.2205, + "step": 161 + }, + { + "epoch": 0.031167894183075303, + "grad_norm": 0.3334924587891562, + "learning_rate": 0.00019999930090153334, + "loss": 1.3183, + "step": 162 + }, + { + "epoch": 0.03136028859161281, + "grad_norm": 0.3582955766263805, + 
"learning_rate": 0.00019999904844970962, + "loss": 1.2481, + "step": 163 + }, + { + "epoch": 0.03155268300015031, + "grad_norm": 0.34978513788320387, + "learning_rate": 0.00019999875715940782, + "loss": 1.1672, + "step": 164 + }, + { + "epoch": 0.03174507740868781, + "grad_norm": 0.49416109569985456, + "learning_rate": 0.000199998427030741, + "loss": 1.2332, + "step": 165 + }, + { + "epoch": 0.03193747181722531, + "grad_norm": 0.35765291072585936, + "learning_rate": 0.00019999805806383738, + "loss": 1.2619, + "step": 166 + }, + { + "epoch": 0.03212986622576281, + "grad_norm": 0.4847857479606003, + "learning_rate": 0.0001999976502588403, + "loss": 1.2642, + "step": 167 + }, + { + "epoch": 0.03232226063430032, + "grad_norm": 0.48853166022098093, + "learning_rate": 0.0001999972036159081, + "loss": 1.3542, + "step": 168 + }, + { + "epoch": 0.03251465504283782, + "grad_norm": 0.35297868597517823, + "learning_rate": 0.00019999671813521435, + "loss": 1.2381, + "step": 169 + }, + { + "epoch": 0.03270704945137532, + "grad_norm": 0.4769237665603929, + "learning_rate": 0.0001999961938169475, + "loss": 1.1891, + "step": 170 + }, + { + "epoch": 0.03289944385991282, + "grad_norm": 0.36210408771409336, + "learning_rate": 0.00019999563066131124, + "loss": 1.3756, + "step": 171 + }, + { + "epoch": 0.03309183826845032, + "grad_norm": 0.34813424957631334, + "learning_rate": 0.00019999502866852425, + "loss": 1.293, + "step": 172 + }, + { + "epoch": 0.033284232676987824, + "grad_norm": 0.3784075964795684, + "learning_rate": 0.0001999943878388204, + "loss": 1.3094, + "step": 173 + }, + { + "epoch": 0.033476627085525325, + "grad_norm": 0.39718809122566173, + "learning_rate": 0.00019999370817244853, + "loss": 1.3564, + "step": 174 + }, + { + "epoch": 0.033669021494062826, + "grad_norm": 0.45762884116455194, + "learning_rate": 0.00019999298966967265, + "loss": 1.258, + "step": 175 + }, + { + "epoch": 0.033861415902600334, + "grad_norm": 0.381488697387606, + "learning_rate": 0.00019999223233077177, + "loss": 1.303, + "step": 176 + }, + { + "epoch": 0.034053810311137835, + "grad_norm": 0.32182417029429744, + "learning_rate": 0.0001999914361560401, + "loss": 1.2069, + "step": 177 + }, + { + "epoch": 0.034246204719675336, + "grad_norm": 0.33952904696616504, + "learning_rate": 0.00019999060114578684, + "loss": 1.2738, + "step": 178 + }, + { + "epoch": 0.034438599128212836, + "grad_norm": 0.45283663599696256, + "learning_rate": 0.00019998972730033622, + "loss": 1.2087, + "step": 179 + }, + { + "epoch": 0.03463099353675034, + "grad_norm": 0.42777961625466415, + "learning_rate": 0.00019998881462002778, + "loss": 1.2969, + "step": 180 + }, + { + "epoch": 0.03482338794528784, + "grad_norm": 0.4353440055613314, + "learning_rate": 0.00019998786310521585, + "loss": 1.1811, + "step": 181 + }, + { + "epoch": 0.03501578235382534, + "grad_norm": 0.3623049235937626, + "learning_rate": 0.00019998687275627006, + "loss": 1.2145, + "step": 182 + }, + { + "epoch": 0.03520817676236285, + "grad_norm": 0.3633740669851794, + "learning_rate": 0.00019998584357357502, + "loss": 1.282, + "step": 183 + }, + { + "epoch": 0.03540057117090035, + "grad_norm": 0.40672086192782747, + "learning_rate": 0.00019998477555753055, + "loss": 1.2021, + "step": 184 + }, + { + "epoch": 0.03559296557943785, + "grad_norm": 0.3528660946853748, + "learning_rate": 0.00019998366870855133, + "loss": 1.3171, + "step": 185 + }, + { + "epoch": 0.03578535998797535, + "grad_norm": 0.29673419601117756, + "learning_rate": 0.0001999825230270673, + "loss": 1.2103, + "step": 186 
+ }, + { + "epoch": 0.03597775439651285, + "grad_norm": 0.30415174073622714, + "learning_rate": 0.0001999813385135234, + "loss": 1.2861, + "step": 187 + }, + { + "epoch": 0.03617014880505035, + "grad_norm": 0.3569229070825008, + "learning_rate": 0.00019998011516837974, + "loss": 1.1805, + "step": 188 + }, + { + "epoch": 0.03636254321358785, + "grad_norm": 0.3184620004926606, + "learning_rate": 0.0001999788529921114, + "loss": 1.3267, + "step": 189 + }, + { + "epoch": 0.036554937622125354, + "grad_norm": 0.4710231851905076, + "learning_rate": 0.0001999775519852086, + "loss": 1.2713, + "step": 190 + }, + { + "epoch": 0.03674733203066286, + "grad_norm": 0.31583452059456996, + "learning_rate": 0.00019997621214817667, + "loss": 1.1783, + "step": 191 + }, + { + "epoch": 0.03693972643920036, + "grad_norm": 0.3796671027972158, + "learning_rate": 0.000199974833481536, + "loss": 1.1852, + "step": 192 + }, + { + "epoch": 0.037132120847737864, + "grad_norm": 0.30458775980863195, + "learning_rate": 0.00019997341598582195, + "loss": 1.3163, + "step": 193 + }, + { + "epoch": 0.037324515256275365, + "grad_norm": 0.40876099439003527, + "learning_rate": 0.00019997195966158518, + "loss": 1.2282, + "step": 194 + }, + { + "epoch": 0.037516909664812866, + "grad_norm": 0.2851561930530594, + "learning_rate": 0.0001999704645093912, + "loss": 1.3051, + "step": 195 + }, + { + "epoch": 0.037709304073350366, + "grad_norm": 0.4264152679934513, + "learning_rate": 0.0001999689305298208, + "loss": 1.3383, + "step": 196 + }, + { + "epoch": 0.03790169848188787, + "grad_norm": 0.3968562536557867, + "learning_rate": 0.00019996735772346972, + "loss": 1.2176, + "step": 197 + }, + { + "epoch": 0.038094092890425375, + "grad_norm": 0.3784089645095029, + "learning_rate": 0.00019996574609094884, + "loss": 1.3513, + "step": 198 + }, + { + "epoch": 0.038286487298962876, + "grad_norm": 0.501810530415642, + "learning_rate": 0.00019996409563288406, + "loss": 1.1565, + "step": 199 + }, + { + "epoch": 0.03847888170750038, + "grad_norm": 0.4982537736486703, + "learning_rate": 0.00019996240634991642, + "loss": 1.1701, + "step": 200 + }, + { + "epoch": 0.03867127611603788, + "grad_norm": 0.40509324758121606, + "learning_rate": 0.00019996067824270205, + "loss": 1.2529, + "step": 201 + }, + { + "epoch": 0.03886367052457538, + "grad_norm": 0.424702289607452, + "learning_rate": 0.00019995891131191205, + "loss": 1.3131, + "step": 202 + }, + { + "epoch": 0.03905606493311288, + "grad_norm": 0.41181957289529814, + "learning_rate": 0.00019995710555823276, + "loss": 1.1718, + "step": 203 + }, + { + "epoch": 0.03924845934165038, + "grad_norm": 0.3498527557157088, + "learning_rate": 0.00019995526098236547, + "loss": 1.2304, + "step": 204 + }, + { + "epoch": 0.03944085375018788, + "grad_norm": 0.2965374719080043, + "learning_rate": 0.0001999533775850266, + "loss": 1.281, + "step": 205 + }, + { + "epoch": 0.03963324815872539, + "grad_norm": 0.3680750001537127, + "learning_rate": 0.00019995145536694762, + "loss": 1.2044, + "step": 206 + }, + { + "epoch": 0.03982564256726289, + "grad_norm": 0.42398151925042865, + "learning_rate": 0.00019994949432887514, + "loss": 1.2261, + "step": 207 + }, + { + "epoch": 0.04001803697580039, + "grad_norm": 0.397051080448696, + "learning_rate": 0.00019994749447157077, + "loss": 1.2953, + "step": 208 + }, + { + "epoch": 0.04021043138433789, + "grad_norm": 0.34499463191570895, + "learning_rate": 0.00019994545579581123, + "loss": 1.2452, + "step": 209 + }, + { + "epoch": 0.040402825792875394, + "grad_norm": 0.3798929225789759, + 
"learning_rate": 0.00019994337830238834, + "loss": 1.3276, + "step": 210 + }, + { + "epoch": 0.040595220201412895, + "grad_norm": 0.3686082097211794, + "learning_rate": 0.00019994126199210897, + "loss": 1.2147, + "step": 211 + }, + { + "epoch": 0.040787614609950396, + "grad_norm": 0.32252571596181884, + "learning_rate": 0.00019993910686579507, + "loss": 1.2185, + "step": 212 + }, + { + "epoch": 0.0409800090184879, + "grad_norm": 0.42545588466081985, + "learning_rate": 0.00019993691292428365, + "loss": 1.198, + "step": 213 + }, + { + "epoch": 0.041172403427025404, + "grad_norm": 0.3269172449298148, + "learning_rate": 0.00019993468016842682, + "loss": 1.2332, + "step": 214 + }, + { + "epoch": 0.041364797835562905, + "grad_norm": 0.33412442290693023, + "learning_rate": 0.00019993240859909176, + "loss": 1.2392, + "step": 215 + }, + { + "epoch": 0.041557192244100406, + "grad_norm": 0.37427266017576016, + "learning_rate": 0.00019993009821716074, + "loss": 1.1579, + "step": 216 + }, + { + "epoch": 0.04174958665263791, + "grad_norm": 0.32304928157084595, + "learning_rate": 0.00019992774902353105, + "loss": 1.2504, + "step": 217 + }, + { + "epoch": 0.04194198106117541, + "grad_norm": 0.4210692012146056, + "learning_rate": 0.0001999253610191151, + "loss": 1.255, + "step": 218 + }, + { + "epoch": 0.04213437546971291, + "grad_norm": 0.39979090531976197, + "learning_rate": 0.00019992293420484039, + "loss": 1.2154, + "step": 219 + }, + { + "epoch": 0.04232676987825041, + "grad_norm": 0.33287705270465007, + "learning_rate": 0.00019992046858164944, + "loss": 1.3106, + "step": 220 + }, + { + "epoch": 0.04251916428678792, + "grad_norm": 0.4267622193847413, + "learning_rate": 0.0001999179641504999, + "loss": 1.156, + "step": 221 + }, + { + "epoch": 0.04271155869532542, + "grad_norm": 0.3489543426400865, + "learning_rate": 0.00019991542091236437, + "loss": 1.2274, + "step": 222 + }, + { + "epoch": 0.04290395310386292, + "grad_norm": 0.3965495981572789, + "learning_rate": 0.00019991283886823074, + "loss": 1.2429, + "step": 223 + }, + { + "epoch": 0.04309634751240042, + "grad_norm": 0.35185812275485573, + "learning_rate": 0.00019991021801910177, + "loss": 1.1352, + "step": 224 + }, + { + "epoch": 0.04328874192093792, + "grad_norm": 0.4269523111931177, + "learning_rate": 0.00019990755836599538, + "loss": 1.2589, + "step": 225 + }, + { + "epoch": 0.04348113632947542, + "grad_norm": 0.3197675379281588, + "learning_rate": 0.0001999048599099446, + "loss": 1.2607, + "step": 226 + }, + { + "epoch": 0.043673530738012924, + "grad_norm": 0.31993439796766976, + "learning_rate": 0.00019990212265199738, + "loss": 1.3237, + "step": 227 + }, + { + "epoch": 0.04386592514655043, + "grad_norm": 0.28977818784032877, + "learning_rate": 0.0001998993465932169, + "loss": 1.2173, + "step": 228 + }, + { + "epoch": 0.04405831955508793, + "grad_norm": 0.3382834816165424, + "learning_rate": 0.00019989653173468135, + "loss": 1.2356, + "step": 229 + }, + { + "epoch": 0.04425071396362543, + "grad_norm": 0.35078039254538645, + "learning_rate": 0.000199893678077484, + "loss": 1.2551, + "step": 230 + }, + { + "epoch": 0.044443108372162934, + "grad_norm": 0.35486481756596094, + "learning_rate": 0.00019989078562273314, + "loss": 1.2036, + "step": 231 + }, + { + "epoch": 0.044635502780700435, + "grad_norm": 0.3642186760167112, + "learning_rate": 0.0001998878543715522, + "loss": 1.2376, + "step": 232 + }, + { + "epoch": 0.044827897189237936, + "grad_norm": 0.3800056324738678, + "learning_rate": 0.0001998848843250796, + "loss": 1.2665, + "step": 233 
+ }, + { + "epoch": 0.04502029159777544, + "grad_norm": 0.3237256422775167, + "learning_rate": 0.00019988187548446894, + "loss": 1.133, + "step": 234 + }, + { + "epoch": 0.04521268600631294, + "grad_norm": 0.2837551658060299, + "learning_rate": 0.0001998788278508888, + "loss": 1.1966, + "step": 235 + }, + { + "epoch": 0.045405080414850446, + "grad_norm": 0.4134776579615321, + "learning_rate": 0.00019987574142552275, + "loss": 1.2163, + "step": 236 + }, + { + "epoch": 0.04559747482338795, + "grad_norm": 0.3694123337421609, + "learning_rate": 0.00019987261620956964, + "loss": 1.2597, + "step": 237 + }, + { + "epoch": 0.04578986923192545, + "grad_norm": 0.4114395315715834, + "learning_rate": 0.00019986945220424324, + "loss": 1.3565, + "step": 238 + }, + { + "epoch": 0.04598226364046295, + "grad_norm": 0.3084412092455137, + "learning_rate": 0.00019986624941077238, + "loss": 1.1464, + "step": 239 + }, + { + "epoch": 0.04617465804900045, + "grad_norm": 0.349142692471618, + "learning_rate": 0.000199863007830401, + "loss": 1.2835, + "step": 240 + }, + { + "epoch": 0.04636705245753795, + "grad_norm": 0.2732533508503848, + "learning_rate": 0.0001998597274643881, + "loss": 1.338, + "step": 241 + }, + { + "epoch": 0.04655944686607545, + "grad_norm": 0.25748553040026023, + "learning_rate": 0.00019985640831400776, + "loss": 1.2175, + "step": 242 + }, + { + "epoch": 0.04675184127461296, + "grad_norm": 0.32749537921051525, + "learning_rate": 0.0001998530503805491, + "loss": 1.1979, + "step": 243 + }, + { + "epoch": 0.04694423568315046, + "grad_norm": 0.3717967835992418, + "learning_rate": 0.00019984965366531623, + "loss": 1.2869, + "step": 244 + }, + { + "epoch": 0.04713663009168796, + "grad_norm": 0.3410985836741922, + "learning_rate": 0.00019984621816962844, + "loss": 1.3445, + "step": 245 + }, + { + "epoch": 0.04732902450022546, + "grad_norm": 0.3767653735452373, + "learning_rate": 0.00019984274389482005, + "loss": 1.2968, + "step": 246 + }, + { + "epoch": 0.047521418908762963, + "grad_norm": 0.3063748515018053, + "learning_rate": 0.00019983923084224045, + "loss": 1.3421, + "step": 247 + }, + { + "epoch": 0.047713813317300464, + "grad_norm": 0.3953429677010023, + "learning_rate": 0.00019983567901325403, + "loss": 1.2541, + "step": 248 + }, + { + "epoch": 0.047906207725837965, + "grad_norm": 0.35466087149773695, + "learning_rate": 0.00019983208840924026, + "loss": 1.2191, + "step": 249 + }, + { + "epoch": 0.048098602134375466, + "grad_norm": 0.3354957301340148, + "learning_rate": 0.0001998284590315937, + "loss": 1.1917, + "step": 250 + }, + { + "epoch": 0.048290996542912974, + "grad_norm": 0.40041914964435016, + "learning_rate": 0.00019982479088172405, + "loss": 1.2996, + "step": 251 + }, + { + "epoch": 0.048483390951450475, + "grad_norm": 0.3979208234651924, + "learning_rate": 0.00019982108396105584, + "loss": 1.274, + "step": 252 + }, + { + "epoch": 0.048675785359987976, + "grad_norm": 0.4826217626962861, + "learning_rate": 0.00019981733827102884, + "loss": 1.3042, + "step": 253 + }, + { + "epoch": 0.04886817976852548, + "grad_norm": 0.33611727898812377, + "learning_rate": 0.00019981355381309789, + "loss": 1.2157, + "step": 254 + }, + { + "epoch": 0.04906057417706298, + "grad_norm": 0.37058871405251453, + "learning_rate": 0.0001998097305887328, + "loss": 1.2581, + "step": 255 + }, + { + "epoch": 0.04925296858560048, + "grad_norm": 0.3092656056799116, + "learning_rate": 0.00019980586859941847, + "loss": 1.2189, + "step": 256 + }, + { + "epoch": 0.04944536299413798, + "grad_norm": 0.3548142864921303, + 
"learning_rate": 0.00019980196784665478, + "loss": 1.1467, + "step": 257 + }, + { + "epoch": 0.04963775740267549, + "grad_norm": 0.41277522858899796, + "learning_rate": 0.00019979802833195682, + "loss": 1.1699, + "step": 258 + }, + { + "epoch": 0.04983015181121299, + "grad_norm": 0.34689799716308284, + "learning_rate": 0.00019979405005685465, + "loss": 1.2887, + "step": 259 + }, + { + "epoch": 0.05002254621975049, + "grad_norm": 0.31082236508657474, + "learning_rate": 0.00019979003302289335, + "loss": 1.2578, + "step": 260 + }, + { + "epoch": 0.05021494062828799, + "grad_norm": 0.32128293101778826, + "learning_rate": 0.0001997859772316331, + "loss": 1.1811, + "step": 261 + }, + { + "epoch": 0.05040733503682549, + "grad_norm": 0.39289119700833075, + "learning_rate": 0.00019978188268464912, + "loss": 1.0934, + "step": 262 + }, + { + "epoch": 0.05059972944536299, + "grad_norm": 0.3161307556322266, + "learning_rate": 0.0001997777493835317, + "loss": 1.2309, + "step": 263 + }, + { + "epoch": 0.050792123853900494, + "grad_norm": 0.37715248373313104, + "learning_rate": 0.00019977357732988614, + "loss": 1.2183, + "step": 264 + }, + { + "epoch": 0.050984518262437994, + "grad_norm": 0.3773970132717969, + "learning_rate": 0.0001997693665253329, + "loss": 1.1416, + "step": 265 + }, + { + "epoch": 0.0511769126709755, + "grad_norm": 0.35699919953799203, + "learning_rate": 0.0001997651169715073, + "loss": 1.2559, + "step": 266 + }, + { + "epoch": 0.051369307079513, + "grad_norm": 0.5965240119539538, + "learning_rate": 0.00019976082867005984, + "loss": 1.1933, + "step": 267 + }, + { + "epoch": 0.051561701488050504, + "grad_norm": 0.30273904689782033, + "learning_rate": 0.00019975650162265608, + "loss": 1.2367, + "step": 268 + }, + { + "epoch": 0.051754095896588005, + "grad_norm": 0.38380292113931525, + "learning_rate": 0.0001997521358309766, + "loss": 1.2624, + "step": 269 + }, + { + "epoch": 0.051946490305125506, + "grad_norm": 0.3992247682089908, + "learning_rate": 0.00019974773129671701, + "loss": 1.2024, + "step": 270 + }, + { + "epoch": 0.05213888471366301, + "grad_norm": 0.33516486240992777, + "learning_rate": 0.00019974328802158797, + "loss": 1.1163, + "step": 271 + }, + { + "epoch": 0.05233127912220051, + "grad_norm": 0.36672908086251843, + "learning_rate": 0.0001997388060073152, + "loss": 1.2088, + "step": 272 + }, + { + "epoch": 0.052523673530738016, + "grad_norm": 0.3238539367847135, + "learning_rate": 0.00019973428525563947, + "loss": 1.262, + "step": 273 + }, + { + "epoch": 0.05271606793927552, + "grad_norm": 0.369542699902845, + "learning_rate": 0.00019972972576831656, + "loss": 1.1273, + "step": 274 + }, + { + "epoch": 0.05290846234781302, + "grad_norm": 0.4458707809374286, + "learning_rate": 0.0001997251275471174, + "loss": 1.1909, + "step": 275 + }, + { + "epoch": 0.05310085675635052, + "grad_norm": 0.4577204769176713, + "learning_rate": 0.00019972049059382782, + "loss": 1.295, + "step": 276 + }, + { + "epoch": 0.05329325116488802, + "grad_norm": 0.3116568734534432, + "learning_rate": 0.00019971581491024873, + "loss": 1.2724, + "step": 277 + }, + { + "epoch": 0.05348564557342552, + "grad_norm": 0.3148014098925133, + "learning_rate": 0.0001997111004981962, + "loss": 1.2434, + "step": 278 + }, + { + "epoch": 0.05367803998196302, + "grad_norm": 0.3313017770241379, + "learning_rate": 0.00019970634735950115, + "loss": 1.2233, + "step": 279 + }, + { + "epoch": 0.05387043439050053, + "grad_norm": 0.31638418972864946, + "learning_rate": 0.00019970155549600978, + "loss": 1.2812, + "step": 280 + }, 
+ { + "epoch": 0.05406282879903803, + "grad_norm": 0.3440305445933539, + "learning_rate": 0.00019969672490958304, + "loss": 1.2002, + "step": 281 + }, + { + "epoch": 0.05425522320757553, + "grad_norm": 0.42212263392448246, + "learning_rate": 0.0001996918556020972, + "loss": 1.2141, + "step": 282 + }, + { + "epoch": 0.05444761761611303, + "grad_norm": 0.3913418964374786, + "learning_rate": 0.0001996869475754434, + "loss": 1.1538, + "step": 283 + }, + { + "epoch": 0.05464001202465053, + "grad_norm": 0.255157434086349, + "learning_rate": 0.00019968200083152782, + "loss": 1.1592, + "step": 284 + }, + { + "epoch": 0.054832406433188034, + "grad_norm": 0.3949120063024248, + "learning_rate": 0.00019967701537227175, + "loss": 1.2124, + "step": 285 + }, + { + "epoch": 0.055024800841725535, + "grad_norm": 0.4285206326851563, + "learning_rate": 0.00019967199119961152, + "loss": 1.2727, + "step": 286 + }, + { + "epoch": 0.055217195250263036, + "grad_norm": 0.38899814654007914, + "learning_rate": 0.0001996669283154984, + "loss": 1.3096, + "step": 287 + }, + { + "epoch": 0.055409589658800544, + "grad_norm": 0.36328914022359676, + "learning_rate": 0.0001996618267218988, + "loss": 1.3222, + "step": 288 + }, + { + "epoch": 0.055601984067338045, + "grad_norm": 0.32127260127884855, + "learning_rate": 0.00019965668642079408, + "loss": 1.2761, + "step": 289 + }, + { + "epoch": 0.055794378475875546, + "grad_norm": 0.31629289747544054, + "learning_rate": 0.00019965150741418073, + "loss": 1.2771, + "step": 290 + }, + { + "epoch": 0.05598677288441305, + "grad_norm": 0.3920429739286175, + "learning_rate": 0.0001996462897040702, + "loss": 1.1744, + "step": 291 + }, + { + "epoch": 0.05617916729295055, + "grad_norm": 0.29619481909023426, + "learning_rate": 0.0001996410332924889, + "loss": 1.262, + "step": 292 + }, + { + "epoch": 0.05637156170148805, + "grad_norm": 0.37749915135309287, + "learning_rate": 0.0001996357381814785, + "loss": 1.1249, + "step": 293 + }, + { + "epoch": 0.05656395611002555, + "grad_norm": 0.3146318722157494, + "learning_rate": 0.00019963040437309549, + "loss": 1.2694, + "step": 294 + }, + { + "epoch": 0.05675635051856306, + "grad_norm": 0.3743924130658171, + "learning_rate": 0.00019962503186941142, + "loss": 1.3085, + "step": 295 + }, + { + "epoch": 0.05694874492710056, + "grad_norm": 0.38695803467811296, + "learning_rate": 0.00019961962067251298, + "loss": 1.1613, + "step": 296 + }, + { + "epoch": 0.05714113933563806, + "grad_norm": 0.30996720449326803, + "learning_rate": 0.00019961417078450178, + "loss": 1.2838, + "step": 297 + }, + { + "epoch": 0.05733353374417556, + "grad_norm": 0.3543253614034791, + "learning_rate": 0.00019960868220749448, + "loss": 1.1588, + "step": 298 + }, + { + "epoch": 0.05752592815271306, + "grad_norm": 0.3072955296875589, + "learning_rate": 0.00019960315494362284, + "loss": 1.2817, + "step": 299 + }, + { + "epoch": 0.05771832256125056, + "grad_norm": 0.3433406675719805, + "learning_rate": 0.00019959758899503353, + "loss": 1.1408, + "step": 300 + }, + { + "epoch": 0.05791071696978806, + "grad_norm": 0.3985503674139911, + "learning_rate": 0.0001995919843638883, + "loss": 1.164, + "step": 301 + }, + { + "epoch": 0.058103111378325564, + "grad_norm": 0.3130134403606259, + "learning_rate": 0.00019958634105236395, + "loss": 1.2136, + "step": 302 + }, + { + "epoch": 0.05829550578686307, + "grad_norm": 0.42122450250188936, + "learning_rate": 0.00019958065906265228, + "loss": 1.1732, + "step": 303 + }, + { + "epoch": 0.05848790019540057, + "grad_norm": 0.42812024204470966, + 
"learning_rate": 0.0001995749383969601, + "loss": 1.1633, + "step": 304 + }, + { + "epoch": 0.058680294603938074, + "grad_norm": 0.2834380855521078, + "learning_rate": 0.00019956917905750924, + "loss": 1.2696, + "step": 305 + }, + { + "epoch": 0.058872689012475575, + "grad_norm": 0.4658231344202302, + "learning_rate": 0.00019956338104653657, + "loss": 1.26, + "step": 306 + }, + { + "epoch": 0.059065083421013076, + "grad_norm": 0.2913686370266581, + "learning_rate": 0.00019955754436629399, + "loss": 1.2472, + "step": 307 + }, + { + "epoch": 0.05925747782955058, + "grad_norm": 0.3246465243061967, + "learning_rate": 0.00019955166901904837, + "loss": 1.1957, + "step": 308 + }, + { + "epoch": 0.05944987223808808, + "grad_norm": 0.3469837804043362, + "learning_rate": 0.00019954575500708162, + "loss": 1.3085, + "step": 309 + }, + { + "epoch": 0.059642266646625586, + "grad_norm": 0.37556550588560683, + "learning_rate": 0.0001995398023326907, + "loss": 1.2829, + "step": 310 + }, + { + "epoch": 0.05983466105516309, + "grad_norm": 0.3499872905769135, + "learning_rate": 0.00019953381099818755, + "loss": 1.2292, + "step": 311 + }, + { + "epoch": 0.06002705546370059, + "grad_norm": 0.3717751806601391, + "learning_rate": 0.00019952778100589913, + "loss": 1.2553, + "step": 312 + }, + { + "epoch": 0.06021944987223809, + "grad_norm": 0.36402968272922626, + "learning_rate": 0.00019952171235816747, + "loss": 1.1841, + "step": 313 + }, + { + "epoch": 0.06041184428077559, + "grad_norm": 0.30485831163497246, + "learning_rate": 0.00019951560505734945, + "loss": 1.2173, + "step": 314 + }, + { + "epoch": 0.06060423868931309, + "grad_norm": 0.32969845452172397, + "learning_rate": 0.00019950945910581717, + "loss": 1.2912, + "step": 315 + }, + { + "epoch": 0.06079663309785059, + "grad_norm": 0.405942459393916, + "learning_rate": 0.00019950327450595764, + "loss": 1.1841, + "step": 316 + }, + { + "epoch": 0.06098902750638809, + "grad_norm": 0.36100896058101106, + "learning_rate": 0.00019949705126017287, + "loss": 1.1737, + "step": 317 + }, + { + "epoch": 0.0611814219149256, + "grad_norm": 0.3654122632134192, + "learning_rate": 0.00019949078937087986, + "loss": 1.0859, + "step": 318 + }, + { + "epoch": 0.0613738163234631, + "grad_norm": 0.3807698199736515, + "learning_rate": 0.0001994844888405107, + "loss": 1.2496, + "step": 319 + }, + { + "epoch": 0.0615662107320006, + "grad_norm": 0.39433143743513216, + "learning_rate": 0.00019947814967151244, + "loss": 1.2299, + "step": 320 + }, + { + "epoch": 0.0617586051405381, + "grad_norm": 0.4138480507334561, + "learning_rate": 0.00019947177186634715, + "loss": 1.1938, + "step": 321 + }, + { + "epoch": 0.061950999549075604, + "grad_norm": 0.3627091646312647, + "learning_rate": 0.00019946535542749184, + "loss": 1.2097, + "step": 322 + }, + { + "epoch": 0.062143393957613105, + "grad_norm": 0.3529420782514917, + "learning_rate": 0.00019945890035743866, + "loss": 1.2047, + "step": 323 + }, + { + "epoch": 0.062335788366150606, + "grad_norm": 0.339502693512079, + "learning_rate": 0.00019945240665869465, + "loss": 1.2139, + "step": 324 + }, + { + "epoch": 0.0625281827746881, + "grad_norm": 0.38730574906823206, + "learning_rate": 0.00019944587433378186, + "loss": 1.2119, + "step": 325 + }, + { + "epoch": 0.06272057718322561, + "grad_norm": 0.3239599185069091, + "learning_rate": 0.0001994393033852374, + "loss": 1.0793, + "step": 326 + }, + { + "epoch": 0.06291297159176311, + "grad_norm": 0.42172393932528346, + "learning_rate": 0.00019943269381561334, + "loss": 1.2387, + "step": 327 + }, + 
{ + "epoch": 0.06310536600030062, + "grad_norm": 0.4649999946630129, + "learning_rate": 0.00019942604562747678, + "loss": 1.1935, + "step": 328 + }, + { + "epoch": 0.06329776040883812, + "grad_norm": 0.3283055534525505, + "learning_rate": 0.00019941935882340976, + "loss": 1.1858, + "step": 329 + }, + { + "epoch": 0.06349015481737562, + "grad_norm": 0.3276058870296969, + "learning_rate": 0.00019941263340600939, + "loss": 1.2873, + "step": 330 + }, + { + "epoch": 0.06368254922591313, + "grad_norm": 0.25504389436073543, + "learning_rate": 0.00019940586937788776, + "loss": 1.2078, + "step": 331 + }, + { + "epoch": 0.06387494363445062, + "grad_norm": 0.3163236598873421, + "learning_rate": 0.0001993990667416719, + "loss": 1.1835, + "step": 332 + }, + { + "epoch": 0.06406733804298813, + "grad_norm": 0.43909878586615, + "learning_rate": 0.0001993922255000039, + "loss": 1.1776, + "step": 333 + }, + { + "epoch": 0.06425973245152562, + "grad_norm": 0.38117519645111986, + "learning_rate": 0.0001993853456555408, + "loss": 1.1558, + "step": 334 + }, + { + "epoch": 0.06445212686006313, + "grad_norm": 0.31423356199533864, + "learning_rate": 0.00019937842721095468, + "loss": 1.2456, + "step": 335 + }, + { + "epoch": 0.06464452126860064, + "grad_norm": 0.27265001833117847, + "learning_rate": 0.00019937147016893257, + "loss": 1.1418, + "step": 336 + }, + { + "epoch": 0.06483691567713813, + "grad_norm": 0.34005108000768947, + "learning_rate": 0.00019936447453217646, + "loss": 1.2031, + "step": 337 + }, + { + "epoch": 0.06502931008567564, + "grad_norm": 0.3491656312670484, + "learning_rate": 0.00019935744030340346, + "loss": 1.2508, + "step": 338 + }, + { + "epoch": 0.06522170449421313, + "grad_norm": 0.3367736624898666, + "learning_rate": 0.00019935036748534553, + "loss": 1.2166, + "step": 339 + }, + { + "epoch": 0.06541409890275064, + "grad_norm": 0.3128517006692993, + "learning_rate": 0.0001993432560807497, + "loss": 1.2333, + "step": 340 + }, + { + "epoch": 0.06560649331128814, + "grad_norm": 0.30348133599339916, + "learning_rate": 0.00019933610609237793, + "loss": 1.1224, + "step": 341 + }, + { + "epoch": 0.06579888771982564, + "grad_norm": 0.29819897968115294, + "learning_rate": 0.00019932891752300717, + "loss": 1.2299, + "step": 342 + }, + { + "epoch": 0.06599128212836314, + "grad_norm": 0.33118202228380633, + "learning_rate": 0.00019932169037542946, + "loss": 1.1434, + "step": 343 + }, + { + "epoch": 0.06618367653690065, + "grad_norm": 0.410488685395728, + "learning_rate": 0.00019931442465245165, + "loss": 1.2197, + "step": 344 + }, + { + "epoch": 0.06637607094543815, + "grad_norm": 0.32967419199508924, + "learning_rate": 0.00019930712035689575, + "loss": 1.2488, + "step": 345 + }, + { + "epoch": 0.06656846535397565, + "grad_norm": 0.303306771079036, + "learning_rate": 0.00019929977749159859, + "loss": 1.1851, + "step": 346 + }, + { + "epoch": 0.06676085976251316, + "grad_norm": 0.3991113102999262, + "learning_rate": 0.00019929239605941208, + "loss": 1.1829, + "step": 347 + }, + { + "epoch": 0.06695325417105065, + "grad_norm": 0.26977548277363106, + "learning_rate": 0.0001992849760632031, + "loss": 1.2384, + "step": 348 + }, + { + "epoch": 0.06714564857958816, + "grad_norm": 0.4217022102813526, + "learning_rate": 0.00019927751750585347, + "loss": 1.1611, + "step": 349 + }, + { + "epoch": 0.06733804298812565, + "grad_norm": 0.27307458080717384, + "learning_rate": 0.00019927002039026002, + "loss": 1.2025, + "step": 350 + }, + { + "epoch": 0.06753043739666316, + "grad_norm": 0.3139832334435657, + 
"learning_rate": 0.00019926248471933454, + "loss": 1.228, + "step": 351 + }, + { + "epoch": 0.06772283180520067, + "grad_norm": 0.2890948080597137, + "learning_rate": 0.0001992549104960038, + "loss": 1.2003, + "step": 352 + }, + { + "epoch": 0.06791522621373816, + "grad_norm": 0.3364470939670738, + "learning_rate": 0.0001992472977232095, + "loss": 1.243, + "step": 353 + }, + { + "epoch": 0.06810762062227567, + "grad_norm": 0.3106432538005775, + "learning_rate": 0.00019923964640390843, + "loss": 1.1601, + "step": 354 + }, + { + "epoch": 0.06830001503081316, + "grad_norm": 0.2543076746328548, + "learning_rate": 0.00019923195654107225, + "loss": 1.1535, + "step": 355 + }, + { + "epoch": 0.06849240943935067, + "grad_norm": 0.3605226962761155, + "learning_rate": 0.00019922422813768758, + "loss": 1.2255, + "step": 356 + }, + { + "epoch": 0.06868480384788817, + "grad_norm": 0.3069682346498868, + "learning_rate": 0.00019921646119675605, + "loss": 1.2549, + "step": 357 + }, + { + "epoch": 0.06887719825642567, + "grad_norm": 0.3562071033482331, + "learning_rate": 0.00019920865572129425, + "loss": 1.3144, + "step": 358 + }, + { + "epoch": 0.06906959266496318, + "grad_norm": 0.2632783307081333, + "learning_rate": 0.00019920081171433379, + "loss": 1.2696, + "step": 359 + }, + { + "epoch": 0.06926198707350067, + "grad_norm": 0.3930372697927493, + "learning_rate": 0.00019919292917892112, + "loss": 1.2285, + "step": 360 + }, + { + "epoch": 0.06945438148203818, + "grad_norm": 0.29756656546134286, + "learning_rate": 0.00019918500811811778, + "loss": 1.1767, + "step": 361 + }, + { + "epoch": 0.06964677589057568, + "grad_norm": 0.2987196051114864, + "learning_rate": 0.00019917704853500016, + "loss": 1.1441, + "step": 362 + }, + { + "epoch": 0.06983917029911318, + "grad_norm": 0.3273696683415939, + "learning_rate": 0.00019916905043265972, + "loss": 1.2167, + "step": 363 + }, + { + "epoch": 0.07003156470765068, + "grad_norm": 0.32799607870164604, + "learning_rate": 0.00019916101381420285, + "loss": 1.1706, + "step": 364 + }, + { + "epoch": 0.07022395911618819, + "grad_norm": 0.31099871021757747, + "learning_rate": 0.00019915293868275083, + "loss": 1.1124, + "step": 365 + }, + { + "epoch": 0.0704163535247257, + "grad_norm": 0.2596040843750913, + "learning_rate": 0.00019914482504143995, + "loss": 1.2437, + "step": 366 + }, + { + "epoch": 0.07060874793326319, + "grad_norm": 0.34030655508034796, + "learning_rate": 0.00019913667289342147, + "loss": 1.1917, + "step": 367 + }, + { + "epoch": 0.0708011423418007, + "grad_norm": 0.3390501754848551, + "learning_rate": 0.0001991284822418616, + "loss": 1.1255, + "step": 368 + }, + { + "epoch": 0.07099353675033819, + "grad_norm": 0.3128963159659474, + "learning_rate": 0.00019912025308994148, + "loss": 1.2202, + "step": 369 + }, + { + "epoch": 0.0711859311588757, + "grad_norm": 0.3751209174302873, + "learning_rate": 0.00019911198544085722, + "loss": 1.2319, + "step": 370 + }, + { + "epoch": 0.07137832556741319, + "grad_norm": 0.3626056920250688, + "learning_rate": 0.00019910367929781988, + "loss": 1.2451, + "step": 371 + }, + { + "epoch": 0.0715707199759507, + "grad_norm": 0.2651022892019285, + "learning_rate": 0.00019909533466405546, + "loss": 1.215, + "step": 372 + }, + { + "epoch": 0.0717631143844882, + "grad_norm": 0.2695241953067502, + "learning_rate": 0.00019908695154280496, + "loss": 1.2342, + "step": 373 + }, + { + "epoch": 0.0719555087930257, + "grad_norm": 0.396793772597451, + "learning_rate": 0.00019907852993732424, + "loss": 1.2262, + "step": 374 + }, + { + 
"epoch": 0.07214790320156321, + "grad_norm": 0.3867323324164736, + "learning_rate": 0.0001990700698508842, + "loss": 1.2584, + "step": 375 + }, + { + "epoch": 0.0723402976101007, + "grad_norm": 0.32459859740205627, + "learning_rate": 0.0001990615712867706, + "loss": 1.2777, + "step": 376 + }, + { + "epoch": 0.07253269201863821, + "grad_norm": 0.29589772643405204, + "learning_rate": 0.00019905303424828417, + "loss": 1.1969, + "step": 377 + }, + { + "epoch": 0.0727250864271757, + "grad_norm": 0.2818076727449584, + "learning_rate": 0.00019904445873874068, + "loss": 1.2332, + "step": 378 + }, + { + "epoch": 0.07291748083571321, + "grad_norm": 0.32737308354754885, + "learning_rate": 0.00019903584476147065, + "loss": 1.1826, + "step": 379 + }, + { + "epoch": 0.07310987524425071, + "grad_norm": 0.32575071044631815, + "learning_rate": 0.00019902719231981974, + "loss": 1.2484, + "step": 380 + }, + { + "epoch": 0.07330226965278822, + "grad_norm": 0.3145521605379211, + "learning_rate": 0.00019901850141714841, + "loss": 1.1781, + "step": 381 + }, + { + "epoch": 0.07349466406132572, + "grad_norm": 0.4515392894166184, + "learning_rate": 0.0001990097720568321, + "loss": 1.2096, + "step": 382 + }, + { + "epoch": 0.07368705846986322, + "grad_norm": 0.29729760081447826, + "learning_rate": 0.00019900100424226125, + "loss": 1.1934, + "step": 383 + }, + { + "epoch": 0.07387945287840073, + "grad_norm": 0.3411834562153974, + "learning_rate": 0.00019899219797684113, + "loss": 1.2245, + "step": 384 + }, + { + "epoch": 0.07407184728693822, + "grad_norm": 0.2711191466888693, + "learning_rate": 0.000198983353263992, + "loss": 1.2488, + "step": 385 + }, + { + "epoch": 0.07426424169547573, + "grad_norm": 0.3165981210963245, + "learning_rate": 0.00019897447010714905, + "loss": 1.1832, + "step": 386 + }, + { + "epoch": 0.07445663610401322, + "grad_norm": 0.33524258854840583, + "learning_rate": 0.00019896554850976238, + "loss": 1.1359, + "step": 387 + }, + { + "epoch": 0.07464903051255073, + "grad_norm": 0.26846132333679673, + "learning_rate": 0.00019895658847529708, + "loss": 1.1609, + "step": 388 + }, + { + "epoch": 0.07484142492108824, + "grad_norm": 0.29609002005647955, + "learning_rate": 0.00019894759000723306, + "loss": 1.2819, + "step": 389 + }, + { + "epoch": 0.07503381932962573, + "grad_norm": 0.32203131595754636, + "learning_rate": 0.00019893855310906526, + "loss": 1.1585, + "step": 390 + }, + { + "epoch": 0.07522621373816324, + "grad_norm": 0.4019557977818726, + "learning_rate": 0.0001989294777843035, + "loss": 1.1633, + "step": 391 + }, + { + "epoch": 0.07541860814670073, + "grad_norm": 0.3334773688106313, + "learning_rate": 0.00019892036403647254, + "loss": 1.2853, + "step": 392 + }, + { + "epoch": 0.07561100255523824, + "grad_norm": 0.3209140493686673, + "learning_rate": 0.00019891121186911206, + "loss": 1.1525, + "step": 393 + }, + { + "epoch": 0.07580339696377573, + "grad_norm": 0.3271402047611454, + "learning_rate": 0.00019890202128577662, + "loss": 1.1953, + "step": 394 + }, + { + "epoch": 0.07599579137231324, + "grad_norm": 0.34274408744583185, + "learning_rate": 0.00019889279229003576, + "loss": 1.1798, + "step": 395 + }, + { + "epoch": 0.07618818578085075, + "grad_norm": 0.3867062817728671, + "learning_rate": 0.00019888352488547394, + "loss": 1.2351, + "step": 396 + }, + { + "epoch": 0.07638058018938824, + "grad_norm": 0.2874638157245163, + "learning_rate": 0.00019887421907569048, + "loss": 1.1834, + "step": 397 + }, + { + "epoch": 0.07657297459792575, + "grad_norm": 0.2517062899344747, + 
"learning_rate": 0.00019886487486429964, + "loss": 1.2664, + "step": 398 + }, + { + "epoch": 0.07676536900646325, + "grad_norm": 0.3447747875977769, + "learning_rate": 0.00019885549225493064, + "loss": 1.1547, + "step": 399 + }, + { + "epoch": 0.07695776341500075, + "grad_norm": 0.28999841111237157, + "learning_rate": 0.00019884607125122755, + "loss": 1.1282, + "step": 400 + }, + { + "epoch": 0.07715015782353825, + "grad_norm": 0.3328577533371222, + "learning_rate": 0.0001988366118568494, + "loss": 1.1872, + "step": 401 + }, + { + "epoch": 0.07734255223207576, + "grad_norm": 0.3472634071603433, + "learning_rate": 0.0001988271140754701, + "loss": 1.0705, + "step": 402 + }, + { + "epoch": 0.07753494664061325, + "grad_norm": 0.28289529408099684, + "learning_rate": 0.00019881757791077845, + "loss": 1.2342, + "step": 403 + }, + { + "epoch": 0.07772734104915076, + "grad_norm": 0.31613099586606025, + "learning_rate": 0.00019880800336647824, + "loss": 1.2602, + "step": 404 + }, + { + "epoch": 0.07791973545768827, + "grad_norm": 0.42006664644376834, + "learning_rate": 0.0001987983904462881, + "loss": 1.1138, + "step": 405 + }, + { + "epoch": 0.07811212986622576, + "grad_norm": 0.35417887060708514, + "learning_rate": 0.00019878873915394154, + "loss": 1.2261, + "step": 406 + }, + { + "epoch": 0.07830452427476327, + "grad_norm": 0.38430248445706694, + "learning_rate": 0.00019877904949318703, + "loss": 1.2139, + "step": 407 + }, + { + "epoch": 0.07849691868330076, + "grad_norm": 0.3548196863630682, + "learning_rate": 0.00019876932146778794, + "loss": 1.1664, + "step": 408 + }, + { + "epoch": 0.07868931309183827, + "grad_norm": 0.3901646019896108, + "learning_rate": 0.00019875955508152253, + "loss": 1.1328, + "step": 409 + }, + { + "epoch": 0.07888170750037576, + "grad_norm": 0.3238762071302361, + "learning_rate": 0.0001987497503381839, + "loss": 1.1553, + "step": 410 + }, + { + "epoch": 0.07907410190891327, + "grad_norm": 0.29814290274840377, + "learning_rate": 0.00019873990724158014, + "loss": 1.1672, + "step": 411 + }, + { + "epoch": 0.07926649631745078, + "grad_norm": 0.35034479493074044, + "learning_rate": 0.00019873002579553418, + "loss": 1.0997, + "step": 412 + }, + { + "epoch": 0.07945889072598827, + "grad_norm": 0.35867542565891897, + "learning_rate": 0.00019872010600388392, + "loss": 1.1303, + "step": 413 + }, + { + "epoch": 0.07965128513452578, + "grad_norm": 0.2909167833413755, + "learning_rate": 0.00019871014787048197, + "loss": 1.1295, + "step": 414 + }, + { + "epoch": 0.07984367954306328, + "grad_norm": 0.3668728514473036, + "learning_rate": 0.00019870015139919606, + "loss": 1.2464, + "step": 415 + }, + { + "epoch": 0.08003607395160078, + "grad_norm": 0.3053516661915177, + "learning_rate": 0.00019869011659390866, + "loss": 1.1474, + "step": 416 + }, + { + "epoch": 0.08022846836013828, + "grad_norm": 0.36341260589354013, + "learning_rate": 0.00019868004345851716, + "loss": 1.2101, + "step": 417 + }, + { + "epoch": 0.08042086276867579, + "grad_norm": 0.33930090088971565, + "learning_rate": 0.00019866993199693392, + "loss": 1.0921, + "step": 418 + }, + { + "epoch": 0.0806132571772133, + "grad_norm": 0.49420476471108643, + "learning_rate": 0.000198659782213086, + "loss": 1.1695, + "step": 419 + }, + { + "epoch": 0.08080565158575079, + "grad_norm": 0.2841104206366389, + "learning_rate": 0.00019864959411091556, + "loss": 1.3438, + "step": 420 + }, + { + "epoch": 0.0809980459942883, + "grad_norm": 0.3438830634633235, + "learning_rate": 0.00019863936769437955, + "loss": 1.1926, + "step": 421 + }, 
+ { + "epoch": 0.08119044040282579, + "grad_norm": 0.32544277162464536, + "learning_rate": 0.00019862910296744967, + "loss": 1.2137, + "step": 422 + }, + { + "epoch": 0.0813828348113633, + "grad_norm": 0.30936189164901584, + "learning_rate": 0.00019861879993411275, + "loss": 1.2412, + "step": 423 + }, + { + "epoch": 0.08157522921990079, + "grad_norm": 0.371471489265311, + "learning_rate": 0.00019860845859837032, + "loss": 1.0245, + "step": 424 + }, + { + "epoch": 0.0817676236284383, + "grad_norm": 0.33541081190268485, + "learning_rate": 0.00019859807896423882, + "loss": 1.1537, + "step": 425 + }, + { + "epoch": 0.0819600180369758, + "grad_norm": 0.3085229952981492, + "learning_rate": 0.0001985876610357496, + "loss": 1.116, + "step": 426 + }, + { + "epoch": 0.0821524124455133, + "grad_norm": 0.28895856037292456, + "learning_rate": 0.00019857720481694885, + "loss": 1.203, + "step": 427 + }, + { + "epoch": 0.08234480685405081, + "grad_norm": 0.45072463668237145, + "learning_rate": 0.00019856671031189766, + "loss": 1.1595, + "step": 428 + }, + { + "epoch": 0.0825372012625883, + "grad_norm": 0.3981656496594179, + "learning_rate": 0.000198556177524672, + "loss": 1.2791, + "step": 429 + }, + { + "epoch": 0.08272959567112581, + "grad_norm": 0.34959095793759987, + "learning_rate": 0.0001985456064593626, + "loss": 1.1515, + "step": 430 + }, + { + "epoch": 0.0829219900796633, + "grad_norm": 0.42780689950352624, + "learning_rate": 0.00019853499712007522, + "loss": 1.2085, + "step": 431 + }, + { + "epoch": 0.08311438448820081, + "grad_norm": 0.3144910521861958, + "learning_rate": 0.00019852434951093034, + "loss": 1.1487, + "step": 432 + }, + { + "epoch": 0.0833067788967383, + "grad_norm": 0.40375939258608123, + "learning_rate": 0.00019851366363606346, + "loss": 1.2075, + "step": 433 + }, + { + "epoch": 0.08349917330527581, + "grad_norm": 0.2530071722324599, + "learning_rate": 0.00019850293949962478, + "loss": 1.1387, + "step": 434 + }, + { + "epoch": 0.08369156771381332, + "grad_norm": 0.28995150953088794, + "learning_rate": 0.00019849217710577946, + "loss": 1.2065, + "step": 435 + }, + { + "epoch": 0.08388396212235082, + "grad_norm": 0.3947416878413457, + "learning_rate": 0.00019848137645870747, + "loss": 1.1915, + "step": 436 + }, + { + "epoch": 0.08407635653088832, + "grad_norm": 0.2822450902075487, + "learning_rate": 0.0001984705375626036, + "loss": 1.1947, + "step": 437 + }, + { + "epoch": 0.08426875093942582, + "grad_norm": 0.35006156835637003, + "learning_rate": 0.0001984596604216777, + "loss": 1.1896, + "step": 438 + }, + { + "epoch": 0.08446114534796333, + "grad_norm": 0.2940332831878667, + "learning_rate": 0.0001984487450401542, + "loss": 1.2575, + "step": 439 + }, + { + "epoch": 0.08465353975650082, + "grad_norm": 0.2750477917770306, + "learning_rate": 0.00019843779142227256, + "loss": 1.1838, + "step": 440 + }, + { + "epoch": 0.08484593416503833, + "grad_norm": 0.44365220556057505, + "learning_rate": 0.00019842679957228704, + "loss": 1.1236, + "step": 441 + }, + { + "epoch": 0.08503832857357584, + "grad_norm": 0.35323008737584244, + "learning_rate": 0.00019841576949446675, + "loss": 1.2028, + "step": 442 + }, + { + "epoch": 0.08523072298211333, + "grad_norm": 0.3065207179608405, + "learning_rate": 0.0001984047011930956, + "loss": 1.1576, + "step": 443 + }, + { + "epoch": 0.08542311739065084, + "grad_norm": 0.337203149463998, + "learning_rate": 0.00019839359467247242, + "loss": 1.0646, + "step": 444 + }, + { + "epoch": 0.08561551179918833, + "grad_norm": 0.3237553476549483, + 
"learning_rate": 0.0001983824499369109, + "loss": 1.1432, + "step": 445 + }, + { + "epoch": 0.08580790620772584, + "grad_norm": 0.38487437473167235, + "learning_rate": 0.00019837126699073947, + "loss": 1.2304, + "step": 446 + }, + { + "epoch": 0.08600030061626333, + "grad_norm": 0.38638435218236167, + "learning_rate": 0.00019836004583830146, + "loss": 1.1217, + "step": 447 + }, + { + "epoch": 0.08619269502480084, + "grad_norm": 0.4032554737655603, + "learning_rate": 0.00019834878648395505, + "loss": 1.2968, + "step": 448 + }, + { + "epoch": 0.08638508943333835, + "grad_norm": 0.3920953735181232, + "learning_rate": 0.00019833748893207325, + "loss": 1.1408, + "step": 449 + }, + { + "epoch": 0.08657748384187584, + "grad_norm": 0.3308035192207735, + "learning_rate": 0.00019832615318704389, + "loss": 1.2439, + "step": 450 + }, + { + "epoch": 0.08676987825041335, + "grad_norm": 0.3162262954393446, + "learning_rate": 0.00019831477925326963, + "loss": 1.1697, + "step": 451 + }, + { + "epoch": 0.08696227265895085, + "grad_norm": 0.33534237222552854, + "learning_rate": 0.00019830336713516799, + "loss": 1.1795, + "step": 452 + }, + { + "epoch": 0.08715466706748835, + "grad_norm": 0.3577028535782865, + "learning_rate": 0.00019829191683717133, + "loss": 1.1538, + "step": 453 + }, + { + "epoch": 0.08734706147602585, + "grad_norm": 0.39848663571777154, + "learning_rate": 0.00019828042836372677, + "loss": 1.1412, + "step": 454 + }, + { + "epoch": 0.08753945588456336, + "grad_norm": 0.3876119411442906, + "learning_rate": 0.00019826890171929632, + "loss": 1.1539, + "step": 455 + }, + { + "epoch": 0.08773185029310086, + "grad_norm": 0.29051765041640587, + "learning_rate": 0.00019825733690835679, + "loss": 1.0924, + "step": 456 + }, + { + "epoch": 0.08792424470163836, + "grad_norm": 0.27391860563289705, + "learning_rate": 0.00019824573393539984, + "loss": 1.1334, + "step": 457 + }, + { + "epoch": 0.08811663911017587, + "grad_norm": 0.33462249053807647, + "learning_rate": 0.0001982340928049319, + "loss": 1.125, + "step": 458 + }, + { + "epoch": 0.08830903351871336, + "grad_norm": 0.25589837840019536, + "learning_rate": 0.00019822241352147427, + "loss": 1.1796, + "step": 459 + }, + { + "epoch": 0.08850142792725087, + "grad_norm": 0.3066481549327959, + "learning_rate": 0.00019821069608956307, + "loss": 1.2327, + "step": 460 + }, + { + "epoch": 0.08869382233578836, + "grad_norm": 0.36509002454342965, + "learning_rate": 0.00019819894051374915, + "loss": 1.2575, + "step": 461 + }, + { + "epoch": 0.08888621674432587, + "grad_norm": 0.5428321934587285, + "learning_rate": 0.0001981871467985983, + "loss": 1.1804, + "step": 462 + }, + { + "epoch": 0.08907861115286338, + "grad_norm": 0.44837883703400355, + "learning_rate": 0.00019817531494869105, + "loss": 1.2685, + "step": 463 + }, + { + "epoch": 0.08927100556140087, + "grad_norm": 0.42577528440150764, + "learning_rate": 0.00019816344496862272, + "loss": 1.1823, + "step": 464 + }, + { + "epoch": 0.08946339996993838, + "grad_norm": 0.5104164491806513, + "learning_rate": 0.00019815153686300352, + "loss": 1.1065, + "step": 465 + }, + { + "epoch": 0.08965579437847587, + "grad_norm": 0.3073359811722275, + "learning_rate": 0.0001981395906364584, + "loss": 1.2454, + "step": 466 + }, + { + "epoch": 0.08984818878701338, + "grad_norm": 0.3164709627212633, + "learning_rate": 0.00019812760629362716, + "loss": 1.2492, + "step": 467 + }, + { + "epoch": 0.09004058319555087, + "grad_norm": 0.3025378925163634, + "learning_rate": 0.0001981155838391643, + "loss": 1.2217, + "step": 468 + 
}, + { + "epoch": 0.09023297760408838, + "grad_norm": 0.3857186130510256, + "learning_rate": 0.00019810352327773935, + "loss": 1.1601, + "step": 469 + }, + { + "epoch": 0.09042537201262588, + "grad_norm": 0.35805603980179956, + "learning_rate": 0.00019809142461403633, + "loss": 1.1157, + "step": 470 + }, + { + "epoch": 0.09061776642116338, + "grad_norm": 0.36761443970418983, + "learning_rate": 0.00019807928785275434, + "loss": 1.2144, + "step": 471 + }, + { + "epoch": 0.09081016082970089, + "grad_norm": 0.36531426496163355, + "learning_rate": 0.0001980671129986071, + "loss": 1.0471, + "step": 472 + }, + { + "epoch": 0.09100255523823839, + "grad_norm": 0.341026820881479, + "learning_rate": 0.0001980549000563232, + "loss": 1.1444, + "step": 473 + }, + { + "epoch": 0.0911949496467759, + "grad_norm": 0.2916944170383586, + "learning_rate": 0.000198042649030646, + "loss": 1.1742, + "step": 474 + }, + { + "epoch": 0.09138734405531339, + "grad_norm": 0.32111097544336353, + "learning_rate": 0.00019803035992633366, + "loss": 1.2338, + "step": 475 + }, + { + "epoch": 0.0915797384638509, + "grad_norm": 0.4397000646582326, + "learning_rate": 0.00019801803274815917, + "loss": 1.13, + "step": 476 + }, + { + "epoch": 0.09177213287238839, + "grad_norm": 0.33090615106393406, + "learning_rate": 0.00019800566750091016, + "loss": 1.164, + "step": 477 + }, + { + "epoch": 0.0919645272809259, + "grad_norm": 0.31828284126092543, + "learning_rate": 0.00019799326418938924, + "loss": 1.1465, + "step": 478 + }, + { + "epoch": 0.0921569216894634, + "grad_norm": 0.4274700881284667, + "learning_rate": 0.0001979808228184137, + "loss": 1.1628, + "step": 479 + }, + { + "epoch": 0.0923493160980009, + "grad_norm": 0.3100930905766764, + "learning_rate": 0.0001979683433928156, + "loss": 1.2112, + "step": 480 + }, + { + "epoch": 0.09254171050653841, + "grad_norm": 0.3447305086343119, + "learning_rate": 0.0001979558259174418, + "loss": 1.1347, + "step": 481 + }, + { + "epoch": 0.0927341049150759, + "grad_norm": 0.3096479609034406, + "learning_rate": 0.00019794327039715395, + "loss": 1.1943, + "step": 482 + }, + { + "epoch": 0.09292649932361341, + "grad_norm": 0.3860457764883986, + "learning_rate": 0.0001979306768368285, + "loss": 1.1265, + "step": 483 + }, + { + "epoch": 0.0931188937321509, + "grad_norm": 0.37478427017509097, + "learning_rate": 0.0001979180452413566, + "loss": 1.1363, + "step": 484 + }, + { + "epoch": 0.09331128814068841, + "grad_norm": 0.3416737051733567, + "learning_rate": 0.00019790537561564428, + "loss": 1.1063, + "step": 485 + }, + { + "epoch": 0.09350368254922592, + "grad_norm": 0.33694559127844387, + "learning_rate": 0.00019789266796461222, + "loss": 1.1177, + "step": 486 + }, + { + "epoch": 0.09369607695776341, + "grad_norm": 0.34464276393453, + "learning_rate": 0.00019787992229319592, + "loss": 1.1136, + "step": 487 + }, + { + "epoch": 0.09388847136630092, + "grad_norm": 0.27502450360601083, + "learning_rate": 0.00019786713860634567, + "loss": 1.1176, + "step": 488 + }, + { + "epoch": 0.09408086577483842, + "grad_norm": 0.29014015439601665, + "learning_rate": 0.00019785431690902652, + "loss": 1.1984, + "step": 489 + }, + { + "epoch": 0.09427326018337592, + "grad_norm": 0.27825877470122073, + "learning_rate": 0.00019784145720621826, + "loss": 1.2074, + "step": 490 + }, + { + "epoch": 0.09446565459191342, + "grad_norm": 0.42605752272778963, + "learning_rate": 0.00019782855950291542, + "loss": 1.2561, + "step": 491 + }, + { + "epoch": 0.09465804900045093, + "grad_norm": 0.36320082065467807, + 
"learning_rate": 0.0001978156238041274, + "loss": 1.1957, + "step": 492 + }, + { + "epoch": 0.09485044340898843, + "grad_norm": 0.3485812315935624, + "learning_rate": 0.0001978026501148782, + "loss": 1.2829, + "step": 493 + }, + { + "epoch": 0.09504283781752593, + "grad_norm": 0.37100459131545566, + "learning_rate": 0.00019778963844020665, + "loss": 1.2091, + "step": 494 + }, + { + "epoch": 0.09523523222606343, + "grad_norm": 0.3254372234617211, + "learning_rate": 0.00019777658878516639, + "loss": 1.053, + "step": 495 + }, + { + "epoch": 0.09542762663460093, + "grad_norm": 0.3382921218252847, + "learning_rate": 0.0001977635011548257, + "loss": 1.2029, + "step": 496 + }, + { + "epoch": 0.09562002104313844, + "grad_norm": 0.33624752210662856, + "learning_rate": 0.0001977503755542677, + "loss": 1.1296, + "step": 497 + }, + { + "epoch": 0.09581241545167593, + "grad_norm": 0.29275302223924876, + "learning_rate": 0.00019773721198859022, + "loss": 1.1702, + "step": 498 + }, + { + "epoch": 0.09600480986021344, + "grad_norm": 0.38733623412226326, + "learning_rate": 0.00019772401046290586, + "loss": 1.1747, + "step": 499 + }, + { + "epoch": 0.09619720426875093, + "grad_norm": 0.32684777724574576, + "learning_rate": 0.00019771077098234186, + "loss": 1.1797, + "step": 500 + }, + { + "epoch": 0.09638959867728844, + "grad_norm": 0.3707423508963498, + "learning_rate": 0.00019769749355204032, + "loss": 1.2258, + "step": 501 + }, + { + "epoch": 0.09658199308582595, + "grad_norm": 0.3035231035025504, + "learning_rate": 0.00019768417817715809, + "loss": 1.2894, + "step": 502 + }, + { + "epoch": 0.09677438749436344, + "grad_norm": 0.3470316433153745, + "learning_rate": 0.00019767082486286665, + "loss": 1.1925, + "step": 503 + }, + { + "epoch": 0.09696678190290095, + "grad_norm": 0.389327656778959, + "learning_rate": 0.0001976574336143523, + "loss": 1.1069, + "step": 504 + }, + { + "epoch": 0.09715917631143844, + "grad_norm": 0.26923590638768, + "learning_rate": 0.00019764400443681606, + "loss": 1.2889, + "step": 505 + }, + { + "epoch": 0.09735157071997595, + "grad_norm": 0.32311857762032764, + "learning_rate": 0.00019763053733547366, + "loss": 1.2084, + "step": 506 + }, + { + "epoch": 0.09754396512851345, + "grad_norm": 0.2721190435899251, + "learning_rate": 0.0001976170323155555, + "loss": 1.0824, + "step": 507 + }, + { + "epoch": 0.09773635953705095, + "grad_norm": 0.2973336404264234, + "learning_rate": 0.0001976034893823069, + "loss": 1.1494, + "step": 508 + }, + { + "epoch": 0.09792875394558846, + "grad_norm": 0.29675934264749443, + "learning_rate": 0.0001975899085409876, + "loss": 1.1447, + "step": 509 + }, + { + "epoch": 0.09812114835412596, + "grad_norm": 0.32625734126550004, + "learning_rate": 0.00019757628979687246, + "loss": 1.2385, + "step": 510 + }, + { + "epoch": 0.09831354276266346, + "grad_norm": 0.33597951594219033, + "learning_rate": 0.0001975626331552507, + "loss": 1.1841, + "step": 511 + }, + { + "epoch": 0.09850593717120096, + "grad_norm": 0.3328618992626306, + "learning_rate": 0.00019754893862142643, + "loss": 1.1188, + "step": 512 + }, + { + "epoch": 0.09869833157973847, + "grad_norm": 0.2718437554018281, + "learning_rate": 0.00019753520620071843, + "loss": 1.1639, + "step": 513 + }, + { + "epoch": 0.09889072598827596, + "grad_norm": 0.3453880777439029, + "learning_rate": 0.0001975214358984603, + "loss": 1.1144, + "step": 514 + }, + { + "epoch": 0.09908312039681347, + "grad_norm": 0.37262831962128673, + "learning_rate": 0.00019750762772000014, + "loss": 1.2572, + "step": 515 + }, + { + 
"epoch": 0.09927551480535098, + "grad_norm": 0.3214984257975183, + "learning_rate": 0.000197493781670701, + "loss": 1.1261, + "step": 516 + }, + { + "epoch": 0.09946790921388847, + "grad_norm": 0.2928973383182354, + "learning_rate": 0.00019747989775594044, + "loss": 1.2205, + "step": 517 + }, + { + "epoch": 0.09966030362242598, + "grad_norm": 0.284566191322515, + "learning_rate": 0.0001974659759811109, + "loss": 1.2398, + "step": 518 + }, + { + "epoch": 0.09985269803096347, + "grad_norm": 0.4321206416751429, + "learning_rate": 0.00019745201635161936, + "loss": 1.2418, + "step": 519 + }, + { + "epoch": 0.10004509243950098, + "grad_norm": 0.35565386440658175, + "learning_rate": 0.00019743801887288763, + "loss": 1.1612, + "step": 520 + }, + { + "epoch": 0.10023748684803847, + "grad_norm": 0.37881504999540855, + "learning_rate": 0.0001974239835503521, + "loss": 1.2318, + "step": 521 + }, + { + "epoch": 0.10042988125657598, + "grad_norm": 0.2980709180313274, + "learning_rate": 0.00019740991038946404, + "loss": 1.1698, + "step": 522 + }, + { + "epoch": 0.10062227566511349, + "grad_norm": 0.27872265876760344, + "learning_rate": 0.0001973957993956892, + "loss": 1.2114, + "step": 523 + }, + { + "epoch": 0.10081467007365098, + "grad_norm": 0.34523418964020874, + "learning_rate": 0.00019738165057450816, + "loss": 1.1714, + "step": 524 + }, + { + "epoch": 0.10100706448218849, + "grad_norm": 0.2934710661951568, + "learning_rate": 0.00019736746393141617, + "loss": 1.1414, + "step": 525 + }, + { + "epoch": 0.10119945889072599, + "grad_norm": 0.36189734017239755, + "learning_rate": 0.00019735323947192316, + "loss": 1.1254, + "step": 526 + }, + { + "epoch": 0.10139185329926349, + "grad_norm": 0.30398986072937584, + "learning_rate": 0.00019733897720155375, + "loss": 1.1203, + "step": 527 + }, + { + "epoch": 0.10158424770780099, + "grad_norm": 0.31062230964476817, + "learning_rate": 0.00019732467712584722, + "loss": 1.2721, + "step": 528 + }, + { + "epoch": 0.1017766421163385, + "grad_norm": 0.38555634475514416, + "learning_rate": 0.0001973103392503576, + "loss": 1.1986, + "step": 529 + }, + { + "epoch": 0.10196903652487599, + "grad_norm": 0.30085992478038925, + "learning_rate": 0.00019729596358065345, + "loss": 1.1228, + "step": 530 + }, + { + "epoch": 0.1021614309334135, + "grad_norm": 0.36534253873049205, + "learning_rate": 0.00019728155012231825, + "loss": 1.2129, + "step": 531 + }, + { + "epoch": 0.102353825341951, + "grad_norm": 0.33291541665040153, + "learning_rate": 0.00019726709888094992, + "loss": 1.1892, + "step": 532 + }, + { + "epoch": 0.1025462197504885, + "grad_norm": 0.31412881601436915, + "learning_rate": 0.0001972526098621612, + "loss": 1.1958, + "step": 533 + }, + { + "epoch": 0.102738614159026, + "grad_norm": 0.32320825465482145, + "learning_rate": 0.00019723808307157948, + "loss": 1.2098, + "step": 534 + }, + { + "epoch": 0.1029310085675635, + "grad_norm": 0.304629295771607, + "learning_rate": 0.00019722351851484676, + "loss": 1.203, + "step": 535 + }, + { + "epoch": 0.10312340297610101, + "grad_norm": 0.25790056795714794, + "learning_rate": 0.00019720891619761974, + "loss": 1.1093, + "step": 536 + }, + { + "epoch": 0.1033157973846385, + "grad_norm": 0.31643287793830754, + "learning_rate": 0.0001971942761255698, + "loss": 1.1214, + "step": 537 + }, + { + "epoch": 0.10350819179317601, + "grad_norm": 0.28974558037543857, + "learning_rate": 0.00019717959830438302, + "loss": 1.2127, + "step": 538 + }, + { + "epoch": 0.10370058620171352, + "grad_norm": 0.3166167560445395, + "learning_rate": 
0.00019716488273976003, + "loss": 1.1031, + "step": 539 + }, + { + "epoch": 0.10389298061025101, + "grad_norm": 0.3114566629739266, + "learning_rate": 0.0001971501294374162, + "loss": 1.2519, + "step": 540 + }, + { + "epoch": 0.10408537501878852, + "grad_norm": 0.36586917893261767, + "learning_rate": 0.00019713533840308157, + "loss": 1.1902, + "step": 541 + }, + { + "epoch": 0.10427776942732601, + "grad_norm": 0.3068846886376433, + "learning_rate": 0.00019712050964250082, + "loss": 1.1257, + "step": 542 + }, + { + "epoch": 0.10447016383586352, + "grad_norm": 0.2996806635344164, + "learning_rate": 0.00019710564316143323, + "loss": 1.1542, + "step": 543 + }, + { + "epoch": 0.10466255824440102, + "grad_norm": 0.3361726600173983, + "learning_rate": 0.00019709073896565275, + "loss": 1.1917, + "step": 544 + }, + { + "epoch": 0.10485495265293852, + "grad_norm": 0.2661540790748322, + "learning_rate": 0.00019707579706094807, + "loss": 1.1324, + "step": 545 + }, + { + "epoch": 0.10504734706147603, + "grad_norm": 0.29974300350194294, + "learning_rate": 0.0001970608174531224, + "loss": 1.16, + "step": 546 + }, + { + "epoch": 0.10523974147001353, + "grad_norm": 0.40286568801141787, + "learning_rate": 0.0001970458001479937, + "loss": 1.1623, + "step": 547 + }, + { + "epoch": 0.10543213587855103, + "grad_norm": 0.3963793410502087, + "learning_rate": 0.00019703074515139445, + "loss": 1.1597, + "step": 548 + }, + { + "epoch": 0.10562453028708853, + "grad_norm": 0.30374122274040766, + "learning_rate": 0.00019701565246917183, + "loss": 1.1892, + "step": 549 + }, + { + "epoch": 0.10581692469562604, + "grad_norm": 0.34744259445162395, + "learning_rate": 0.00019700052210718777, + "loss": 1.0312, + "step": 550 + }, + { + "epoch": 0.10600931910416353, + "grad_norm": 0.237042250882054, + "learning_rate": 0.00019698535407131862, + "loss": 1.108, + "step": 551 + }, + { + "epoch": 0.10620171351270104, + "grad_norm": 0.295802633280917, + "learning_rate": 0.00019697014836745553, + "loss": 1.068, + "step": 552 + }, + { + "epoch": 0.10639410792123855, + "grad_norm": 0.35869197263976094, + "learning_rate": 0.00019695490500150418, + "loss": 1.125, + "step": 553 + }, + { + "epoch": 0.10658650232977604, + "grad_norm": 0.36099269578333354, + "learning_rate": 0.00019693962397938496, + "loss": 1.1599, + "step": 554 + }, + { + "epoch": 0.10677889673831355, + "grad_norm": 0.3698722082113688, + "learning_rate": 0.00019692430530703282, + "loss": 1.2795, + "step": 555 + }, + { + "epoch": 0.10697129114685104, + "grad_norm": 0.33108172990111345, + "learning_rate": 0.00019690894899039734, + "loss": 1.2043, + "step": 556 + }, + { + "epoch": 0.10716368555538855, + "grad_norm": 0.2739477760052005, + "learning_rate": 0.00019689355503544275, + "loss": 1.2031, + "step": 557 + }, + { + "epoch": 0.10735607996392604, + "grad_norm": 0.2573316852826777, + "learning_rate": 0.0001968781234481479, + "loss": 1.1078, + "step": 558 + }, + { + "epoch": 0.10754847437246355, + "grad_norm": 0.396727594032576, + "learning_rate": 0.00019686265423450624, + "loss": 1.1868, + "step": 559 + }, + { + "epoch": 0.10774086878100106, + "grad_norm": 0.31138858864954155, + "learning_rate": 0.00019684714740052583, + "loss": 1.2124, + "step": 560 + }, + { + "epoch": 0.10793326318953855, + "grad_norm": 0.33749954783121383, + "learning_rate": 0.00019683160295222934, + "loss": 1.0093, + "step": 561 + }, + { + "epoch": 0.10812565759807606, + "grad_norm": 0.30779628174143353, + "learning_rate": 0.00019681602089565402, + "loss": 1.1597, + "step": 562 + }, + { + "epoch": 
0.10831805200661355, + "grad_norm": 0.3269447594143558, + "learning_rate": 0.0001968004012368518, + "loss": 1.1063, + "step": 563 + }, + { + "epoch": 0.10851044641515106, + "grad_norm": 0.35023284288802403, + "learning_rate": 0.0001967847439818892, + "loss": 1.2306, + "step": 564 + }, + { + "epoch": 0.10870284082368856, + "grad_norm": 0.3425290709701458, + "learning_rate": 0.00019676904913684727, + "loss": 1.1684, + "step": 565 + }, + { + "epoch": 0.10889523523222606, + "grad_norm": 0.36001084910187836, + "learning_rate": 0.0001967533167078217, + "loss": 1.1282, + "step": 566 + }, + { + "epoch": 0.10908762964076356, + "grad_norm": 0.303046847655797, + "learning_rate": 0.00019673754670092284, + "loss": 1.1031, + "step": 567 + }, + { + "epoch": 0.10928002404930107, + "grad_norm": 0.28181341140110044, + "learning_rate": 0.00019672173912227553, + "loss": 1.2519, + "step": 568 + }, + { + "epoch": 0.10947241845783857, + "grad_norm": 0.3041260099407497, + "learning_rate": 0.0001967058939780193, + "loss": 1.2757, + "step": 569 + }, + { + "epoch": 0.10966481286637607, + "grad_norm": 0.2671824279958905, + "learning_rate": 0.00019669001127430816, + "loss": 1.2552, + "step": 570 + }, + { + "epoch": 0.10985720727491358, + "grad_norm": 0.2687800355481347, + "learning_rate": 0.00019667409101731083, + "loss": 1.1103, + "step": 571 + }, + { + "epoch": 0.11004960168345107, + "grad_norm": 0.3036411946803846, + "learning_rate": 0.0001966581332132105, + "loss": 1.1307, + "step": 572 + }, + { + "epoch": 0.11024199609198858, + "grad_norm": 0.29180830681947484, + "learning_rate": 0.00019664213786820502, + "loss": 1.1453, + "step": 573 + }, + { + "epoch": 0.11043439050052607, + "grad_norm": 0.30847019352865407, + "learning_rate": 0.00019662610498850683, + "loss": 1.2159, + "step": 574 + }, + { + "epoch": 0.11062678490906358, + "grad_norm": 0.3073157926850266, + "learning_rate": 0.00019661003458034285, + "loss": 1.1318, + "step": 575 + }, + { + "epoch": 0.11081917931760109, + "grad_norm": 0.2880595028819985, + "learning_rate": 0.0001965939266499547, + "loss": 1.1767, + "step": 576 + }, + { + "epoch": 0.11101157372613858, + "grad_norm": 0.3128324784161832, + "learning_rate": 0.00019657778120359847, + "loss": 1.1344, + "step": 577 + }, + { + "epoch": 0.11120396813467609, + "grad_norm": 0.34450148018842414, + "learning_rate": 0.0001965615982475449, + "loss": 1.1217, + "step": 578 + }, + { + "epoch": 0.11139636254321358, + "grad_norm": 0.37021048106097726, + "learning_rate": 0.00019654537778807923, + "loss": 1.1722, + "step": 579 + }, + { + "epoch": 0.11158875695175109, + "grad_norm": 0.3221029145705702, + "learning_rate": 0.00019652911983150136, + "loss": 1.2321, + "step": 580 + }, + { + "epoch": 0.11178115136028859, + "grad_norm": 0.29322374483179536, + "learning_rate": 0.0001965128243841256, + "loss": 1.1758, + "step": 581 + }, + { + "epoch": 0.1119735457688261, + "grad_norm": 0.3949924836157765, + "learning_rate": 0.00019649649145228102, + "loss": 1.2365, + "step": 582 + }, + { + "epoch": 0.1121659401773636, + "grad_norm": 0.5050059547697262, + "learning_rate": 0.00019648012104231106, + "loss": 1.1964, + "step": 583 + }, + { + "epoch": 0.1123583345859011, + "grad_norm": 0.35969810608901864, + "learning_rate": 0.00019646371316057383, + "loss": 1.134, + "step": 584 + }, + { + "epoch": 0.1125507289944386, + "grad_norm": 0.27437844541592243, + "learning_rate": 0.00019644726781344195, + "loss": 1.1395, + "step": 585 + }, + { + "epoch": 0.1127431234029761, + "grad_norm": 0.3310514422937076, + "learning_rate": 
0.0001964307850073026, + "loss": 1.2128, + "step": 586 + }, + { + "epoch": 0.1129355178115136, + "grad_norm": 0.32108661804456523, + "learning_rate": 0.00019641426474855758, + "loss": 1.1611, + "step": 587 + }, + { + "epoch": 0.1131279122200511, + "grad_norm": 0.294995552222423, + "learning_rate": 0.00019639770704362307, + "loss": 1.2312, + "step": 588 + }, + { + "epoch": 0.11332030662858861, + "grad_norm": 0.2956510671437227, + "learning_rate": 0.00019638111189892993, + "loss": 1.111, + "step": 589 + }, + { + "epoch": 0.11351270103712612, + "grad_norm": 0.31541196367129015, + "learning_rate": 0.00019636447932092353, + "loss": 1.1694, + "step": 590 + }, + { + "epoch": 0.11370509544566361, + "grad_norm": 0.3513652794131551, + "learning_rate": 0.0001963478093160638, + "loss": 1.1923, + "step": 591 + }, + { + "epoch": 0.11389748985420112, + "grad_norm": 0.2766388671785139, + "learning_rate": 0.00019633110189082512, + "loss": 1.2838, + "step": 592 + }, + { + "epoch": 0.11408988426273861, + "grad_norm": 0.24850099821971594, + "learning_rate": 0.0001963143570516965, + "loss": 1.1378, + "step": 593 + }, + { + "epoch": 0.11428227867127612, + "grad_norm": 0.2517680673946296, + "learning_rate": 0.00019629757480518143, + "loss": 1.1493, + "step": 594 + }, + { + "epoch": 0.11447467307981361, + "grad_norm": 0.33744656655923166, + "learning_rate": 0.00019628075515779796, + "loss": 1.1672, + "step": 595 + }, + { + "epoch": 0.11466706748835112, + "grad_norm": 0.3949598405631972, + "learning_rate": 0.0001962638981160786, + "loss": 1.2305, + "step": 596 + }, + { + "epoch": 0.11485946189688861, + "grad_norm": 0.33116982611180057, + "learning_rate": 0.00019624700368657045, + "loss": 1.1266, + "step": 597 + }, + { + "epoch": 0.11505185630542612, + "grad_norm": 0.31943827662655716, + "learning_rate": 0.00019623007187583515, + "loss": 1.1764, + "step": 598 + }, + { + "epoch": 0.11524425071396363, + "grad_norm": 0.30767537653897864, + "learning_rate": 0.0001962131026904488, + "loss": 1.0916, + "step": 599 + }, + { + "epoch": 0.11543664512250112, + "grad_norm": 0.2967990341230032, + "learning_rate": 0.000196196096137002, + "loss": 1.1939, + "step": 600 + }, + { + "epoch": 0.11562903953103863, + "grad_norm": 0.3323779023336841, + "learning_rate": 0.00019617905222209996, + "loss": 1.2729, + "step": 601 + }, + { + "epoch": 0.11582143393957613, + "grad_norm": 0.2892387381043252, + "learning_rate": 0.00019616197095236227, + "loss": 1.1489, + "step": 602 + }, + { + "epoch": 0.11601382834811363, + "grad_norm": 0.3309218093655699, + "learning_rate": 0.00019614485233442316, + "loss": 1.2117, + "step": 603 + }, + { + "epoch": 0.11620622275665113, + "grad_norm": 0.3418986857756515, + "learning_rate": 0.00019612769637493126, + "loss": 1.1968, + "step": 604 + }, + { + "epoch": 0.11639861716518864, + "grad_norm": 0.3075216824816062, + "learning_rate": 0.0001961105030805498, + "loss": 1.0761, + "step": 605 + }, + { + "epoch": 0.11659101157372614, + "grad_norm": 0.3786085859243023, + "learning_rate": 0.00019609327245795642, + "loss": 1.1554, + "step": 606 + }, + { + "epoch": 0.11678340598226364, + "grad_norm": 0.4261893933120443, + "learning_rate": 0.00019607600451384326, + "loss": 1.1284, + "step": 607 + }, + { + "epoch": 0.11697580039080115, + "grad_norm": 0.4337950865116119, + "learning_rate": 0.00019605869925491706, + "loss": 1.1337, + "step": 608 + }, + { + "epoch": 0.11716819479933864, + "grad_norm": 0.2589664058294049, + "learning_rate": 0.00019604135668789896, + "loss": 1.2624, + "step": 609 + }, + { + "epoch": 
0.11736058920787615, + "grad_norm": 0.3125044485167045, + "learning_rate": 0.00019602397681952463, + "loss": 1.1433, + "step": 610 + }, + { + "epoch": 0.11755298361641364, + "grad_norm": 0.3132992996532646, + "learning_rate": 0.00019600655965654412, + "loss": 1.1506, + "step": 611 + }, + { + "epoch": 0.11774537802495115, + "grad_norm": 0.3475342829246708, + "learning_rate": 0.00019598910520572218, + "loss": 1.1024, + "step": 612 + }, + { + "epoch": 0.11793777243348866, + "grad_norm": 0.3159575506311138, + "learning_rate": 0.00019597161347383784, + "loss": 1.1873, + "step": 613 + }, + { + "epoch": 0.11813016684202615, + "grad_norm": 0.4186559897421048, + "learning_rate": 0.00019595408446768472, + "loss": 1.1431, + "step": 614 + }, + { + "epoch": 0.11832256125056366, + "grad_norm": 0.4317825982999988, + "learning_rate": 0.00019593651819407084, + "loss": 1.1347, + "step": 615 + }, + { + "epoch": 0.11851495565910115, + "grad_norm": 0.2566486445476346, + "learning_rate": 0.0001959189146598188, + "loss": 1.211, + "step": 616 + }, + { + "epoch": 0.11870735006763866, + "grad_norm": 0.3586375479841208, + "learning_rate": 0.00019590127387176555, + "loss": 1.1637, + "step": 617 + }, + { + "epoch": 0.11889974447617616, + "grad_norm": 0.25657719466271134, + "learning_rate": 0.00019588359583676263, + "loss": 1.1877, + "step": 618 + }, + { + "epoch": 0.11909213888471366, + "grad_norm": 0.31393933921644207, + "learning_rate": 0.00019586588056167594, + "loss": 1.1385, + "step": 619 + }, + { + "epoch": 0.11928453329325117, + "grad_norm": 0.3697433194772041, + "learning_rate": 0.0001958481280533859, + "loss": 1.1338, + "step": 620 + }, + { + "epoch": 0.11947692770178867, + "grad_norm": 0.3487066366400267, + "learning_rate": 0.0001958303383187874, + "loss": 1.2398, + "step": 621 + }, + { + "epoch": 0.11966932211032617, + "grad_norm": 0.3608575916161132, + "learning_rate": 0.00019581251136478972, + "loss": 1.0889, + "step": 622 + }, + { + "epoch": 0.11986171651886367, + "grad_norm": 0.3116804686595518, + "learning_rate": 0.00019579464719831667, + "loss": 1.1536, + "step": 623 + }, + { + "epoch": 0.12005411092740118, + "grad_norm": 0.2934512229988681, + "learning_rate": 0.00019577674582630652, + "loss": 1.1699, + "step": 624 + }, + { + "epoch": 0.12024650533593867, + "grad_norm": 0.3360185158130375, + "learning_rate": 0.0001957588072557119, + "loss": 1.1948, + "step": 625 + }, + { + "epoch": 0.12043889974447618, + "grad_norm": 0.2714327580965586, + "learning_rate": 0.0001957408314935, + "loss": 1.213, + "step": 626 + }, + { + "epoch": 0.12063129415301367, + "grad_norm": 0.3423978741298657, + "learning_rate": 0.00019572281854665234, + "loss": 0.9985, + "step": 627 + }, + { + "epoch": 0.12082368856155118, + "grad_norm": 0.31276118810174985, + "learning_rate": 0.00019570476842216498, + "loss": 1.2471, + "step": 628 + }, + { + "epoch": 0.12101608297008869, + "grad_norm": 0.40709474999843404, + "learning_rate": 0.00019568668112704838, + "loss": 1.2399, + "step": 629 + }, + { + "epoch": 0.12120847737862618, + "grad_norm": 0.3063475832949149, + "learning_rate": 0.00019566855666832743, + "loss": 1.1734, + "step": 630 + }, + { + "epoch": 0.12140087178716369, + "grad_norm": 0.45420817847126915, + "learning_rate": 0.00019565039505304145, + "loss": 1.2326, + "step": 631 + }, + { + "epoch": 0.12159326619570118, + "grad_norm": 0.3263169978266844, + "learning_rate": 0.0001956321962882442, + "loss": 1.1265, + "step": 632 + }, + { + "epoch": 0.12178566060423869, + "grad_norm": 0.41784651430485065, + "learning_rate": 
0.0001956139603810039, + "loss": 1.218, + "step": 633 + }, + { + "epoch": 0.12197805501277618, + "grad_norm": 0.2917325675032357, + "learning_rate": 0.00019559568733840314, + "loss": 1.2036, + "step": 634 + }, + { + "epoch": 0.12217044942131369, + "grad_norm": 0.31376546729484533, + "learning_rate": 0.00019557737716753896, + "loss": 1.1802, + "step": 635 + }, + { + "epoch": 0.1223628438298512, + "grad_norm": 0.3082219008645867, + "learning_rate": 0.00019555902987552283, + "loss": 1.2094, + "step": 636 + }, + { + "epoch": 0.1225552382383887, + "grad_norm": 0.39551471644425645, + "learning_rate": 0.00019554064546948064, + "loss": 1.1182, + "step": 637 + }, + { + "epoch": 0.1227476326469262, + "grad_norm": 0.31166773309736256, + "learning_rate": 0.0001955222239565526, + "loss": 1.2583, + "step": 638 + }, + { + "epoch": 0.1229400270554637, + "grad_norm": 0.29134632055315, + "learning_rate": 0.00019550376534389357, + "loss": 1.1864, + "step": 639 + }, + { + "epoch": 0.1231324214640012, + "grad_norm": 0.27423356896383116, + "learning_rate": 0.00019548526963867252, + "loss": 1.0974, + "step": 640 + }, + { + "epoch": 0.1233248158725387, + "grad_norm": 0.3271884183989453, + "learning_rate": 0.00019546673684807302, + "loss": 1.1921, + "step": 641 + }, + { + "epoch": 0.1235172102810762, + "grad_norm": 0.3109567121565924, + "learning_rate": 0.000195448166979293, + "loss": 1.1702, + "step": 642 + }, + { + "epoch": 0.12370960468961371, + "grad_norm": 0.2667237796300738, + "learning_rate": 0.00019542956003954477, + "loss": 1.137, + "step": 643 + }, + { + "epoch": 0.12390199909815121, + "grad_norm": 0.44318329785914495, + "learning_rate": 0.00019541091603605506, + "loss": 1.1498, + "step": 644 + }, + { + "epoch": 0.12409439350668872, + "grad_norm": 0.31705928261829636, + "learning_rate": 0.000195392234976065, + "loss": 1.2049, + "step": 645 + }, + { + "epoch": 0.12428678791522621, + "grad_norm": 0.2822955984588036, + "learning_rate": 0.00019537351686683003, + "loss": 1.2138, + "step": 646 + }, + { + "epoch": 0.12447918232376372, + "grad_norm": 0.4290138786224797, + "learning_rate": 0.00019535476171562012, + "loss": 1.0149, + "step": 647 + }, + { + "epoch": 0.12467157673230121, + "grad_norm": 0.3224504244698334, + "learning_rate": 0.00019533596952971954, + "loss": 1.1165, + "step": 648 + }, + { + "epoch": 0.12486397114083872, + "grad_norm": 0.35160525473344234, + "learning_rate": 0.00019531714031642696, + "loss": 1.1436, + "step": 649 + }, + { + "epoch": 0.1250563655493762, + "grad_norm": 0.38812851323647324, + "learning_rate": 0.0001952982740830554, + "loss": 1.1181, + "step": 650 + }, + { + "epoch": 0.12524875995791374, + "grad_norm": 0.2837084654576034, + "learning_rate": 0.00019527937083693231, + "loss": 1.2084, + "step": 651 + }, + { + "epoch": 0.12544115436645123, + "grad_norm": 0.2984545559635266, + "learning_rate": 0.0001952604305853995, + "loss": 1.1595, + "step": 652 + }, + { + "epoch": 0.12563354877498872, + "grad_norm": 0.27005753072057015, + "learning_rate": 0.00019524145333581317, + "loss": 1.1116, + "step": 653 + }, + { + "epoch": 0.12582594318352622, + "grad_norm": 0.35325056392352405, + "learning_rate": 0.00019522243909554377, + "loss": 1.0745, + "step": 654 + }, + { + "epoch": 0.12601833759206374, + "grad_norm": 0.32629179491927846, + "learning_rate": 0.00019520338787197629, + "loss": 1.1742, + "step": 655 + }, + { + "epoch": 0.12621073200060123, + "grad_norm": 0.33635967732822475, + "learning_rate": 0.00019518429967251, + "loss": 1.2089, + "step": 656 + }, + { + "epoch": 
0.12640312640913873, + "grad_norm": 0.2858003747033703, + "learning_rate": 0.00019516517450455853, + "loss": 1.2316, + "step": 657 + }, + { + "epoch": 0.12659552081767625, + "grad_norm": 0.3409332228422794, + "learning_rate": 0.00019514601237554988, + "loss": 1.2583, + "step": 658 + }, + { + "epoch": 0.12678791522621374, + "grad_norm": 0.4402773693540164, + "learning_rate": 0.00019512681329292636, + "loss": 1.2327, + "step": 659 + }, + { + "epoch": 0.12698030963475124, + "grad_norm": 0.3900372680219423, + "learning_rate": 0.00019510757726414472, + "loss": 1.2713, + "step": 660 + }, + { + "epoch": 0.12717270404328873, + "grad_norm": 0.4246181197809418, + "learning_rate": 0.000195088304296676, + "loss": 1.1656, + "step": 661 + }, + { + "epoch": 0.12736509845182625, + "grad_norm": 0.31519949057608326, + "learning_rate": 0.00019506899439800557, + "loss": 1.1441, + "step": 662 + }, + { + "epoch": 0.12755749286036375, + "grad_norm": 0.35574730499498314, + "learning_rate": 0.0001950496475756332, + "loss": 1.1566, + "step": 663 + }, + { + "epoch": 0.12774988726890124, + "grad_norm": 0.4397955821474415, + "learning_rate": 0.000195030263837073, + "loss": 1.1647, + "step": 664 + }, + { + "epoch": 0.12794228167743876, + "grad_norm": 0.32612620971918455, + "learning_rate": 0.00019501084318985335, + "loss": 1.1546, + "step": 665 + }, + { + "epoch": 0.12813467608597626, + "grad_norm": 0.26818914422362755, + "learning_rate": 0.000194991385641517, + "loss": 1.2254, + "step": 666 + }, + { + "epoch": 0.12832707049451375, + "grad_norm": 0.2658810327746161, + "learning_rate": 0.00019497189119962105, + "loss": 1.191, + "step": 667 + }, + { + "epoch": 0.12851946490305124, + "grad_norm": 0.3179415955938622, + "learning_rate": 0.00019495235987173693, + "loss": 1.1735, + "step": 668 + }, + { + "epoch": 0.12871185931158877, + "grad_norm": 0.30240306152889646, + "learning_rate": 0.00019493279166545038, + "loss": 1.0778, + "step": 669 + }, + { + "epoch": 0.12890425372012626, + "grad_norm": 0.5190729683524917, + "learning_rate": 0.00019491318658836142, + "loss": 1.0943, + "step": 670 + }, + { + "epoch": 0.12909664812866375, + "grad_norm": 0.34474065356984984, + "learning_rate": 0.0001948935446480845, + "loss": 1.1523, + "step": 671 + }, + { + "epoch": 0.12928904253720128, + "grad_norm": 0.3722781054637813, + "learning_rate": 0.0001948738658522483, + "loss": 1.1652, + "step": 672 + }, + { + "epoch": 0.12948143694573877, + "grad_norm": 0.36891802402904267, + "learning_rate": 0.00019485415020849582, + "loss": 1.1259, + "step": 673 + }, + { + "epoch": 0.12967383135427626, + "grad_norm": 0.2963423815169123, + "learning_rate": 0.0001948343977244844, + "loss": 1.2379, + "step": 674 + }, + { + "epoch": 0.12986622576281376, + "grad_norm": 0.3373200157829875, + "learning_rate": 0.00019481460840788573, + "loss": 1.1569, + "step": 675 + }, + { + "epoch": 0.13005862017135128, + "grad_norm": 0.2519551023236678, + "learning_rate": 0.00019479478226638565, + "loss": 1.187, + "step": 676 + }, + { + "epoch": 0.13025101457988877, + "grad_norm": 0.3597424367996489, + "learning_rate": 0.0001947749193076845, + "loss": 1.1901, + "step": 677 + }, + { + "epoch": 0.13044340898842627, + "grad_norm": 0.25548776537217965, + "learning_rate": 0.00019475501953949672, + "loss": 1.2037, + "step": 678 + }, + { + "epoch": 0.13063580339696376, + "grad_norm": 0.3216285266643529, + "learning_rate": 0.00019473508296955126, + "loss": 1.1111, + "step": 679 + }, + { + "epoch": 0.13082819780550128, + "grad_norm": 0.28443045254975347, + "learning_rate": 
0.0001947151096055912, + "loss": 1.1015, + "step": 680 + }, + { + "epoch": 0.13102059221403878, + "grad_norm": 0.3023199152678026, + "learning_rate": 0.00019469509945537397, + "loss": 1.1786, + "step": 681 + }, + { + "epoch": 0.13121298662257627, + "grad_norm": 0.2601298485674686, + "learning_rate": 0.00019467505252667127, + "loss": 1.1047, + "step": 682 + }, + { + "epoch": 0.1314053810311138, + "grad_norm": 0.2677502741737002, + "learning_rate": 0.0001946549688272691, + "loss": 1.1468, + "step": 683 + }, + { + "epoch": 0.1315977754396513, + "grad_norm": 0.2967509161856476, + "learning_rate": 0.0001946348483649678, + "loss": 1.1309, + "step": 684 + }, + { + "epoch": 0.13179016984818878, + "grad_norm": 0.3527712165405441, + "learning_rate": 0.0001946146911475818, + "loss": 1.166, + "step": 685 + }, + { + "epoch": 0.13198256425672628, + "grad_norm": 0.3550092050671919, + "learning_rate": 0.00019459449718294008, + "loss": 1.1108, + "step": 686 + }, + { + "epoch": 0.1321749586652638, + "grad_norm": 0.2724946168937622, + "learning_rate": 0.00019457426647888563, + "loss": 1.1882, + "step": 687 + }, + { + "epoch": 0.1323673530738013, + "grad_norm": 0.33903673003058277, + "learning_rate": 0.00019455399904327585, + "loss": 1.2428, + "step": 688 + }, + { + "epoch": 0.13255974748233879, + "grad_norm": 0.32867101987082065, + "learning_rate": 0.00019453369488398238, + "loss": 1.1282, + "step": 689 + }, + { + "epoch": 0.1327521418908763, + "grad_norm": 0.233291729397467, + "learning_rate": 0.00019451335400889116, + "loss": 1.2656, + "step": 690 + }, + { + "epoch": 0.1329445362994138, + "grad_norm": 0.2632137189381223, + "learning_rate": 0.00019449297642590228, + "loss": 1.1921, + "step": 691 + }, + { + "epoch": 0.1331369307079513, + "grad_norm": 0.3454510077016807, + "learning_rate": 0.00019447256214293025, + "loss": 1.2308, + "step": 692 + }, + { + "epoch": 0.1333293251164888, + "grad_norm": 0.4226280707358105, + "learning_rate": 0.00019445211116790362, + "loss": 1.1702, + "step": 693 + }, + { + "epoch": 0.1335217195250263, + "grad_norm": 0.29712061464132294, + "learning_rate": 0.00019443162350876546, + "loss": 1.2282, + "step": 694 + }, + { + "epoch": 0.1337141139335638, + "grad_norm": 0.389303579201947, + "learning_rate": 0.0001944110991734728, + "loss": 1.15, + "step": 695 + }, + { + "epoch": 0.1339065083421013, + "grad_norm": 0.26901192469594143, + "learning_rate": 0.00019439053816999716, + "loss": 1.1921, + "step": 696 + }, + { + "epoch": 0.13409890275063882, + "grad_norm": 0.25690282098352424, + "learning_rate": 0.00019436994050632414, + "loss": 1.1227, + "step": 697 + }, + { + "epoch": 0.13429129715917631, + "grad_norm": 0.32254783607969956, + "learning_rate": 0.00019434930619045368, + "loss": 1.1631, + "step": 698 + }, + { + "epoch": 0.1344836915677138, + "grad_norm": 0.25296267034031994, + "learning_rate": 0.00019432863523039987, + "loss": 1.1099, + "step": 699 + }, + { + "epoch": 0.1346760859762513, + "grad_norm": 0.31051863649002237, + "learning_rate": 0.00019430792763419107, + "loss": 1.1477, + "step": 700 + }, + { + "epoch": 0.13486848038478882, + "grad_norm": 0.3935551030369805, + "learning_rate": 0.00019428718340986988, + "loss": 1.1295, + "step": 701 + }, + { + "epoch": 0.13506087479332632, + "grad_norm": 0.3334321571060536, + "learning_rate": 0.00019426640256549313, + "loss": 1.1976, + "step": 702 + }, + { + "epoch": 0.1352532692018638, + "grad_norm": 0.3671116292804533, + "learning_rate": 0.00019424558510913186, + "loss": 1.2061, + "step": 703 + }, + { + "epoch": 0.13544566361040133, 
+ "grad_norm": 0.31295502063785, + "learning_rate": 0.00019422473104887134, + "loss": 1.1298, + "step": 704 + }, + { + "epoch": 0.13563805801893883, + "grad_norm": 0.30832991842179336, + "learning_rate": 0.000194203840392811, + "loss": 1.2077, + "step": 705 + }, + { + "epoch": 0.13583045242747632, + "grad_norm": 0.3441228111516066, + "learning_rate": 0.00019418291314906457, + "loss": 1.139, + "step": 706 + }, + { + "epoch": 0.13602284683601382, + "grad_norm": 0.29980184796539466, + "learning_rate": 0.00019416194932576, + "loss": 1.1696, + "step": 707 + }, + { + "epoch": 0.13621524124455134, + "grad_norm": 0.37352958842959627, + "learning_rate": 0.00019414094893103928, + "loss": 1.2371, + "step": 708 + }, + { + "epoch": 0.13640763565308883, + "grad_norm": 0.2993593241103526, + "learning_rate": 0.00019411991197305879, + "loss": 1.1402, + "step": 709 + }, + { + "epoch": 0.13660003006162633, + "grad_norm": 0.30263817504707197, + "learning_rate": 0.00019409883845998904, + "loss": 1.2487, + "step": 710 + }, + { + "epoch": 0.13679242447016385, + "grad_norm": 0.30046276364974284, + "learning_rate": 0.00019407772840001472, + "loss": 1.1926, + "step": 711 + }, + { + "epoch": 0.13698481887870134, + "grad_norm": 0.3057603250342118, + "learning_rate": 0.00019405658180133477, + "loss": 1.2414, + "step": 712 + }, + { + "epoch": 0.13717721328723884, + "grad_norm": 0.3296249849908774, + "learning_rate": 0.00019403539867216224, + "loss": 1.0804, + "step": 713 + }, + { + "epoch": 0.13736960769577633, + "grad_norm": 0.2865861175683603, + "learning_rate": 0.00019401417902072446, + "loss": 1.1768, + "step": 714 + }, + { + "epoch": 0.13756200210431385, + "grad_norm": 0.33154873450288447, + "learning_rate": 0.00019399292285526284, + "loss": 0.9976, + "step": 715 + }, + { + "epoch": 0.13775439651285135, + "grad_norm": 0.3074844848363132, + "learning_rate": 0.00019397163018403308, + "loss": 1.2146, + "step": 716 + }, + { + "epoch": 0.13794679092138884, + "grad_norm": 0.42828725391083877, + "learning_rate": 0.00019395030101530502, + "loss": 1.2077, + "step": 717 + }, + { + "epoch": 0.13813918532992636, + "grad_norm": 0.31963625158099845, + "learning_rate": 0.0001939289353573626, + "loss": 1.217, + "step": 718 + }, + { + "epoch": 0.13833157973846386, + "grad_norm": 0.27900395869668665, + "learning_rate": 0.00019390753321850404, + "loss": 1.1168, + "step": 719 + }, + { + "epoch": 0.13852397414700135, + "grad_norm": 0.3663090546371539, + "learning_rate": 0.00019388609460704168, + "loss": 1.1639, + "step": 720 + }, + { + "epoch": 0.13871636855553884, + "grad_norm": 0.31428494460147827, + "learning_rate": 0.000193864619531302, + "loss": 1.1102, + "step": 721 + }, + { + "epoch": 0.13890876296407637, + "grad_norm": 0.33792751647033115, + "learning_rate": 0.00019384310799962573, + "loss": 1.2313, + "step": 722 + }, + { + "epoch": 0.13910115737261386, + "grad_norm": 0.28551842776909775, + "learning_rate": 0.00019382156002036764, + "loss": 1.0712, + "step": 723 + }, + { + "epoch": 0.13929355178115135, + "grad_norm": 0.341504059854706, + "learning_rate": 0.00019379997560189675, + "loss": 1.1658, + "step": 724 + }, + { + "epoch": 0.13948594618968888, + "grad_norm": 0.35086663643353283, + "learning_rate": 0.0001937783547525962, + "loss": 1.1607, + "step": 725 + }, + { + "epoch": 0.13967834059822637, + "grad_norm": 0.28325989689285075, + "learning_rate": 0.00019375669748086327, + "loss": 1.1789, + "step": 726 + }, + { + "epoch": 0.13987073500676386, + "grad_norm": 0.33350804230008896, + "learning_rate": 0.00019373500379510938, + 
"loss": 1.0801, + "step": 727 + }, + { + "epoch": 0.14006312941530136, + "grad_norm": 0.32123906832371024, + "learning_rate": 0.00019371327370376016, + "loss": 1.0821, + "step": 728 + }, + { + "epoch": 0.14025552382383888, + "grad_norm": 0.28028449090200813, + "learning_rate": 0.00019369150721525527, + "loss": 1.1797, + "step": 729 + }, + { + "epoch": 0.14044791823237637, + "grad_norm": 0.3075175308222164, + "learning_rate": 0.0001936697043380486, + "loss": 1.0829, + "step": 730 + }, + { + "epoch": 0.14064031264091387, + "grad_norm": 0.3162620198508448, + "learning_rate": 0.00019364786508060808, + "loss": 1.158, + "step": 731 + }, + { + "epoch": 0.1408327070494514, + "grad_norm": 0.41615403042261184, + "learning_rate": 0.0001936259894514159, + "loss": 1.2405, + "step": 732 + }, + { + "epoch": 0.14102510145798888, + "grad_norm": 0.29028201429154643, + "learning_rate": 0.00019360407745896827, + "loss": 1.1307, + "step": 733 + }, + { + "epoch": 0.14121749586652638, + "grad_norm": 0.3093500865259683, + "learning_rate": 0.00019358212911177556, + "loss": 1.1043, + "step": 734 + }, + { + "epoch": 0.14140989027506387, + "grad_norm": 0.29262454000494764, + "learning_rate": 0.0001935601444183622, + "loss": 1.2763, + "step": 735 + }, + { + "epoch": 0.1416022846836014, + "grad_norm": 0.3386241705717509, + "learning_rate": 0.0001935381233872669, + "loss": 1.1684, + "step": 736 + }, + { + "epoch": 0.1417946790921389, + "grad_norm": 0.30740449007229154, + "learning_rate": 0.00019351606602704228, + "loss": 1.0948, + "step": 737 + }, + { + "epoch": 0.14198707350067638, + "grad_norm": 0.33655278237879394, + "learning_rate": 0.0001934939723462552, + "loss": 1.1573, + "step": 738 + }, + { + "epoch": 0.14217946790921387, + "grad_norm": 0.26829855266766456, + "learning_rate": 0.00019347184235348662, + "loss": 1.1148, + "step": 739 + }, + { + "epoch": 0.1423718623177514, + "grad_norm": 0.40228640762909235, + "learning_rate": 0.00019344967605733153, + "loss": 1.0593, + "step": 740 + }, + { + "epoch": 0.1425642567262889, + "grad_norm": 0.3036716964034863, + "learning_rate": 0.0001934274734663991, + "loss": 1.2155, + "step": 741 + }, + { + "epoch": 0.14275665113482638, + "grad_norm": 0.33668244178189094, + "learning_rate": 0.00019340523458931253, + "loss": 1.1548, + "step": 742 + }, + { + "epoch": 0.1429490455433639, + "grad_norm": 0.3056566736084597, + "learning_rate": 0.00019338295943470914, + "loss": 0.9979, + "step": 743 + }, + { + "epoch": 0.1431414399519014, + "grad_norm": 0.3658077888999858, + "learning_rate": 0.00019336064801124035, + "loss": 1.2117, + "step": 744 + }, + { + "epoch": 0.1433338343604389, + "grad_norm": 0.23031350279498447, + "learning_rate": 0.0001933383003275717, + "loss": 1.1979, + "step": 745 + }, + { + "epoch": 0.1435262287689764, + "grad_norm": 0.28872191286584314, + "learning_rate": 0.0001933159163923827, + "loss": 1.1149, + "step": 746 + }, + { + "epoch": 0.1437186231775139, + "grad_norm": 0.2881579427563588, + "learning_rate": 0.00019329349621436708, + "loss": 1.1209, + "step": 747 + }, + { + "epoch": 0.1439110175860514, + "grad_norm": 0.29406143250864686, + "learning_rate": 0.00019327103980223254, + "loss": 1.136, + "step": 748 + }, + { + "epoch": 0.1441034119945889, + "grad_norm": 0.23922482328152583, + "learning_rate": 0.0001932485471647009, + "loss": 1.0635, + "step": 749 + }, + { + "epoch": 0.14429580640312642, + "grad_norm": 0.4006690705666417, + "learning_rate": 0.00019322601831050804, + "loss": 1.0788, + "step": 750 + }, + { + "epoch": 0.1444882008116639, + "grad_norm": 
0.24385411707755578, + "learning_rate": 0.00019320345324840395, + "loss": 1.2207, + "step": 751 + }, + { + "epoch": 0.1446805952202014, + "grad_norm": 0.2973876722404917, + "learning_rate": 0.00019318085198715256, + "loss": 1.1887, + "step": 752 + }, + { + "epoch": 0.1448729896287389, + "grad_norm": 0.3161938501268713, + "learning_rate": 0.000193158214535532, + "loss": 1.1368, + "step": 753 + }, + { + "epoch": 0.14506538403727642, + "grad_norm": 0.32930645037368317, + "learning_rate": 0.00019313554090233436, + "loss": 1.113, + "step": 754 + }, + { + "epoch": 0.14525777844581392, + "grad_norm": 0.33772301031487795, + "learning_rate": 0.00019311283109636583, + "loss": 1.1531, + "step": 755 + }, + { + "epoch": 0.1454501728543514, + "grad_norm": 0.3030071615648059, + "learning_rate": 0.00019309008512644667, + "loss": 1.2325, + "step": 756 + }, + { + "epoch": 0.14564256726288893, + "grad_norm": 0.29485903432025434, + "learning_rate": 0.0001930673030014111, + "loss": 1.0776, + "step": 757 + }, + { + "epoch": 0.14583496167142643, + "grad_norm": 0.3342344262001743, + "learning_rate": 0.00019304448473010747, + "loss": 1.0979, + "step": 758 + }, + { + "epoch": 0.14602735607996392, + "grad_norm": 0.3028861389284532, + "learning_rate": 0.00019302163032139814, + "loss": 1.0541, + "step": 759 + }, + { + "epoch": 0.14621975048850142, + "grad_norm": 0.3698971219635232, + "learning_rate": 0.00019299873978415947, + "loss": 1.2273, + "step": 760 + }, + { + "epoch": 0.14641214489703894, + "grad_norm": 0.38111657029746016, + "learning_rate": 0.00019297581312728186, + "loss": 1.0848, + "step": 761 + }, + { + "epoch": 0.14660453930557643, + "grad_norm": 0.3025925274883586, + "learning_rate": 0.0001929528503596698, + "loss": 1.0944, + "step": 762 + }, + { + "epoch": 0.14679693371411393, + "grad_norm": 0.3663075568500179, + "learning_rate": 0.00019292985149024178, + "loss": 1.1709, + "step": 763 + }, + { + "epoch": 0.14698932812265145, + "grad_norm": 0.2983274522938622, + "learning_rate": 0.00019290681652793027, + "loss": 1.0795, + "step": 764 + }, + { + "epoch": 0.14718172253118894, + "grad_norm": 0.3553278650475889, + "learning_rate": 0.0001928837454816818, + "loss": 1.1076, + "step": 765 + }, + { + "epoch": 0.14737411693972644, + "grad_norm": 0.26420511967690036, + "learning_rate": 0.00019286063836045685, + "loss": 1.24, + "step": 766 + }, + { + "epoch": 0.14756651134826393, + "grad_norm": 0.38557410354088034, + "learning_rate": 0.00019283749517322999, + "loss": 1.1849, + "step": 767 + }, + { + "epoch": 0.14775890575680145, + "grad_norm": 0.3733684238269839, + "learning_rate": 0.00019281431592898978, + "loss": 1.0788, + "step": 768 + }, + { + "epoch": 0.14795130016533894, + "grad_norm": 0.3514037785406932, + "learning_rate": 0.0001927911006367388, + "loss": 1.1839, + "step": 769 + }, + { + "epoch": 0.14814369457387644, + "grad_norm": 0.41333244132631397, + "learning_rate": 0.0001927678493054935, + "loss": 1.0934, + "step": 770 + }, + { + "epoch": 0.14833608898241396, + "grad_norm": 0.28091746417692043, + "learning_rate": 0.00019274456194428454, + "loss": 1.2135, + "step": 771 + }, + { + "epoch": 0.14852848339095145, + "grad_norm": 0.33204022967467484, + "learning_rate": 0.0001927212385621564, + "loss": 1.2235, + "step": 772 + }, + { + "epoch": 0.14872087779948895, + "grad_norm": 0.2821537355021002, + "learning_rate": 0.00019269787916816763, + "loss": 1.0979, + "step": 773 + }, + { + "epoch": 0.14891327220802644, + "grad_norm": 0.324873916493364, + "learning_rate": 0.00019267448377139075, + "loss": 1.0753, + 
"step": 774 + }, + { + "epoch": 0.14910566661656396, + "grad_norm": 0.33646229080038254, + "learning_rate": 0.00019265105238091228, + "loss": 1.1231, + "step": 775 + }, + { + "epoch": 0.14929806102510146, + "grad_norm": 0.30687158590591035, + "learning_rate": 0.00019262758500583263, + "loss": 1.1744, + "step": 776 + }, + { + "epoch": 0.14949045543363895, + "grad_norm": 0.27220475967262675, + "learning_rate": 0.00019260408165526637, + "loss": 1.1274, + "step": 777 + }, + { + "epoch": 0.14968284984217647, + "grad_norm": 0.4015295724178326, + "learning_rate": 0.00019258054233834183, + "loss": 1.1402, + "step": 778 + }, + { + "epoch": 0.14987524425071397, + "grad_norm": 0.38173730816352625, + "learning_rate": 0.00019255696706420148, + "loss": 1.1096, + "step": 779 + }, + { + "epoch": 0.15006763865925146, + "grad_norm": 0.36746617850090224, + "learning_rate": 0.00019253335584200164, + "loss": 1.1574, + "step": 780 + }, + { + "epoch": 0.15026003306778896, + "grad_norm": 0.39411538039792743, + "learning_rate": 0.00019250970868091267, + "loss": 1.1671, + "step": 781 + }, + { + "epoch": 0.15045242747632648, + "grad_norm": 0.2777342437180254, + "learning_rate": 0.00019248602559011882, + "loss": 1.1785, + "step": 782 + }, + { + "epoch": 0.15064482188486397, + "grad_norm": 0.29772234080795185, + "learning_rate": 0.00019246230657881834, + "loss": 1.1733, + "step": 783 + }, + { + "epoch": 0.15083721629340147, + "grad_norm": 0.3734372712775911, + "learning_rate": 0.00019243855165622343, + "loss": 1.1536, + "step": 784 + }, + { + "epoch": 0.151029610701939, + "grad_norm": 0.3132685824888845, + "learning_rate": 0.00019241476083156026, + "loss": 1.1949, + "step": 785 + }, + { + "epoch": 0.15122200511047648, + "grad_norm": 0.30393801252644465, + "learning_rate": 0.00019239093411406886, + "loss": 1.2607, + "step": 786 + }, + { + "epoch": 0.15141439951901398, + "grad_norm": 0.3348326895793918, + "learning_rate": 0.00019236707151300327, + "loss": 1.278, + "step": 787 + }, + { + "epoch": 0.15160679392755147, + "grad_norm": 0.32052908291461146, + "learning_rate": 0.00019234317303763144, + "loss": 1.0974, + "step": 788 + }, + { + "epoch": 0.151799188336089, + "grad_norm": 0.2877592148113061, + "learning_rate": 0.00019231923869723528, + "loss": 1.3036, + "step": 789 + }, + { + "epoch": 0.15199158274462649, + "grad_norm": 0.2890470422306886, + "learning_rate": 0.0001922952685011106, + "loss": 1.0717, + "step": 790 + }, + { + "epoch": 0.15218397715316398, + "grad_norm": 0.24426070968325814, + "learning_rate": 0.00019227126245856716, + "loss": 1.105, + "step": 791 + }, + { + "epoch": 0.1523763715617015, + "grad_norm": 0.3547678254358933, + "learning_rate": 0.00019224722057892862, + "loss": 1.2498, + "step": 792 + }, + { + "epoch": 0.152568765970239, + "grad_norm": 0.34701905568691055, + "learning_rate": 0.00019222314287153255, + "loss": 1.1205, + "step": 793 + }, + { + "epoch": 0.1527611603787765, + "grad_norm": 0.255481892215242, + "learning_rate": 0.00019219902934573048, + "loss": 1.084, + "step": 794 + }, + { + "epoch": 0.15295355478731398, + "grad_norm": 0.34174367123324506, + "learning_rate": 0.00019217488001088784, + "loss": 1.1106, + "step": 795 + }, + { + "epoch": 0.1531459491958515, + "grad_norm": 0.27720599753661185, + "learning_rate": 0.00019215069487638395, + "loss": 1.1615, + "step": 796 + }, + { + "epoch": 0.153338343604389, + "grad_norm": 0.29089081360146063, + "learning_rate": 0.000192126473951612, + "loss": 1.1356, + "step": 797 + }, + { + "epoch": 0.1535307380129265, + "grad_norm": 
0.2809112708989489, + "learning_rate": 0.0001921022172459791, + "loss": 1.1027, + "step": 798 + }, + { + "epoch": 0.15372313242146401, + "grad_norm": 0.2597355632766002, + "learning_rate": 0.00019207792476890641, + "loss": 1.0677, + "step": 799 + }, + { + "epoch": 0.1539155268300015, + "grad_norm": 0.33815897510144577, + "learning_rate": 0.00019205359652982868, + "loss": 1.1062, + "step": 800 + }, + { + "epoch": 0.154107921238539, + "grad_norm": 0.34276618435363154, + "learning_rate": 0.0001920292325381948, + "loss": 1.1211, + "step": 801 + }, + { + "epoch": 0.1543003156470765, + "grad_norm": 0.29298351083394736, + "learning_rate": 0.00019200483280346748, + "loss": 1.0546, + "step": 802 + }, + { + "epoch": 0.15449271005561402, + "grad_norm": 0.26131048122503264, + "learning_rate": 0.00019198039733512326, + "loss": 1.1759, + "step": 803 + }, + { + "epoch": 0.1546851044641515, + "grad_norm": 0.32613803501487826, + "learning_rate": 0.00019195592614265261, + "loss": 1.1096, + "step": 804 + }, + { + "epoch": 0.154877498872689, + "grad_norm": 0.4607077594257645, + "learning_rate": 0.00019193141923555984, + "loss": 1.0602, + "step": 805 + }, + { + "epoch": 0.1550698932812265, + "grad_norm": 0.44246263863816204, + "learning_rate": 0.00019190687662336316, + "loss": 1.1464, + "step": 806 + }, + { + "epoch": 0.15526228768976402, + "grad_norm": 0.299767950010681, + "learning_rate": 0.00019188229831559466, + "loss": 1.152, + "step": 807 + }, + { + "epoch": 0.15545468209830152, + "grad_norm": 0.31157228663338715, + "learning_rate": 0.00019185768432180024, + "loss": 1.1159, + "step": 808 + }, + { + "epoch": 0.155647076506839, + "grad_norm": 0.31860625960099725, + "learning_rate": 0.00019183303465153973, + "loss": 1.1988, + "step": 809 + }, + { + "epoch": 0.15583947091537653, + "grad_norm": 0.37974976781254716, + "learning_rate": 0.00019180834931438673, + "loss": 1.1397, + "step": 810 + }, + { + "epoch": 0.15603186532391403, + "grad_norm": 0.27548286783184456, + "learning_rate": 0.00019178362831992878, + "loss": 1.1609, + "step": 811 + }, + { + "epoch": 0.15622425973245152, + "grad_norm": 0.3175285807908584, + "learning_rate": 0.00019175887167776717, + "loss": 1.2606, + "step": 812 + }, + { + "epoch": 0.15641665414098901, + "grad_norm": 0.29071790808442327, + "learning_rate": 0.0001917340793975172, + "loss": 1.0629, + "step": 813 + }, + { + "epoch": 0.15660904854952654, + "grad_norm": 0.324071592911566, + "learning_rate": 0.0001917092514888078, + "loss": 1.1034, + "step": 814 + }, + { + "epoch": 0.15680144295806403, + "grad_norm": 0.32818180341941944, + "learning_rate": 0.00019168438796128193, + "loss": 1.0915, + "step": 815 + }, + { + "epoch": 0.15699383736660152, + "grad_norm": 0.3068324250807365, + "learning_rate": 0.0001916594888245962, + "loss": 1.1302, + "step": 816 + }, + { + "epoch": 0.15718623177513905, + "grad_norm": 0.31647388090551803, + "learning_rate": 0.00019163455408842124, + "loss": 1.0526, + "step": 817 + }, + { + "epoch": 0.15737862618367654, + "grad_norm": 0.28980654638138603, + "learning_rate": 0.00019160958376244136, + "loss": 1.0557, + "step": 818 + }, + { + "epoch": 0.15757102059221403, + "grad_norm": 0.32202286568568234, + "learning_rate": 0.00019158457785635477, + "loss": 1.1345, + "step": 819 + }, + { + "epoch": 0.15776341500075153, + "grad_norm": 0.31924262071093423, + "learning_rate": 0.00019155953637987346, + "loss": 1.1974, + "step": 820 + }, + { + "epoch": 0.15795580940928905, + "grad_norm": 0.4223233232381367, + "learning_rate": 0.0001915344593427233, + "loss": 1.1268, + 
"step": 821 + }, + { + "epoch": 0.15814820381782654, + "grad_norm": 0.27051746451790504, + "learning_rate": 0.00019150934675464382, + "loss": 1.1094, + "step": 822 + }, + { + "epoch": 0.15834059822636404, + "grad_norm": 0.45168762181674427, + "learning_rate": 0.00019148419862538858, + "loss": 1.1691, + "step": 823 + }, + { + "epoch": 0.15853299263490156, + "grad_norm": 0.31748888324701396, + "learning_rate": 0.00019145901496472472, + "loss": 1.0949, + "step": 824 + }, + { + "epoch": 0.15872538704343905, + "grad_norm": 0.310329531497297, + "learning_rate": 0.00019143379578243335, + "loss": 1.1133, + "step": 825 + }, + { + "epoch": 0.15891778145197655, + "grad_norm": 0.2846016287860886, + "learning_rate": 0.0001914085410883093, + "loss": 1.1618, + "step": 826 + }, + { + "epoch": 0.15911017586051404, + "grad_norm": 0.3284237977185717, + "learning_rate": 0.00019138325089216118, + "loss": 1.1741, + "step": 827 + }, + { + "epoch": 0.15930257026905156, + "grad_norm": 0.2975616368168792, + "learning_rate": 0.00019135792520381142, + "loss": 1.1328, + "step": 828 + }, + { + "epoch": 0.15949496467758906, + "grad_norm": 0.26313851589609943, + "learning_rate": 0.00019133256403309625, + "loss": 1.11, + "step": 829 + }, + { + "epoch": 0.15968735908612655, + "grad_norm": 0.33184440012914745, + "learning_rate": 0.0001913071673898656, + "loss": 1.2214, + "step": 830 + }, + { + "epoch": 0.15987975349466407, + "grad_norm": 0.34785475366882723, + "learning_rate": 0.0001912817352839833, + "loss": 1.1308, + "step": 831 + }, + { + "epoch": 0.16007214790320157, + "grad_norm": 0.3129360536748428, + "learning_rate": 0.00019125626772532683, + "loss": 1.1295, + "step": 832 + }, + { + "epoch": 0.16026454231173906, + "grad_norm": 0.32037099108764544, + "learning_rate": 0.00019123076472378752, + "loss": 1.189, + "step": 833 + }, + { + "epoch": 0.16045693672027656, + "grad_norm": 0.3289132196265171, + "learning_rate": 0.00019120522628927045, + "loss": 1.0475, + "step": 834 + }, + { + "epoch": 0.16064933112881408, + "grad_norm": 0.27198972535598354, + "learning_rate": 0.00019117965243169445, + "loss": 1.0564, + "step": 835 + }, + { + "epoch": 0.16084172553735157, + "grad_norm": 0.3198607890298292, + "learning_rate": 0.00019115404316099213, + "loss": 1.1272, + "step": 836 + }, + { + "epoch": 0.16103411994588906, + "grad_norm": 0.2450658439115008, + "learning_rate": 0.00019112839848710978, + "loss": 1.0965, + "step": 837 + }, + { + "epoch": 0.1612265143544266, + "grad_norm": 0.36560766257746974, + "learning_rate": 0.00019110271842000755, + "loss": 1.1042, + "step": 838 + }, + { + "epoch": 0.16141890876296408, + "grad_norm": 0.28244008536255183, + "learning_rate": 0.00019107700296965927, + "loss": 1.216, + "step": 839 + }, + { + "epoch": 0.16161130317150157, + "grad_norm": 0.3797374199336646, + "learning_rate": 0.0001910512521460525, + "loss": 1.1358, + "step": 840 + }, + { + "epoch": 0.16180369758003907, + "grad_norm": 0.2863827331712721, + "learning_rate": 0.00019102546595918857, + "loss": 1.1834, + "step": 841 + }, + { + "epoch": 0.1619960919885766, + "grad_norm": 0.44116663877154816, + "learning_rate": 0.00019099964441908257, + "loss": 1.1378, + "step": 842 + }, + { + "epoch": 0.16218848639711408, + "grad_norm": 0.3182949726587924, + "learning_rate": 0.00019097378753576324, + "loss": 1.1621, + "step": 843 + }, + { + "epoch": 0.16238088080565158, + "grad_norm": 0.29900025684267784, + "learning_rate": 0.00019094789531927316, + "loss": 1.0422, + "step": 844 + }, + { + "epoch": 0.1625732752141891, + "grad_norm": 
0.3871378818101089, + "learning_rate": 0.00019092196777966848, + "loss": 1.1545, + "step": 845 + }, + { + "epoch": 0.1627656696227266, + "grad_norm": 0.2894517762882134, + "learning_rate": 0.00019089600492701924, + "loss": 1.1463, + "step": 846 + }, + { + "epoch": 0.1629580640312641, + "grad_norm": 0.34456572840821104, + "learning_rate": 0.00019087000677140908, + "loss": 1.1074, + "step": 847 + }, + { + "epoch": 0.16315045843980158, + "grad_norm": 0.2641773341362384, + "learning_rate": 0.00019084397332293538, + "loss": 1.1674, + "step": 848 + }, + { + "epoch": 0.1633428528483391, + "grad_norm": 0.46450522629757435, + "learning_rate": 0.00019081790459170926, + "loss": 1.0286, + "step": 849 + }, + { + "epoch": 0.1635352472568766, + "grad_norm": 0.27296123065945493, + "learning_rate": 0.00019079180058785547, + "loss": 1.1109, + "step": 850 + }, + { + "epoch": 0.1637276416654141, + "grad_norm": 0.3531101160607227, + "learning_rate": 0.00019076566132151252, + "loss": 1.1079, + "step": 851 + }, + { + "epoch": 0.1639200360739516, + "grad_norm": 0.31451057559755285, + "learning_rate": 0.0001907394868028326, + "loss": 1.1573, + "step": 852 + }, + { + "epoch": 0.1641124304824891, + "grad_norm": 0.26160877730349263, + "learning_rate": 0.0001907132770419816, + "loss": 1.1183, + "step": 853 + }, + { + "epoch": 0.1643048248910266, + "grad_norm": 0.2687391961006805, + "learning_rate": 0.0001906870320491391, + "loss": 1.1199, + "step": 854 + }, + { + "epoch": 0.1644972192995641, + "grad_norm": 0.3580615465083372, + "learning_rate": 0.00019066075183449835, + "loss": 1.0355, + "step": 855 + }, + { + "epoch": 0.16468961370810162, + "grad_norm": 0.3155966375753538, + "learning_rate": 0.00019063443640826623, + "loss": 1.0485, + "step": 856 + }, + { + "epoch": 0.1648820081166391, + "grad_norm": 0.3169001927767128, + "learning_rate": 0.0001906080857806634, + "loss": 1.2239, + "step": 857 + }, + { + "epoch": 0.1650744025251766, + "grad_norm": 0.42209780972140887, + "learning_rate": 0.00019058169996192417, + "loss": 1.1579, + "step": 858 + }, + { + "epoch": 0.16526679693371413, + "grad_norm": 0.3491712473462913, + "learning_rate": 0.00019055527896229643, + "loss": 1.1395, + "step": 859 + }, + { + "epoch": 0.16545919134225162, + "grad_norm": 0.26242186175294313, + "learning_rate": 0.00019052882279204178, + "loss": 1.2278, + "step": 860 + }, + { + "epoch": 0.16565158575078912, + "grad_norm": 0.28129617812226465, + "learning_rate": 0.00019050233146143556, + "loss": 1.127, + "step": 861 + }, + { + "epoch": 0.1658439801593266, + "grad_norm": 0.3579369959705776, + "learning_rate": 0.00019047580498076662, + "loss": 1.1962, + "step": 862 + }, + { + "epoch": 0.16603637456786413, + "grad_norm": 0.2963805299448607, + "learning_rate": 0.00019044924336033758, + "loss": 1.1414, + "step": 863 + }, + { + "epoch": 0.16622876897640163, + "grad_norm": 0.3604448636331328, + "learning_rate": 0.0001904226466104647, + "loss": 1.0947, + "step": 864 + }, + { + "epoch": 0.16642116338493912, + "grad_norm": 0.3041402934601379, + "learning_rate": 0.0001903960147414778, + "loss": 1.0996, + "step": 865 + }, + { + "epoch": 0.1666135577934766, + "grad_norm": 0.3301423609008923, + "learning_rate": 0.0001903693477637204, + "loss": 1.12, + "step": 866 + }, + { + "epoch": 0.16680595220201413, + "grad_norm": 0.34247370181151415, + "learning_rate": 0.00019034264568754968, + "loss": 1.2024, + "step": 867 + }, + { + "epoch": 0.16699834661055163, + "grad_norm": 0.30221363752669755, + "learning_rate": 0.00019031590852333635, + "loss": 1.1315, + "step": 868 
+ }, + { + "epoch": 0.16719074101908912, + "grad_norm": 0.2861518235079073, + "learning_rate": 0.00019028913628146488, + "loss": 1.1671, + "step": 869 + }, + { + "epoch": 0.16738313542762664, + "grad_norm": 0.27077370903842124, + "learning_rate": 0.00019026232897233328, + "loss": 1.1831, + "step": 870 + }, + { + "epoch": 0.16757552983616414, + "grad_norm": 0.2416018295611357, + "learning_rate": 0.0001902354866063532, + "loss": 1.1248, + "step": 871 + }, + { + "epoch": 0.16776792424470163, + "grad_norm": 0.3659243145736444, + "learning_rate": 0.0001902086091939499, + "loss": 1.1242, + "step": 872 + }, + { + "epoch": 0.16796031865323913, + "grad_norm": 0.3676593771940267, + "learning_rate": 0.00019018169674556227, + "loss": 1.2159, + "step": 873 + }, + { + "epoch": 0.16815271306177665, + "grad_norm": 0.38525812623930245, + "learning_rate": 0.0001901547492716428, + "loss": 1.0718, + "step": 874 + }, + { + "epoch": 0.16834510747031414, + "grad_norm": 0.321411107035241, + "learning_rate": 0.00019012776678265756, + "loss": 1.1952, + "step": 875 + }, + { + "epoch": 0.16853750187885164, + "grad_norm": 0.33528943639128383, + "learning_rate": 0.00019010074928908622, + "loss": 1.1034, + "step": 876 + }, + { + "epoch": 0.16872989628738916, + "grad_norm": 0.3329529201785243, + "learning_rate": 0.0001900736968014221, + "loss": 1.1608, + "step": 877 + }, + { + "epoch": 0.16892229069592665, + "grad_norm": 0.4191008002324991, + "learning_rate": 0.0001900466093301721, + "loss": 1.2329, + "step": 878 + }, + { + "epoch": 0.16911468510446415, + "grad_norm": 0.3069985978622253, + "learning_rate": 0.0001900194868858566, + "loss": 1.112, + "step": 879 + }, + { + "epoch": 0.16930707951300164, + "grad_norm": 0.3264232356759452, + "learning_rate": 0.0001899923294790097, + "loss": 1.176, + "step": 880 + }, + { + "epoch": 0.16949947392153916, + "grad_norm": 0.2852300817259893, + "learning_rate": 0.00018996513712017898, + "loss": 1.1319, + "step": 881 + }, + { + "epoch": 0.16969186833007666, + "grad_norm": 0.3794771500255884, + "learning_rate": 0.0001899379098199257, + "loss": 1.1355, + "step": 882 + }, + { + "epoch": 0.16988426273861415, + "grad_norm": 0.30199788724423193, + "learning_rate": 0.0001899106475888246, + "loss": 1.1564, + "step": 883 + }, + { + "epoch": 0.17007665714715167, + "grad_norm": 0.2587179613264272, + "learning_rate": 0.00018988335043746403, + "loss": 1.1544, + "step": 884 + }, + { + "epoch": 0.17026905155568917, + "grad_norm": 0.24636680815062553, + "learning_rate": 0.00018985601837644587, + "loss": 1.1943, + "step": 885 + }, + { + "epoch": 0.17046144596422666, + "grad_norm": 0.28949769006470716, + "learning_rate": 0.00018982865141638558, + "loss": 1.119, + "step": 886 + }, + { + "epoch": 0.17065384037276415, + "grad_norm": 0.2962030770378514, + "learning_rate": 0.00018980124956791215, + "loss": 1.1325, + "step": 887 + }, + { + "epoch": 0.17084623478130168, + "grad_norm": 0.28474813221509665, + "learning_rate": 0.00018977381284166817, + "loss": 1.0596, + "step": 888 + }, + { + "epoch": 0.17103862918983917, + "grad_norm": 0.377399006385986, + "learning_rate": 0.00018974634124830977, + "loss": 1.0844, + "step": 889 + }, + { + "epoch": 0.17123102359837666, + "grad_norm": 0.2566488000812951, + "learning_rate": 0.0001897188347985066, + "loss": 1.141, + "step": 890 + }, + { + "epoch": 0.17142341800691419, + "grad_norm": 0.2895835063247774, + "learning_rate": 0.00018969129350294178, + "loss": 1.1985, + "step": 891 + }, + { + "epoch": 0.17161581241545168, + "grad_norm": 0.3035037609978652, + 
"learning_rate": 0.00018966371737231207, + "loss": 1.1131, + "step": 892 + }, + { + "epoch": 0.17180820682398917, + "grad_norm": 0.2587965640604509, + "learning_rate": 0.00018963610641732777, + "loss": 1.1641, + "step": 893 + }, + { + "epoch": 0.17200060123252667, + "grad_norm": 0.28287571253611005, + "learning_rate": 0.00018960846064871258, + "loss": 1.2225, + "step": 894 + }, + { + "epoch": 0.1721929956410642, + "grad_norm": 0.2566382349288544, + "learning_rate": 0.00018958078007720385, + "loss": 1.1806, + "step": 895 + }, + { + "epoch": 0.17238539004960168, + "grad_norm": 0.3362457182888903, + "learning_rate": 0.00018955306471355235, + "loss": 1.1477, + "step": 896 + }, + { + "epoch": 0.17257778445813918, + "grad_norm": 0.32165469122272955, + "learning_rate": 0.00018952531456852247, + "loss": 1.0985, + "step": 897 + }, + { + "epoch": 0.1727701788666767, + "grad_norm": 0.3804335438726372, + "learning_rate": 0.00018949752965289197, + "loss": 1.1691, + "step": 898 + }, + { + "epoch": 0.1729625732752142, + "grad_norm": 0.30636012685614056, + "learning_rate": 0.0001894697099774523, + "loss": 1.0103, + "step": 899 + }, + { + "epoch": 0.1731549676837517, + "grad_norm": 0.27844105821472565, + "learning_rate": 0.0001894418555530082, + "loss": 1.1434, + "step": 900 + }, + { + "epoch": 0.17334736209228918, + "grad_norm": 0.37303786723431925, + "learning_rate": 0.000189413966390378, + "loss": 1.0954, + "step": 901 + }, + { + "epoch": 0.1735397565008267, + "grad_norm": 0.2729300729659083, + "learning_rate": 0.00018938604250039363, + "loss": 1.165, + "step": 902 + }, + { + "epoch": 0.1737321509093642, + "grad_norm": 0.3456435945341589, + "learning_rate": 0.00018935808389390033, + "loss": 1.1683, + "step": 903 + }, + { + "epoch": 0.1739245453179017, + "grad_norm": 0.2975897828170863, + "learning_rate": 0.0001893300905817569, + "loss": 1.0943, + "step": 904 + }, + { + "epoch": 0.1741169397264392, + "grad_norm": 0.275977650169121, + "learning_rate": 0.00018930206257483564, + "loss": 1.1824, + "step": 905 + }, + { + "epoch": 0.1743093341349767, + "grad_norm": 0.3222837564004834, + "learning_rate": 0.00018927399988402232, + "loss": 1.2206, + "step": 906 + }, + { + "epoch": 0.1745017285435142, + "grad_norm": 0.32050967591965934, + "learning_rate": 0.00018924590252021614, + "loss": 1.1409, + "step": 907 + }, + { + "epoch": 0.1746941229520517, + "grad_norm": 0.29730979170161537, + "learning_rate": 0.00018921777049432984, + "loss": 1.1026, + "step": 908 + }, + { + "epoch": 0.17488651736058922, + "grad_norm": 0.38965679795992625, + "learning_rate": 0.00018918960381728947, + "loss": 1.1554, + "step": 909 + }, + { + "epoch": 0.1750789117691267, + "grad_norm": 0.2695704834341497, + "learning_rate": 0.00018916140250003474, + "loss": 1.0725, + "step": 910 + }, + { + "epoch": 0.1752713061776642, + "grad_norm": 0.2921095094340929, + "learning_rate": 0.0001891331665535187, + "loss": 1.1436, + "step": 911 + }, + { + "epoch": 0.17546370058620173, + "grad_norm": 0.28035493258201616, + "learning_rate": 0.00018910489598870784, + "loss": 1.1653, + "step": 912 + }, + { + "epoch": 0.17565609499473922, + "grad_norm": 0.31810075713057345, + "learning_rate": 0.00018907659081658214, + "loss": 1.1106, + "step": 913 + }, + { + "epoch": 0.17584848940327671, + "grad_norm": 0.34009373133269605, + "learning_rate": 0.00018904825104813498, + "loss": 1.2197, + "step": 914 + }, + { + "epoch": 0.1760408838118142, + "grad_norm": 0.33559978944965874, + "learning_rate": 0.0001890198766943732, + "loss": 1.1783, + "step": 915 + }, + { + "epoch": 
0.17623327822035173, + "grad_norm": 0.3115413830736144, + "learning_rate": 0.00018899146776631712, + "loss": 1.1791, + "step": 916 + }, + { + "epoch": 0.17642567262888922, + "grad_norm": 0.2943923602246328, + "learning_rate": 0.0001889630242750004, + "loss": 1.0869, + "step": 917 + }, + { + "epoch": 0.17661806703742672, + "grad_norm": 0.28803120777551305, + "learning_rate": 0.0001889345462314702, + "loss": 1.1188, + "step": 918 + }, + { + "epoch": 0.17681046144596424, + "grad_norm": 0.25659535236204284, + "learning_rate": 0.000188906033646787, + "loss": 1.0719, + "step": 919 + }, + { + "epoch": 0.17700285585450173, + "grad_norm": 0.331679780896181, + "learning_rate": 0.00018887748653202477, + "loss": 1.1102, + "step": 920 + }, + { + "epoch": 0.17719525026303923, + "grad_norm": 0.25875343699934933, + "learning_rate": 0.00018884890489827096, + "loss": 1.1138, + "step": 921 + }, + { + "epoch": 0.17738764467157672, + "grad_norm": 0.31183044674758037, + "learning_rate": 0.00018882028875662627, + "loss": 1.1264, + "step": 922 + }, + { + "epoch": 0.17758003908011424, + "grad_norm": 0.29341552756913425, + "learning_rate": 0.0001887916381182049, + "loss": 1.1726, + "step": 923 + }, + { + "epoch": 0.17777243348865174, + "grad_norm": 0.3446672146063943, + "learning_rate": 0.00018876295299413443, + "loss": 1.1337, + "step": 924 + }, + { + "epoch": 0.17796482789718923, + "grad_norm": 0.27291731920987883, + "learning_rate": 0.00018873423339555584, + "loss": 1.0639, + "step": 925 + }, + { + "epoch": 0.17815722230572675, + "grad_norm": 0.35088318536829566, + "learning_rate": 0.0001887054793336235, + "loss": 1.0908, + "step": 926 + }, + { + "epoch": 0.17834961671426425, + "grad_norm": 0.3024956335824073, + "learning_rate": 0.0001886766908195051, + "loss": 1.0772, + "step": 927 + }, + { + "epoch": 0.17854201112280174, + "grad_norm": 0.31363029589079705, + "learning_rate": 0.00018864786786438185, + "loss": 1.1242, + "step": 928 + }, + { + "epoch": 0.17873440553133924, + "grad_norm": 0.2978871762659467, + "learning_rate": 0.00018861901047944818, + "loss": 1.108, + "step": 929 + }, + { + "epoch": 0.17892679993987676, + "grad_norm": 0.385312391673112, + "learning_rate": 0.00018859011867591202, + "loss": 1.2112, + "step": 930 + }, + { + "epoch": 0.17911919434841425, + "grad_norm": 0.3779748655240065, + "learning_rate": 0.00018856119246499458, + "loss": 1.1026, + "step": 931 + }, + { + "epoch": 0.17931158875695175, + "grad_norm": 0.275071436808685, + "learning_rate": 0.00018853223185793047, + "loss": 1.1871, + "step": 932 + }, + { + "epoch": 0.17950398316548924, + "grad_norm": 0.3605707147022138, + "learning_rate": 0.00018850323686596764, + "loss": 1.2237, + "step": 933 + }, + { + "epoch": 0.17969637757402676, + "grad_norm": 0.27307640028901337, + "learning_rate": 0.00018847420750036747, + "loss": 1.1931, + "step": 934 + }, + { + "epoch": 0.17988877198256426, + "grad_norm": 0.2774789154423623, + "learning_rate": 0.00018844514377240457, + "loss": 1.1712, + "step": 935 + }, + { + "epoch": 0.18008116639110175, + "grad_norm": 0.2973511135010746, + "learning_rate": 0.000188416045693367, + "loss": 1.1914, + "step": 936 + }, + { + "epoch": 0.18027356079963927, + "grad_norm": 0.3611544428784761, + "learning_rate": 0.0001883869132745561, + "loss": 1.1679, + "step": 937 + }, + { + "epoch": 0.18046595520817676, + "grad_norm": 0.3095639368120166, + "learning_rate": 0.0001883577465272866, + "loss": 1.0873, + "step": 938 + }, + { + "epoch": 0.18065834961671426, + "grad_norm": 0.27455848950725376, + "learning_rate": 
0.00018832854546288642, + "loss": 1.1294, + "step": 939 + }, + { + "epoch": 0.18085074402525175, + "grad_norm": 0.3205017163632537, + "learning_rate": 0.00018829931009269705, + "loss": 1.1613, + "step": 940 + }, + { + "epoch": 0.18104313843378927, + "grad_norm": 0.30501737625062797, + "learning_rate": 0.0001882700404280731, + "loss": 1.0524, + "step": 941 + }, + { + "epoch": 0.18123553284232677, + "grad_norm": 0.34182345612171766, + "learning_rate": 0.00018824073648038257, + "loss": 1.1838, + "step": 942 + }, + { + "epoch": 0.18142792725086426, + "grad_norm": 0.31842117371139816, + "learning_rate": 0.0001882113982610068, + "loss": 0.9934, + "step": 943 + }, + { + "epoch": 0.18162032165940178, + "grad_norm": 0.30823825549781764, + "learning_rate": 0.00018818202578134042, + "loss": 1.1544, + "step": 944 + }, + { + "epoch": 0.18181271606793928, + "grad_norm": 0.3999509421889017, + "learning_rate": 0.0001881526190527913, + "loss": 1.0364, + "step": 945 + }, + { + "epoch": 0.18200511047647677, + "grad_norm": 0.2767317165622152, + "learning_rate": 0.00018812317808678076, + "loss": 1.1617, + "step": 946 + }, + { + "epoch": 0.18219750488501427, + "grad_norm": 0.35609542855148246, + "learning_rate": 0.0001880937028947433, + "loss": 1.106, + "step": 947 + }, + { + "epoch": 0.1823898992935518, + "grad_norm": 0.33916490177667435, + "learning_rate": 0.00018806419348812672, + "loss": 1.1731, + "step": 948 + }, + { + "epoch": 0.18258229370208928, + "grad_norm": 0.320424334406229, + "learning_rate": 0.00018803464987839216, + "loss": 1.0738, + "step": 949 + }, + { + "epoch": 0.18277468811062678, + "grad_norm": 0.3057200921750843, + "learning_rate": 0.000188005072077014, + "loss": 1.1095, + "step": 950 + }, + { + "epoch": 0.1829670825191643, + "grad_norm": 0.26604360333033583, + "learning_rate": 0.00018797546009547995, + "loss": 1.164, + "step": 951 + }, + { + "epoch": 0.1831594769277018, + "grad_norm": 0.28116284631909794, + "learning_rate": 0.0001879458139452909, + "loss": 1.1259, + "step": 952 + }, + { + "epoch": 0.18335187133623929, + "grad_norm": 0.30711206838212346, + "learning_rate": 0.00018791613363796118, + "loss": 1.2019, + "step": 953 + }, + { + "epoch": 0.18354426574477678, + "grad_norm": 0.319453796282546, + "learning_rate": 0.00018788641918501814, + "loss": 1.1093, + "step": 954 + }, + { + "epoch": 0.1837366601533143, + "grad_norm": 0.32916626205461036, + "learning_rate": 0.00018785667059800264, + "loss": 1.208, + "step": 955 + }, + { + "epoch": 0.1839290545618518, + "grad_norm": 0.37459457263543294, + "learning_rate": 0.00018782688788846865, + "loss": 1.1141, + "step": 956 + }, + { + "epoch": 0.1841214489703893, + "grad_norm": 0.4593672811701854, + "learning_rate": 0.0001877970710679834, + "loss": 1.1838, + "step": 957 + }, + { + "epoch": 0.1843138433789268, + "grad_norm": 0.29660875397432696, + "learning_rate": 0.0001877672201481275, + "loss": 1.1834, + "step": 958 + }, + { + "epoch": 0.1845062377874643, + "grad_norm": 0.3207005263565337, + "learning_rate": 0.0001877373351404946, + "loss": 1.1197, + "step": 959 + }, + { + "epoch": 0.1846986321960018, + "grad_norm": 0.28124102221285346, + "learning_rate": 0.00018770741605669171, + "loss": 1.1413, + "step": 960 + }, + { + "epoch": 0.1848910266045393, + "grad_norm": 0.33377097507399034, + "learning_rate": 0.0001876774629083391, + "loss": 1.159, + "step": 961 + }, + { + "epoch": 0.18508342101307682, + "grad_norm": 0.33389954881976147, + "learning_rate": 0.00018764747570707019, + "loss": 1.1281, + "step": 962 + }, + { + "epoch": 
0.1852758154216143, + "grad_norm": 0.3159594033179389, + "learning_rate": 0.00018761745446453168, + "loss": 1.1569, + "step": 963 + }, + { + "epoch": 0.1854682098301518, + "grad_norm": 0.3180637936569423, + "learning_rate": 0.00018758739919238349, + "loss": 1.2511, + "step": 964 + }, + { + "epoch": 0.18566060423868933, + "grad_norm": 0.2858832191127742, + "learning_rate": 0.00018755730990229868, + "loss": 1.0574, + "step": 965 + }, + { + "epoch": 0.18585299864722682, + "grad_norm": 0.3175702687502775, + "learning_rate": 0.00018752718660596367, + "loss": 1.159, + "step": 966 + }, + { + "epoch": 0.1860453930557643, + "grad_norm": 0.3010254344666017, + "learning_rate": 0.00018749702931507796, + "loss": 1.1986, + "step": 967 + }, + { + "epoch": 0.1862377874643018, + "grad_norm": 0.342427437750701, + "learning_rate": 0.00018746683804135429, + "loss": 1.1233, + "step": 968 + }, + { + "epoch": 0.18643018187283933, + "grad_norm": 0.32207018715516766, + "learning_rate": 0.00018743661279651855, + "loss": 1.1991, + "step": 969 + }, + { + "epoch": 0.18662257628137682, + "grad_norm": 0.3773847455479765, + "learning_rate": 0.00018740635359231, + "loss": 1.1778, + "step": 970 + }, + { + "epoch": 0.18681497068991432, + "grad_norm": 0.2960118645511741, + "learning_rate": 0.00018737606044048086, + "loss": 1.0833, + "step": 971 + }, + { + "epoch": 0.18700736509845184, + "grad_norm": 0.3757855285007589, + "learning_rate": 0.00018734573335279668, + "loss": 1.1829, + "step": 972 + }, + { + "epoch": 0.18719975950698933, + "grad_norm": 0.3996595936719864, + "learning_rate": 0.00018731537234103617, + "loss": 1.0772, + "step": 973 + }, + { + "epoch": 0.18739215391552683, + "grad_norm": 0.2847051068279051, + "learning_rate": 0.00018728497741699117, + "loss": 1.1134, + "step": 974 + }, + { + "epoch": 0.18758454832406432, + "grad_norm": 0.27549970128215284, + "learning_rate": 0.0001872545485924667, + "loss": 1.1685, + "step": 975 + }, + { + "epoch": 0.18777694273260184, + "grad_norm": 0.33484321399459477, + "learning_rate": 0.00018722408587928103, + "loss": 1.1473, + "step": 976 + }, + { + "epoch": 0.18796933714113934, + "grad_norm": 0.35316226832778747, + "learning_rate": 0.00018719358928926546, + "loss": 1.173, + "step": 977 + }, + { + "epoch": 0.18816173154967683, + "grad_norm": 0.3462465232862975, + "learning_rate": 0.00018716305883426454, + "loss": 1.0195, + "step": 978 + }, + { + "epoch": 0.18835412595821435, + "grad_norm": 0.25745398549898946, + "learning_rate": 0.00018713249452613598, + "loss": 1.1412, + "step": 979 + }, + { + "epoch": 0.18854652036675185, + "grad_norm": 0.42279872297244486, + "learning_rate": 0.00018710189637675056, + "loss": 1.1855, + "step": 980 + }, + { + "epoch": 0.18873891477528934, + "grad_norm": 0.3185714083210854, + "learning_rate": 0.00018707126439799224, + "loss": 1.1447, + "step": 981 + }, + { + "epoch": 0.18893130918382683, + "grad_norm": 0.29351698832238055, + "learning_rate": 0.00018704059860175817, + "loss": 1.1501, + "step": 982 + }, + { + "epoch": 0.18912370359236436, + "grad_norm": 0.4140759363951703, + "learning_rate": 0.00018700989899995857, + "loss": 1.1193, + "step": 983 + }, + { + "epoch": 0.18931609800090185, + "grad_norm": 0.3307058322568098, + "learning_rate": 0.00018697916560451682, + "loss": 1.0535, + "step": 984 + }, + { + "epoch": 0.18950849240943934, + "grad_norm": 0.3263000631533168, + "learning_rate": 0.0001869483984273694, + "loss": 1.1253, + "step": 985 + }, + { + "epoch": 0.18970088681797687, + "grad_norm": 0.28214016640278927, + "learning_rate": 
0.00018691759748046594, + "loss": 1.1605, + "step": 986 + }, + { + "epoch": 0.18989328122651436, + "grad_norm": 0.27238602443860643, + "learning_rate": 0.00018688676277576916, + "loss": 1.1358, + "step": 987 + }, + { + "epoch": 0.19008567563505185, + "grad_norm": 0.34227702832507034, + "learning_rate": 0.00018685589432525492, + "loss": 1.2518, + "step": 988 + }, + { + "epoch": 0.19027807004358935, + "grad_norm": 0.28004287312705406, + "learning_rate": 0.0001868249921409122, + "loss": 1.0112, + "step": 989 + }, + { + "epoch": 0.19047046445212687, + "grad_norm": 0.2873550611088721, + "learning_rate": 0.00018679405623474296, + "loss": 1.2342, + "step": 990 + }, + { + "epoch": 0.19066285886066436, + "grad_norm": 0.25418784393800076, + "learning_rate": 0.00018676308661876243, + "loss": 1.0562, + "step": 991 + }, + { + "epoch": 0.19085525326920186, + "grad_norm": 0.2835909005007071, + "learning_rate": 0.00018673208330499883, + "loss": 1.2144, + "step": 992 + }, + { + "epoch": 0.19104764767773938, + "grad_norm": 0.3332844782432229, + "learning_rate": 0.00018670104630549348, + "loss": 1.2369, + "step": 993 + }, + { + "epoch": 0.19124004208627687, + "grad_norm": 0.3933352226114899, + "learning_rate": 0.0001866699756323008, + "loss": 1.1, + "step": 994 + }, + { + "epoch": 0.19143243649481437, + "grad_norm": 0.2935380742083632, + "learning_rate": 0.0001866388712974883, + "loss": 1.1043, + "step": 995 + }, + { + "epoch": 0.19162483090335186, + "grad_norm": 0.27358121515773026, + "learning_rate": 0.0001866077333131365, + "loss": 1.0905, + "step": 996 + }, + { + "epoch": 0.19181722531188938, + "grad_norm": 0.306629379220065, + "learning_rate": 0.00018657656169133907, + "loss": 1.1934, + "step": 997 + }, + { + "epoch": 0.19200961972042688, + "grad_norm": 0.28871486180626765, + "learning_rate": 0.00018654535644420275, + "loss": 1.1733, + "step": 998 + }, + { + "epoch": 0.19220201412896437, + "grad_norm": 0.2882640971719597, + "learning_rate": 0.00018651411758384718, + "loss": 1.1741, + "step": 999 + }, + { + "epoch": 0.19239440853750187, + "grad_norm": 0.13947954655889114, + "learning_rate": 0.0001864828451224053, + "loss": 1.127, + "step": 1000 + }, + { + "epoch": 0.1925868029460394, + "grad_norm": 0.3025899712629719, + "learning_rate": 0.00018645153907202285, + "loss": 1.1595, + "step": 1001 + }, + { + "epoch": 0.19277919735457688, + "grad_norm": 0.27681580595475636, + "learning_rate": 0.00018642019944485884, + "loss": 1.1769, + "step": 1002 + }, + { + "epoch": 0.19297159176311438, + "grad_norm": 0.2966134805269456, + "learning_rate": 0.00018638882625308522, + "loss": 1.0378, + "step": 1003 + }, + { + "epoch": 0.1931639861716519, + "grad_norm": 0.35833152967747617, + "learning_rate": 0.0001863574195088869, + "loss": 1.0837, + "step": 1004 + }, + { + "epoch": 0.1933563805801894, + "grad_norm": 0.2958991050800634, + "learning_rate": 0.00018632597922446195, + "loss": 1.1686, + "step": 1005 + }, + { + "epoch": 0.19354877498872688, + "grad_norm": 0.3492786365549858, + "learning_rate": 0.0001862945054120214, + "loss": 1.2175, + "step": 1006 + }, + { + "epoch": 0.19374116939726438, + "grad_norm": 0.31341562808569357, + "learning_rate": 0.00018626299808378934, + "loss": 1.2116, + "step": 1007 + }, + { + "epoch": 0.1939335638058019, + "grad_norm": 0.29824132488910254, + "learning_rate": 0.00018623145725200278, + "loss": 1.1256, + "step": 1008 + }, + { + "epoch": 0.1941259582143394, + "grad_norm": 0.3322377671494098, + "learning_rate": 0.0001861998829289119, + "loss": 1.1351, + "step": 1009 + }, + { + "epoch": 
0.1943183526228769, + "grad_norm": 0.343812947236017, + "learning_rate": 0.0001861682751267798, + "loss": 1.1791, + "step": 1010 + }, + { + "epoch": 0.1945107470314144, + "grad_norm": 0.3148311494011386, + "learning_rate": 0.00018613663385788252, + "loss": 1.1292, + "step": 1011 + }, + { + "epoch": 0.1947031414399519, + "grad_norm": 0.24541134382521418, + "learning_rate": 0.0001861049591345092, + "loss": 1.1716, + "step": 1012 + }, + { + "epoch": 0.1948955358484894, + "grad_norm": 0.3182043268234669, + "learning_rate": 0.00018607325096896195, + "loss": 1.2349, + "step": 1013 + }, + { + "epoch": 0.1950879302570269, + "grad_norm": 0.283737706616924, + "learning_rate": 0.00018604150937355586, + "loss": 1.1599, + "step": 1014 + }, + { + "epoch": 0.19528032466556441, + "grad_norm": 0.3329832592951454, + "learning_rate": 0.00018600973436061897, + "loss": 1.0952, + "step": 1015 + }, + { + "epoch": 0.1954727190741019, + "grad_norm": 0.2796509308766962, + "learning_rate": 0.00018597792594249234, + "loss": 1.033, + "step": 1016 + }, + { + "epoch": 0.1956651134826394, + "grad_norm": 0.26260876337805816, + "learning_rate": 0.00018594608413153001, + "loss": 1.0979, + "step": 1017 + }, + { + "epoch": 0.19585750789117692, + "grad_norm": 0.3297883785799931, + "learning_rate": 0.00018591420894009897, + "loss": 1.2115, + "step": 1018 + }, + { + "epoch": 0.19604990229971442, + "grad_norm": 0.29719183536715366, + "learning_rate": 0.00018588230038057914, + "loss": 1.1183, + "step": 1019 + }, + { + "epoch": 0.1962422967082519, + "grad_norm": 0.4358341202415237, + "learning_rate": 0.00018585035846536348, + "loss": 1.1326, + "step": 1020 + }, + { + "epoch": 0.1964346911167894, + "grad_norm": 0.3454508791763001, + "learning_rate": 0.00018581838320685782, + "loss": 1.1479, + "step": 1021 + }, + { + "epoch": 0.19662708552532693, + "grad_norm": 0.30060039291533, + "learning_rate": 0.00018578637461748106, + "loss": 1.1453, + "step": 1022 + }, + { + "epoch": 0.19681947993386442, + "grad_norm": 0.3338543540979239, + "learning_rate": 0.00018575433270966486, + "loss": 1.1294, + "step": 1023 + }, + { + "epoch": 0.19701187434240192, + "grad_norm": 0.27357267072948194, + "learning_rate": 0.00018572225749585402, + "loss": 1.1806, + "step": 1024 + }, + { + "epoch": 0.19720426875093944, + "grad_norm": 0.3120343250408094, + "learning_rate": 0.00018569014898850612, + "loss": 1.133, + "step": 1025 + }, + { + "epoch": 0.19739666315947693, + "grad_norm": 0.3477166900063389, + "learning_rate": 0.0001856580072000918, + "loss": 1.1008, + "step": 1026 + }, + { + "epoch": 0.19758905756801443, + "grad_norm": 0.2234282696276886, + "learning_rate": 0.00018562583214309445, + "loss": 1.1976, + "step": 1027 + }, + { + "epoch": 0.19778145197655192, + "grad_norm": 0.3280499890572628, + "learning_rate": 0.0001855936238300106, + "loss": 1.1355, + "step": 1028 + }, + { + "epoch": 0.19797384638508944, + "grad_norm": 0.2929436441679051, + "learning_rate": 0.00018556138227334955, + "loss": 1.1707, + "step": 1029 + }, + { + "epoch": 0.19816624079362694, + "grad_norm": 0.30070036527879784, + "learning_rate": 0.00018552910748563357, + "loss": 1.1875, + "step": 1030 + }, + { + "epoch": 0.19835863520216443, + "grad_norm": 0.23634706250113938, + "learning_rate": 0.00018549679947939778, + "loss": 1.149, + "step": 1031 + }, + { + "epoch": 0.19855102961070195, + "grad_norm": 0.28545181143680126, + "learning_rate": 0.00018546445826719024, + "loss": 1.0201, + "step": 1032 + }, + { + "epoch": 0.19874342401923945, + "grad_norm": 0.307121954461118, + 
"learning_rate": 0.00018543208386157193, + "loss": 1.0307, + "step": 1033 + }, + { + "epoch": 0.19893581842777694, + "grad_norm": 0.31654414999389935, + "learning_rate": 0.00018539967627511668, + "loss": 1.1158, + "step": 1034 + }, + { + "epoch": 0.19912821283631443, + "grad_norm": 0.3543921185896822, + "learning_rate": 0.00018536723552041123, + "loss": 1.1381, + "step": 1035 + }, + { + "epoch": 0.19932060724485196, + "grad_norm": 0.2902046620820293, + "learning_rate": 0.00018533476161005518, + "loss": 1.1096, + "step": 1036 + }, + { + "epoch": 0.19951300165338945, + "grad_norm": 0.3746716210376939, + "learning_rate": 0.00018530225455666102, + "loss": 1.1124, + "step": 1037 + }, + { + "epoch": 0.19970539606192694, + "grad_norm": 0.2596079542898459, + "learning_rate": 0.00018526971437285417, + "loss": 1.1852, + "step": 1038 + }, + { + "epoch": 0.19989779047046446, + "grad_norm": 0.25952519079003095, + "learning_rate": 0.00018523714107127276, + "loss": 1.0874, + "step": 1039 + }, + { + "epoch": 0.20009018487900196, + "grad_norm": 0.2974050284024279, + "learning_rate": 0.00018520453466456797, + "loss": 1.1883, + "step": 1040 + }, + { + "epoch": 0.20028257928753945, + "grad_norm": 0.37566020443352655, + "learning_rate": 0.00018517189516540378, + "loss": 1.0979, + "step": 1041 + }, + { + "epoch": 0.20047497369607695, + "grad_norm": 0.3227350978880759, + "learning_rate": 0.00018513922258645685, + "loss": 1.1349, + "step": 1042 + }, + { + "epoch": 0.20066736810461447, + "grad_norm": 0.2707840184031702, + "learning_rate": 0.000185106516940417, + "loss": 1.1197, + "step": 1043 + }, + { + "epoch": 0.20085976251315196, + "grad_norm": 0.24086167817755455, + "learning_rate": 0.00018507377823998665, + "loss": 1.1774, + "step": 1044 + }, + { + "epoch": 0.20105215692168946, + "grad_norm": 0.318161256984643, + "learning_rate": 0.0001850410064978811, + "loss": 1.0991, + "step": 1045 + }, + { + "epoch": 0.20124455133022698, + "grad_norm": 0.3034513893807528, + "learning_rate": 0.00018500820172682856, + "loss": 1.0625, + "step": 1046 + }, + { + "epoch": 0.20143694573876447, + "grad_norm": 0.33655796773506963, + "learning_rate": 0.00018497536393957003, + "loss": 1.1437, + "step": 1047 + }, + { + "epoch": 0.20162934014730197, + "grad_norm": 0.3437961976086482, + "learning_rate": 0.0001849424931488593, + "loss": 1.1544, + "step": 1048 + }, + { + "epoch": 0.20182173455583946, + "grad_norm": 0.2900820367137122, + "learning_rate": 0.00018490958936746306, + "loss": 1.1205, + "step": 1049 + }, + { + "epoch": 0.20201412896437698, + "grad_norm": 0.3036320803489058, + "learning_rate": 0.0001848766526081607, + "loss": 1.0781, + "step": 1050 + }, + { + "epoch": 0.20220652337291448, + "grad_norm": 0.35547222447306837, + "learning_rate": 0.0001848436828837445, + "loss": 1.2098, + "step": 1051 + }, + { + "epoch": 0.20239891778145197, + "grad_norm": 0.40675861481643805, + "learning_rate": 0.00018481068020701952, + "loss": 0.9895, + "step": 1052 + }, + { + "epoch": 0.2025913121899895, + "grad_norm": 0.2858892320887802, + "learning_rate": 0.00018477764459080362, + "loss": 1.0918, + "step": 1053 + }, + { + "epoch": 0.20278370659852699, + "grad_norm": 0.3542974949205585, + "learning_rate": 0.00018474457604792746, + "loss": 1.1099, + "step": 1054 + }, + { + "epoch": 0.20297610100706448, + "grad_norm": 0.2815771555898335, + "learning_rate": 0.00018471147459123448, + "loss": 1.1638, + "step": 1055 + }, + { + "epoch": 0.20316849541560197, + "grad_norm": 0.23213614124799611, + "learning_rate": 0.00018467834023358086, + "loss": 1.0601, 
+ "step": 1056 + }, + { + "epoch": 0.2033608898241395, + "grad_norm": 0.3110000979581457, + "learning_rate": 0.0001846451729878357, + "loss": 1.1102, + "step": 1057 + }, + { + "epoch": 0.203553284232677, + "grad_norm": 0.343155039665011, + "learning_rate": 0.00018461197286688067, + "loss": 1.0351, + "step": 1058 + }, + { + "epoch": 0.20374567864121448, + "grad_norm": 0.31762030521485873, + "learning_rate": 0.00018457873988361037, + "loss": 1.1313, + "step": 1059 + }, + { + "epoch": 0.20393807304975198, + "grad_norm": 0.3034099270607324, + "learning_rate": 0.00018454547405093212, + "loss": 1.1124, + "step": 1060 + }, + { + "epoch": 0.2041304674582895, + "grad_norm": 0.31508523484241846, + "learning_rate": 0.00018451217538176598, + "loss": 1.0995, + "step": 1061 + }, + { + "epoch": 0.204322861866827, + "grad_norm": 0.32737658093192185, + "learning_rate": 0.0001844788438890447, + "loss": 1.134, + "step": 1062 + }, + { + "epoch": 0.2045152562753645, + "grad_norm": 0.30017505137458866, + "learning_rate": 0.00018444547958571396, + "loss": 1.1007, + "step": 1063 + }, + { + "epoch": 0.204707650683902, + "grad_norm": 0.35179798282014624, + "learning_rate": 0.00018441208248473197, + "loss": 1.1319, + "step": 1064 + }, + { + "epoch": 0.2049000450924395, + "grad_norm": 0.39720645495643087, + "learning_rate": 0.00018437865259906985, + "loss": 1.1498, + "step": 1065 + }, + { + "epoch": 0.205092439500977, + "grad_norm": 0.3130211632216441, + "learning_rate": 0.00018434518994171135, + "loss": 1.0338, + "step": 1066 + }, + { + "epoch": 0.2052848339095145, + "grad_norm": 0.31935911128412864, + "learning_rate": 0.000184311694525653, + "loss": 1.1226, + "step": 1067 + }, + { + "epoch": 0.205477228318052, + "grad_norm": 0.2983925784553987, + "learning_rate": 0.00018427816636390398, + "loss": 1.1327, + "step": 1068 + }, + { + "epoch": 0.2056696227265895, + "grad_norm": 0.2986694436938893, + "learning_rate": 0.0001842446054694863, + "loss": 1.1313, + "step": 1069 + }, + { + "epoch": 0.205862017135127, + "grad_norm": 0.27645054405518904, + "learning_rate": 0.00018421101185543465, + "loss": 1.0862, + "step": 1070 + }, + { + "epoch": 0.20605441154366452, + "grad_norm": 0.34654391537794565, + "learning_rate": 0.0001841773855347963, + "loss": 1.1144, + "step": 1071 + }, + { + "epoch": 0.20624680595220202, + "grad_norm": 0.2921548719447003, + "learning_rate": 0.0001841437265206314, + "loss": 1.093, + "step": 1072 + }, + { + "epoch": 0.2064392003607395, + "grad_norm": 0.33910685260576345, + "learning_rate": 0.0001841100348260127, + "loss": 1.052, + "step": 1073 + }, + { + "epoch": 0.206631594769277, + "grad_norm": 0.356207723915354, + "learning_rate": 0.0001840763104640257, + "loss": 1.1316, + "step": 1074 + }, + { + "epoch": 0.20682398917781453, + "grad_norm": 0.3213575450598716, + "learning_rate": 0.0001840425534477685, + "loss": 1.0683, + "step": 1075 + }, + { + "epoch": 0.20701638358635202, + "grad_norm": 0.3611732703231641, + "learning_rate": 0.000184008763790352, + "loss": 1.1431, + "step": 1076 + }, + { + "epoch": 0.20720877799488951, + "grad_norm": 0.2599301226681828, + "learning_rate": 0.00018397494150489965, + "loss": 1.1671, + "step": 1077 + }, + { + "epoch": 0.20740117240342704, + "grad_norm": 0.3573389190698938, + "learning_rate": 0.00018394108660454766, + "loss": 1.2706, + "step": 1078 + }, + { + "epoch": 0.20759356681196453, + "grad_norm": 0.3943059266582131, + "learning_rate": 0.00018390719910244487, + "loss": 1.1055, + "step": 1079 + }, + { + "epoch": 0.20778596122050202, + "grad_norm": 
0.2868571124179789, + "learning_rate": 0.00018387327901175284, + "loss": 1.1002, + "step": 1080 + }, + { + "epoch": 0.20797835562903952, + "grad_norm": 0.2559251868316223, + "learning_rate": 0.00018383932634564572, + "loss": 1.1053, + "step": 1081 + }, + { + "epoch": 0.20817075003757704, + "grad_norm": 0.3153264220444252, + "learning_rate": 0.00018380534111731028, + "loss": 1.2179, + "step": 1082 + }, + { + "epoch": 0.20836314444611453, + "grad_norm": 0.34781515789392425, + "learning_rate": 0.00018377132333994607, + "loss": 1.0962, + "step": 1083 + }, + { + "epoch": 0.20855553885465203, + "grad_norm": 0.27326408571733896, + "learning_rate": 0.0001837372730267652, + "loss": 1.1088, + "step": 1084 + }, + { + "epoch": 0.20874793326318955, + "grad_norm": 0.3860607559252603, + "learning_rate": 0.00018370319019099234, + "loss": 1.0594, + "step": 1085 + }, + { + "epoch": 0.20894032767172704, + "grad_norm": 0.30528297689667394, + "learning_rate": 0.00018366907484586496, + "loss": 1.0382, + "step": 1086 + }, + { + "epoch": 0.20913272208026454, + "grad_norm": 0.3847453799085574, + "learning_rate": 0.000183634927004633, + "loss": 1.0607, + "step": 1087 + }, + { + "epoch": 0.20932511648880203, + "grad_norm": 0.38962908628574694, + "learning_rate": 0.00018360074668055914, + "loss": 1.0069, + "step": 1088 + }, + { + "epoch": 0.20951751089733955, + "grad_norm": 0.4797244342763735, + "learning_rate": 0.00018356653388691859, + "loss": 1.075, + "step": 1089 + }, + { + "epoch": 0.20970990530587705, + "grad_norm": 0.38371768004891116, + "learning_rate": 0.00018353228863699923, + "loss": 1.2317, + "step": 1090 + }, + { + "epoch": 0.20990229971441454, + "grad_norm": 0.2648546710256649, + "learning_rate": 0.00018349801094410148, + "loss": 1.1473, + "step": 1091 + }, + { + "epoch": 0.21009469412295206, + "grad_norm": 0.3810434720358997, + "learning_rate": 0.00018346370082153844, + "loss": 1.0398, + "step": 1092 + }, + { + "epoch": 0.21028708853148956, + "grad_norm": 0.29686409378819517, + "learning_rate": 0.00018342935828263575, + "loss": 1.125, + "step": 1093 + }, + { + "epoch": 0.21047948294002705, + "grad_norm": 0.34449014797680627, + "learning_rate": 0.00018339498334073168, + "loss": 1.1458, + "step": 1094 + }, + { + "epoch": 0.21067187734856455, + "grad_norm": 0.25325758368432677, + "learning_rate": 0.000183360576009177, + "loss": 1.0441, + "step": 1095 + }, + { + "epoch": 0.21086427175710207, + "grad_norm": 0.27215896328261197, + "learning_rate": 0.00018332613630133517, + "loss": 1.1009, + "step": 1096 + }, + { + "epoch": 0.21105666616563956, + "grad_norm": 0.38131166614991063, + "learning_rate": 0.0001832916642305822, + "loss": 1.094, + "step": 1097 + }, + { + "epoch": 0.21124906057417706, + "grad_norm": 0.3369507588000508, + "learning_rate": 0.00018325715981030657, + "loss": 1.2038, + "step": 1098 + }, + { + "epoch": 0.21144145498271458, + "grad_norm": 0.35080617220553933, + "learning_rate": 0.00018322262305390947, + "loss": 1.0997, + "step": 1099 + }, + { + "epoch": 0.21163384939125207, + "grad_norm": 0.2889199724957277, + "learning_rate": 0.00018318805397480453, + "loss": 1.1646, + "step": 1100 + }, + { + "epoch": 0.21182624379978957, + "grad_norm": 0.33774441430192076, + "learning_rate": 0.00018315345258641802, + "loss": 1.1139, + "step": 1101 + }, + { + "epoch": 0.21201863820832706, + "grad_norm": 0.28859761991935223, + "learning_rate": 0.00018311881890218873, + "loss": 1.1036, + "step": 1102 + }, + { + "epoch": 0.21221103261686458, + "grad_norm": 0.3200606896820387, + "learning_rate": 
0.0001830841529355679, + "loss": 1.125, + "step": 1103 + }, + { + "epoch": 0.21240342702540208, + "grad_norm": 0.5111080731032329, + "learning_rate": 0.0001830494547000195, + "loss": 1.2038, + "step": 1104 + }, + { + "epoch": 0.21259582143393957, + "grad_norm": 0.32682771642658637, + "learning_rate": 0.00018301472420901983, + "loss": 1.0573, + "step": 1105 + }, + { + "epoch": 0.2127882158424771, + "grad_norm": 0.2614531758423323, + "learning_rate": 0.00018297996147605787, + "loss": 1.0501, + "step": 1106 + }, + { + "epoch": 0.21298061025101458, + "grad_norm": 0.27390724391931975, + "learning_rate": 0.00018294516651463507, + "loss": 1.189, + "step": 1107 + }, + { + "epoch": 0.21317300465955208, + "grad_norm": 0.44717349512111515, + "learning_rate": 0.00018291033933826533, + "loss": 1.1307, + "step": 1108 + }, + { + "epoch": 0.21336539906808957, + "grad_norm": 0.2772467096253637, + "learning_rate": 0.0001828754799604752, + "loss": 1.1356, + "step": 1109 + }, + { + "epoch": 0.2135577934766271, + "grad_norm": 0.3173355957614274, + "learning_rate": 0.00018284058839480361, + "loss": 1.1513, + "step": 1110 + }, + { + "epoch": 0.2137501878851646, + "grad_norm": 0.3019262414205688, + "learning_rate": 0.00018280566465480206, + "loss": 1.1567, + "step": 1111 + }, + { + "epoch": 0.21394258229370208, + "grad_norm": 0.3414952526642444, + "learning_rate": 0.00018277070875403456, + "loss": 1.1014, + "step": 1112 + }, + { + "epoch": 0.2141349767022396, + "grad_norm": 0.30910820398515804, + "learning_rate": 0.00018273572070607753, + "loss": 1.151, + "step": 1113 + }, + { + "epoch": 0.2143273711107771, + "grad_norm": 0.3190859453631591, + "learning_rate": 0.00018270070052451993, + "loss": 1.1189, + "step": 1114 + }, + { + "epoch": 0.2145197655193146, + "grad_norm": 0.3587526892073246, + "learning_rate": 0.00018266564822296322, + "loss": 1.214, + "step": 1115 + }, + { + "epoch": 0.2147121599278521, + "grad_norm": 0.3388880499961147, + "learning_rate": 0.0001826305638150213, + "loss": 1.0513, + "step": 1116 + }, + { + "epoch": 0.2149045543363896, + "grad_norm": 0.25139899206117644, + "learning_rate": 0.0001825954473143205, + "loss": 1.1252, + "step": 1117 + }, + { + "epoch": 0.2150969487449271, + "grad_norm": 0.2916843674505857, + "learning_rate": 0.00018256029873449974, + "loss": 1.1292, + "step": 1118 + }, + { + "epoch": 0.2152893431534646, + "grad_norm": 0.29562311422619053, + "learning_rate": 0.00018252511808921032, + "loss": 1.0445, + "step": 1119 + }, + { + "epoch": 0.21548173756200212, + "grad_norm": 0.3544800663138205, + "learning_rate": 0.00018248990539211594, + "loss": 1.111, + "step": 1120 + }, + { + "epoch": 0.2156741319705396, + "grad_norm": 0.27711328478108577, + "learning_rate": 0.00018245466065689282, + "loss": 1.1431, + "step": 1121 + }, + { + "epoch": 0.2158665263790771, + "grad_norm": 0.2840924369934067, + "learning_rate": 0.00018241938389722967, + "loss": 1.0795, + "step": 1122 + }, + { + "epoch": 0.2160589207876146, + "grad_norm": 0.29463204757123623, + "learning_rate": 0.0001823840751268275, + "loss": 0.9893, + "step": 1123 + }, + { + "epoch": 0.21625131519615212, + "grad_norm": 0.2802878329344074, + "learning_rate": 0.00018234873435939988, + "loss": 1.183, + "step": 1124 + }, + { + "epoch": 0.21644370960468962, + "grad_norm": 0.26295221742941355, + "learning_rate": 0.00018231336160867275, + "loss": 1.0616, + "step": 1125 + }, + { + "epoch": 0.2166361040132271, + "grad_norm": 0.27645528237930805, + "learning_rate": 0.00018227795688838445, + "loss": 1.1208, + "step": 1126 + }, + { + 
"epoch": 0.2168284984217646, + "grad_norm": 0.41457200035887104, + "learning_rate": 0.00018224252021228579, + "loss": 1.158, + "step": 1127 + }, + { + "epoch": 0.21702089283030213, + "grad_norm": 0.2504237141422832, + "learning_rate": 0.00018220705159413994, + "loss": 1.0543, + "step": 1128 + }, + { + "epoch": 0.21721328723883962, + "grad_norm": 0.3564033693238237, + "learning_rate": 0.00018217155104772258, + "loss": 1.0876, + "step": 1129 + }, + { + "epoch": 0.2174056816473771, + "grad_norm": 0.4176021547932737, + "learning_rate": 0.0001821360185868216, + "loss": 1.1573, + "step": 1130 + }, + { + "epoch": 0.21759807605591464, + "grad_norm": 0.27679006739125084, + "learning_rate": 0.00018210045422523745, + "loss": 1.1081, + "step": 1131 + }, + { + "epoch": 0.21779047046445213, + "grad_norm": 0.27540677311560946, + "learning_rate": 0.00018206485797678295, + "loss": 1.1444, + "step": 1132 + }, + { + "epoch": 0.21798286487298962, + "grad_norm": 0.32759307506513685, + "learning_rate": 0.00018202922985528322, + "loss": 1.2375, + "step": 1133 + }, + { + "epoch": 0.21817525928152712, + "grad_norm": 0.38015487387193025, + "learning_rate": 0.00018199356987457586, + "loss": 1.1722, + "step": 1134 + }, + { + "epoch": 0.21836765369006464, + "grad_norm": 0.35890990510196485, + "learning_rate": 0.00018195787804851078, + "loss": 1.1687, + "step": 1135 + }, + { + "epoch": 0.21856004809860213, + "grad_norm": 0.3268318356976673, + "learning_rate": 0.00018192215439095024, + "loss": 1.0936, + "step": 1136 + }, + { + "epoch": 0.21875244250713963, + "grad_norm": 0.2862558292108705, + "learning_rate": 0.00018188639891576893, + "loss": 1.0997, + "step": 1137 + }, + { + "epoch": 0.21894483691567715, + "grad_norm": 0.3225348998051424, + "learning_rate": 0.00018185061163685385, + "loss": 1.0969, + "step": 1138 + }, + { + "epoch": 0.21913723132421464, + "grad_norm": 0.32115413379940383, + "learning_rate": 0.0001818147925681044, + "loss": 0.995, + "step": 1139 + }, + { + "epoch": 0.21932962573275214, + "grad_norm": 0.34817571357384747, + "learning_rate": 0.00018177894172343226, + "loss": 1.2191, + "step": 1140 + }, + { + "epoch": 0.21952202014128963, + "grad_norm": 0.290960976788888, + "learning_rate": 0.00018174305911676148, + "loss": 1.0787, + "step": 1141 + }, + { + "epoch": 0.21971441454982715, + "grad_norm": 0.28993414066683065, + "learning_rate": 0.00018170714476202848, + "loss": 1.0371, + "step": 1142 + }, + { + "epoch": 0.21990680895836465, + "grad_norm": 0.30313896925824735, + "learning_rate": 0.00018167119867318197, + "loss": 1.069, + "step": 1143 + }, + { + "epoch": 0.22009920336690214, + "grad_norm": 0.2418135547481211, + "learning_rate": 0.00018163522086418299, + "loss": 1.1091, + "step": 1144 + }, + { + "epoch": 0.22029159777543966, + "grad_norm": 0.28439664411507437, + "learning_rate": 0.00018159921134900487, + "loss": 1.1865, + "step": 1145 + }, + { + "epoch": 0.22048399218397716, + "grad_norm": 0.3293388321514027, + "learning_rate": 0.00018156317014163338, + "loss": 1.1025, + "step": 1146 + }, + { + "epoch": 0.22067638659251465, + "grad_norm": 0.3454728241335007, + "learning_rate": 0.0001815270972560664, + "loss": 1.1852, + "step": 1147 + }, + { + "epoch": 0.22086878100105214, + "grad_norm": 0.31567062075947233, + "learning_rate": 0.00018149099270631434, + "loss": 1.0556, + "step": 1148 + }, + { + "epoch": 0.22106117540958967, + "grad_norm": 0.3215089924268069, + "learning_rate": 0.00018145485650639974, + "loss": 1.1194, + "step": 1149 + }, + { + "epoch": 0.22125356981812716, + "grad_norm": 
0.39734413486853337, + "learning_rate": 0.00018141868867035745, + "loss": 1.1557, + "step": 1150 + }, + { + "epoch": 0.22144596422666465, + "grad_norm": 0.28251570115437874, + "learning_rate": 0.00018138248921223467, + "loss": 1.1367, + "step": 1151 + }, + { + "epoch": 0.22163835863520218, + "grad_norm": 0.3243676259260902, + "learning_rate": 0.00018134625814609083, + "loss": 1.0262, + "step": 1152 + }, + { + "epoch": 0.22183075304373967, + "grad_norm": 0.29467276277484045, + "learning_rate": 0.00018130999548599767, + "loss": 1.242, + "step": 1153 + }, + { + "epoch": 0.22202314745227716, + "grad_norm": 0.2534482334810057, + "learning_rate": 0.00018127370124603926, + "loss": 1.079, + "step": 1154 + }, + { + "epoch": 0.22221554186081466, + "grad_norm": 0.29830626369083707, + "learning_rate": 0.00018123737544031176, + "loss": 1.0556, + "step": 1155 + }, + { + "epoch": 0.22240793626935218, + "grad_norm": 0.28095587277034634, + "learning_rate": 0.00018120101808292372, + "loss": 1.1014, + "step": 1156 + }, + { + "epoch": 0.22260033067788967, + "grad_norm": 0.33055926490969234, + "learning_rate": 0.000181164629187996, + "loss": 1.1262, + "step": 1157 + }, + { + "epoch": 0.22279272508642717, + "grad_norm": 0.2989713967872641, + "learning_rate": 0.00018112820876966152, + "loss": 1.1338, + "step": 1158 + }, + { + "epoch": 0.2229851194949647, + "grad_norm": 0.3102486797503027, + "learning_rate": 0.00018109175684206559, + "loss": 1.2324, + "step": 1159 + }, + { + "epoch": 0.22317751390350218, + "grad_norm": 0.29154857656779337, + "learning_rate": 0.00018105527341936574, + "loss": 1.1259, + "step": 1160 + }, + { + "epoch": 0.22336990831203968, + "grad_norm": 0.28834012144968674, + "learning_rate": 0.0001810187585157317, + "loss": 1.1156, + "step": 1161 + }, + { + "epoch": 0.22356230272057717, + "grad_norm": 0.3206750966434899, + "learning_rate": 0.00018098221214534542, + "loss": 1.0863, + "step": 1162 + }, + { + "epoch": 0.2237546971291147, + "grad_norm": 0.2853576319076801, + "learning_rate": 0.00018094563432240107, + "loss": 1.0819, + "step": 1163 + }, + { + "epoch": 0.2239470915376522, + "grad_norm": 0.3347862601048052, + "learning_rate": 0.0001809090250611051, + "loss": 1.1518, + "step": 1164 + }, + { + "epoch": 0.22413948594618968, + "grad_norm": 0.3679728314356357, + "learning_rate": 0.00018087238437567614, + "loss": 1.1644, + "step": 1165 + }, + { + "epoch": 0.2243318803547272, + "grad_norm": 0.27506822040907014, + "learning_rate": 0.00018083571228034496, + "loss": 1.1143, + "step": 1166 + }, + { + "epoch": 0.2245242747632647, + "grad_norm": 0.3802297920197197, + "learning_rate": 0.0001807990087893546, + "loss": 1.1691, + "step": 1167 + }, + { + "epoch": 0.2247166691718022, + "grad_norm": 0.4106487228143322, + "learning_rate": 0.00018076227391696027, + "loss": 1.1561, + "step": 1168 + }, + { + "epoch": 0.22490906358033969, + "grad_norm": 0.26517102054630975, + "learning_rate": 0.0001807255076774294, + "loss": 1.05, + "step": 1169 + }, + { + "epoch": 0.2251014579888772, + "grad_norm": 0.322212844818469, + "learning_rate": 0.00018068871008504152, + "loss": 1.0668, + "step": 1170 + }, + { + "epoch": 0.2252938523974147, + "grad_norm": 0.30209211301167815, + "learning_rate": 0.00018065188115408843, + "loss": 1.1396, + "step": 1171 + }, + { + "epoch": 0.2254862468059522, + "grad_norm": 0.26949018914889616, + "learning_rate": 0.00018061502089887407, + "loss": 1.1514, + "step": 1172 + }, + { + "epoch": 0.22567864121448972, + "grad_norm": 0.281250937196479, + "learning_rate": 0.0001805781293337145, + 
"loss": 1.22, + "step": 1173 + }, + { + "epoch": 0.2258710356230272, + "grad_norm": 0.314498538535587, + "learning_rate": 0.00018054120647293797, + "loss": 1.149, + "step": 1174 + }, + { + "epoch": 0.2260634300315647, + "grad_norm": 0.4176947148865461, + "learning_rate": 0.00018050425233088495, + "loss": 1.174, + "step": 1175 + }, + { + "epoch": 0.2262558244401022, + "grad_norm": 0.3435354809332262, + "learning_rate": 0.000180467266921908, + "loss": 1.2005, + "step": 1176 + }, + { + "epoch": 0.22644821884863972, + "grad_norm": 0.31856542143170125, + "learning_rate": 0.00018043025026037176, + "loss": 1.0522, + "step": 1177 + }, + { + "epoch": 0.22664061325717721, + "grad_norm": 0.34809079323389497, + "learning_rate": 0.00018039320236065313, + "loss": 1.1386, + "step": 1178 + }, + { + "epoch": 0.2268330076657147, + "grad_norm": 0.37234319975743585, + "learning_rate": 0.0001803561232371411, + "loss": 1.2279, + "step": 1179 + }, + { + "epoch": 0.22702540207425223, + "grad_norm": 0.2848123093467546, + "learning_rate": 0.0001803190129042367, + "loss": 1.1423, + "step": 1180 + }, + { + "epoch": 0.22721779648278972, + "grad_norm": 0.3219113886369367, + "learning_rate": 0.00018028187137635325, + "loss": 1.0994, + "step": 1181 + }, + { + "epoch": 0.22741019089132722, + "grad_norm": 0.33774998210198537, + "learning_rate": 0.000180244698667916, + "loss": 1.109, + "step": 1182 + }, + { + "epoch": 0.2276025852998647, + "grad_norm": 0.32188700800860826, + "learning_rate": 0.00018020749479336247, + "loss": 1.157, + "step": 1183 + }, + { + "epoch": 0.22779497970840223, + "grad_norm": 0.30941105723702805, + "learning_rate": 0.00018017025976714218, + "loss": 1.0672, + "step": 1184 + }, + { + "epoch": 0.22798737411693973, + "grad_norm": 0.25333574862334907, + "learning_rate": 0.00018013299360371686, + "loss": 1.0461, + "step": 1185 + }, + { + "epoch": 0.22817976852547722, + "grad_norm": 0.32315928483092893, + "learning_rate": 0.00018009569631756013, + "loss": 1.0754, + "step": 1186 + }, + { + "epoch": 0.22837216293401472, + "grad_norm": 0.29511060133275824, + "learning_rate": 0.0001800583679231579, + "loss": 1.1893, + "step": 1187 + }, + { + "epoch": 0.22856455734255224, + "grad_norm": 0.27697451351241503, + "learning_rate": 0.0001800210084350081, + "loss": 1.1206, + "step": 1188 + }, + { + "epoch": 0.22875695175108973, + "grad_norm": 0.3479481888443204, + "learning_rate": 0.0001799836178676207, + "loss": 1.1708, + "step": 1189 + }, + { + "epoch": 0.22894934615962723, + "grad_norm": 0.2795161515312204, + "learning_rate": 0.00017994619623551777, + "loss": 1.1509, + "step": 1190 + }, + { + "epoch": 0.22914174056816475, + "grad_norm": 0.2532610764898941, + "learning_rate": 0.00017990874355323344, + "loss": 1.1401, + "step": 1191 + }, + { + "epoch": 0.22933413497670224, + "grad_norm": 0.319630712163536, + "learning_rate": 0.00017987125983531392, + "loss": 1.1577, + "step": 1192 + }, + { + "epoch": 0.22952652938523974, + "grad_norm": 0.2574571635106825, + "learning_rate": 0.00017983374509631742, + "loss": 1.0504, + "step": 1193 + }, + { + "epoch": 0.22971892379377723, + "grad_norm": 0.3250445193011001, + "learning_rate": 0.00017979619935081426, + "loss": 1.1761, + "step": 1194 + }, + { + "epoch": 0.22991131820231475, + "grad_norm": 0.34924141858396734, + "learning_rate": 0.0001797586226133867, + "loss": 1.1741, + "step": 1195 + }, + { + "epoch": 0.23010371261085225, + "grad_norm": 0.2772679255919962, + "learning_rate": 0.00017972101489862924, + "loss": 1.0978, + "step": 1196 + }, + { + "epoch": 
0.23029610701938974, + "grad_norm": 0.28693778256166075, + "learning_rate": 0.00017968337622114824, + "loss": 1.0746, + "step": 1197 + }, + { + "epoch": 0.23048850142792726, + "grad_norm": 0.2754003056772629, + "learning_rate": 0.00017964570659556207, + "loss": 1.1347, + "step": 1198 + }, + { + "epoch": 0.23068089583646476, + "grad_norm": 0.3823588245718991, + "learning_rate": 0.0001796080060365012, + "loss": 1.2152, + "step": 1199 + }, + { + "epoch": 0.23087329024500225, + "grad_norm": 0.26500918067157575, + "learning_rate": 0.00017957027455860813, + "loss": 1.2071, + "step": 1200 + }, + { + "epoch": 0.23106568465353974, + "grad_norm": 0.29457638314716683, + "learning_rate": 0.0001795325121765373, + "loss": 1.0402, + "step": 1201 + }, + { + "epoch": 0.23125807906207727, + "grad_norm": 0.3350062968757272, + "learning_rate": 0.00017949471890495522, + "loss": 1.1064, + "step": 1202 + }, + { + "epoch": 0.23145047347061476, + "grad_norm": 0.29950556650813864, + "learning_rate": 0.0001794568947585403, + "loss": 1.1487, + "step": 1203 + }, + { + "epoch": 0.23164286787915225, + "grad_norm": 0.3487488652341284, + "learning_rate": 0.00017941903975198306, + "loss": 1.0572, + "step": 1204 + }, + { + "epoch": 0.23183526228768978, + "grad_norm": 0.30685417559592054, + "learning_rate": 0.00017938115389998593, + "loss": 1.0642, + "step": 1205 + }, + { + "epoch": 0.23202765669622727, + "grad_norm": 0.34244144646472363, + "learning_rate": 0.00017934323721726332, + "loss": 1.1652, + "step": 1206 + }, + { + "epoch": 0.23222005110476476, + "grad_norm": 0.27706379853450247, + "learning_rate": 0.00017930528971854163, + "loss": 1.1486, + "step": 1207 + }, + { + "epoch": 0.23241244551330226, + "grad_norm": 0.28539288522573164, + "learning_rate": 0.00017926731141855928, + "loss": 1.1481, + "step": 1208 + }, + { + "epoch": 0.23260483992183978, + "grad_norm": 0.35450120298693044, + "learning_rate": 0.00017922930233206657, + "loss": 1.0455, + "step": 1209 + }, + { + "epoch": 0.23279723433037727, + "grad_norm": 0.2667447976577724, + "learning_rate": 0.00017919126247382575, + "loss": 1.0984, + "step": 1210 + }, + { + "epoch": 0.23298962873891477, + "grad_norm": 0.42106334402168993, + "learning_rate": 0.00017915319185861118, + "loss": 1.0382, + "step": 1211 + }, + { + "epoch": 0.2331820231474523, + "grad_norm": 0.28217679770550086, + "learning_rate": 0.0001791150905012089, + "loss": 1.1259, + "step": 1212 + }, + { + "epoch": 0.23337441755598978, + "grad_norm": 0.34894691076199397, + "learning_rate": 0.00017907695841641713, + "loss": 1.1057, + "step": 1213 + }, + { + "epoch": 0.23356681196452728, + "grad_norm": 0.2750436859307013, + "learning_rate": 0.00017903879561904598, + "loss": 1.1406, + "step": 1214 + }, + { + "epoch": 0.23375920637306477, + "grad_norm": 0.26620287407774024, + "learning_rate": 0.0001790006021239173, + "loss": 1.1932, + "step": 1215 + }, + { + "epoch": 0.2339516007816023, + "grad_norm": 0.2938806768287713, + "learning_rate": 0.00017896237794586513, + "loss": 1.1647, + "step": 1216 + }, + { + "epoch": 0.2341439951901398, + "grad_norm": 0.2743820663073566, + "learning_rate": 0.0001789241230997352, + "loss": 1.1365, + "step": 1217 + }, + { + "epoch": 0.23433638959867728, + "grad_norm": 0.3692521698288092, + "learning_rate": 0.00017888583760038534, + "loss": 1.1886, + "step": 1218 + }, + { + "epoch": 0.2345287840072148, + "grad_norm": 0.3158175244074631, + "learning_rate": 0.00017884752146268512, + "loss": 1.1692, + "step": 1219 + }, + { + "epoch": 0.2347211784157523, + "grad_norm": 0.2883654846012361, 
+ "learning_rate": 0.00017880917470151615, + "loss": 1.1261, + "step": 1220 + }, + { + "epoch": 0.2349135728242898, + "grad_norm": 0.28889478488936415, + "learning_rate": 0.00017877079733177184, + "loss": 0.9975, + "step": 1221 + }, + { + "epoch": 0.23510596723282728, + "grad_norm": 0.26912644178255984, + "learning_rate": 0.0001787323893683575, + "loss": 1.1787, + "step": 1222 + }, + { + "epoch": 0.2352983616413648, + "grad_norm": 0.3864415141838015, + "learning_rate": 0.0001786939508261904, + "loss": 1.1553, + "step": 1223 + }, + { + "epoch": 0.2354907560499023, + "grad_norm": 0.32914781984423924, + "learning_rate": 0.00017865548172019957, + "loss": 1.1807, + "step": 1224 + }, + { + "epoch": 0.2356831504584398, + "grad_norm": 0.3063745994020058, + "learning_rate": 0.00017861698206532596, + "loss": 1.1849, + "step": 1225 + }, + { + "epoch": 0.23587554486697732, + "grad_norm": 0.2822649345099394, + "learning_rate": 0.00017857845187652246, + "loss": 1.188, + "step": 1226 + }, + { + "epoch": 0.2360679392755148, + "grad_norm": 0.3428920492026431, + "learning_rate": 0.00017853989116875375, + "loss": 1.048, + "step": 1227 + }, + { + "epoch": 0.2362603336840523, + "grad_norm": 0.29465300250942583, + "learning_rate": 0.00017850129995699624, + "loss": 1.1281, + "step": 1228 + }, + { + "epoch": 0.2364527280925898, + "grad_norm": 0.2749752378776852, + "learning_rate": 0.00017846267825623845, + "loss": 1.1953, + "step": 1229 + }, + { + "epoch": 0.23664512250112732, + "grad_norm": 0.2773158783886057, + "learning_rate": 0.00017842402608148053, + "loss": 1.1553, + "step": 1230 + }, + { + "epoch": 0.2368375169096648, + "grad_norm": 0.31520186939135053, + "learning_rate": 0.00017838534344773453, + "loss": 1.1738, + "step": 1231 + }, + { + "epoch": 0.2370299113182023, + "grad_norm": 0.28568878520482865, + "learning_rate": 0.00017834663037002443, + "loss": 1.0519, + "step": 1232 + }, + { + "epoch": 0.23722230572673983, + "grad_norm": 0.30214167610218123, + "learning_rate": 0.00017830788686338584, + "loss": 1.1558, + "step": 1233 + }, + { + "epoch": 0.23741470013527732, + "grad_norm": 0.27876486396876926, + "learning_rate": 0.00017826911294286637, + "loss": 1.1407, + "step": 1234 + }, + { + "epoch": 0.23760709454381482, + "grad_norm": 0.28188499197703776, + "learning_rate": 0.0001782303086235253, + "loss": 1.1676, + "step": 1235 + }, + { + "epoch": 0.2377994889523523, + "grad_norm": 0.274298371565878, + "learning_rate": 0.0001781914739204338, + "loss": 1.0646, + "step": 1236 + }, + { + "epoch": 0.23799188336088983, + "grad_norm": 0.2638538852788373, + "learning_rate": 0.00017815260884867484, + "loss": 1.1829, + "step": 1237 + }, + { + "epoch": 0.23818427776942733, + "grad_norm": 0.30382234224952787, + "learning_rate": 0.00017811371342334317, + "loss": 1.1374, + "step": 1238 + }, + { + "epoch": 0.23837667217796482, + "grad_norm": 0.2847681047396438, + "learning_rate": 0.0001780747876595453, + "loss": 1.129, + "step": 1239 + }, + { + "epoch": 0.23856906658650234, + "grad_norm": 0.27300639332127213, + "learning_rate": 0.00017803583157239956, + "loss": 1.1228, + "step": 1240 + }, + { + "epoch": 0.23876146099503984, + "grad_norm": 0.2702508456930589, + "learning_rate": 0.00017799684517703603, + "loss": 1.162, + "step": 1241 + }, + { + "epoch": 0.23895385540357733, + "grad_norm": 0.2328771461936807, + "learning_rate": 0.0001779578284885966, + "loss": 1.1438, + "step": 1242 + }, + { + "epoch": 0.23914624981211483, + "grad_norm": 0.3637716867076837, + "learning_rate": 0.00017791878152223492, + "loss": 1.0683, + 
"step": 1243 + }, + { + "epoch": 0.23933864422065235, + "grad_norm": 0.36480632819887904, + "learning_rate": 0.00017787970429311632, + "loss": 1.1075, + "step": 1244 + }, + { + "epoch": 0.23953103862918984, + "grad_norm": 0.24593441584995046, + "learning_rate": 0.00017784059681641797, + "loss": 1.1435, + "step": 1245 + }, + { + "epoch": 0.23972343303772733, + "grad_norm": 0.2853362577993421, + "learning_rate": 0.0001778014591073288, + "loss": 1.1555, + "step": 1246 + }, + { + "epoch": 0.23991582744626486, + "grad_norm": 0.3349418304761187, + "learning_rate": 0.00017776229118104942, + "loss": 1.0824, + "step": 1247 + }, + { + "epoch": 0.24010822185480235, + "grad_norm": 0.33607502766195857, + "learning_rate": 0.0001777230930527922, + "loss": 1.0934, + "step": 1248 + }, + { + "epoch": 0.24030061626333984, + "grad_norm": 0.253362720817648, + "learning_rate": 0.00017768386473778123, + "loss": 1.0601, + "step": 1249 + }, + { + "epoch": 0.24049301067187734, + "grad_norm": 0.29191360658259746, + "learning_rate": 0.00017764460625125235, + "loss": 1.1672, + "step": 1250 + }, + { + "epoch": 0.24068540508041486, + "grad_norm": 0.286059804578617, + "learning_rate": 0.0001776053176084531, + "loss": 1.0844, + "step": 1251 + }, + { + "epoch": 0.24087779948895235, + "grad_norm": 0.2660256018022285, + "learning_rate": 0.00017756599882464273, + "loss": 1.0799, + "step": 1252 + }, + { + "epoch": 0.24107019389748985, + "grad_norm": 0.290677237969466, + "learning_rate": 0.00017752664991509224, + "loss": 1.111, + "step": 1253 + }, + { + "epoch": 0.24126258830602734, + "grad_norm": 0.30940893460643915, + "learning_rate": 0.0001774872708950842, + "loss": 1.1163, + "step": 1254 + }, + { + "epoch": 0.24145498271456486, + "grad_norm": 0.30167890473086945, + "learning_rate": 0.0001774478617799131, + "loss": 1.2083, + "step": 1255 + }, + { + "epoch": 0.24164737712310236, + "grad_norm": 0.3065808480956745, + "learning_rate": 0.00017740842258488488, + "loss": 1.0402, + "step": 1256 + }, + { + "epoch": 0.24183977153163985, + "grad_norm": 0.2708046298496205, + "learning_rate": 0.0001773689533253173, + "loss": 1.1752, + "step": 1257 + }, + { + "epoch": 0.24203216594017737, + "grad_norm": 0.2902593409578126, + "learning_rate": 0.00017732945401653977, + "loss": 1.2355, + "step": 1258 + }, + { + "epoch": 0.24222456034871487, + "grad_norm": 0.27686437648111334, + "learning_rate": 0.0001772899246738934, + "loss": 1.1647, + "step": 1259 + }, + { + "epoch": 0.24241695475725236, + "grad_norm": 0.31381879978772104, + "learning_rate": 0.00017725036531273088, + "loss": 0.9973, + "step": 1260 + }, + { + "epoch": 0.24260934916578986, + "grad_norm": 0.36802195227643747, + "learning_rate": 0.00017721077594841662, + "loss": 1.2049, + "step": 1261 + }, + { + "epoch": 0.24280174357432738, + "grad_norm": 0.37794334516855255, + "learning_rate": 0.00017717115659632667, + "loss": 1.2405, + "step": 1262 + }, + { + "epoch": 0.24299413798286487, + "grad_norm": 0.29779155499018656, + "learning_rate": 0.00017713150727184876, + "loss": 1.1347, + "step": 1263 + }, + { + "epoch": 0.24318653239140237, + "grad_norm": 0.29407342168966677, + "learning_rate": 0.00017709182799038222, + "loss": 1.1909, + "step": 1264 + }, + { + "epoch": 0.2433789267999399, + "grad_norm": 0.29804169723370155, + "learning_rate": 0.000177052118767338, + "loss": 1.1199, + "step": 1265 + }, + { + "epoch": 0.24357132120847738, + "grad_norm": 0.342090125715215, + "learning_rate": 0.00017701237961813875, + "loss": 1.1875, + "step": 1266 + }, + { + "epoch": 0.24376371561701488, + 
"grad_norm": 0.35629197989307626, + "learning_rate": 0.00017697261055821863, + "loss": 1.1291, + "step": 1267 + }, + { + "epoch": 0.24395611002555237, + "grad_norm": 0.3234468201376417, + "learning_rate": 0.00017693281160302356, + "loss": 1.134, + "step": 1268 + }, + { + "epoch": 0.2441485044340899, + "grad_norm": 0.29092606485392436, + "learning_rate": 0.00017689298276801093, + "loss": 1.1479, + "step": 1269 + }, + { + "epoch": 0.24434089884262739, + "grad_norm": 0.29993750386802515, + "learning_rate": 0.00017685312406864984, + "loss": 1.1448, + "step": 1270 + }, + { + "epoch": 0.24453329325116488, + "grad_norm": 0.37728993263992666, + "learning_rate": 0.00017681323552042096, + "loss": 1.0735, + "step": 1271 + }, + { + "epoch": 0.2447256876597024, + "grad_norm": 0.34114197040493005, + "learning_rate": 0.00017677331713881648, + "loss": 1.1709, + "step": 1272 + }, + { + "epoch": 0.2449180820682399, + "grad_norm": 0.26461087252963844, + "learning_rate": 0.0001767333689393403, + "loss": 1.0138, + "step": 1273 + }, + { + "epoch": 0.2451104764767774, + "grad_norm": 0.3638451559635149, + "learning_rate": 0.00017669339093750785, + "loss": 1.0947, + "step": 1274 + }, + { + "epoch": 0.24530287088531488, + "grad_norm": 0.35659073194040136, + "learning_rate": 0.00017665338314884607, + "loss": 1.1172, + "step": 1275 + }, + { + "epoch": 0.2454952652938524, + "grad_norm": 0.3402194557182349, + "learning_rate": 0.00017661334558889357, + "loss": 1.1208, + "step": 1276 + }, + { + "epoch": 0.2456876597023899, + "grad_norm": 0.37768772187033833, + "learning_rate": 0.00017657327827320045, + "loss": 1.1458, + "step": 1277 + }, + { + "epoch": 0.2458800541109274, + "grad_norm": 0.30279301402497205, + "learning_rate": 0.0001765331812173284, + "loss": 1.142, + "step": 1278 + }, + { + "epoch": 0.24607244851946491, + "grad_norm": 0.3701728235917981, + "learning_rate": 0.00017649305443685068, + "loss": 1.1281, + "step": 1279 + }, + { + "epoch": 0.2462648429280024, + "grad_norm": 0.3389863139016665, + "learning_rate": 0.000176452897947352, + "loss": 1.0786, + "step": 1280 + }, + { + "epoch": 0.2464572373365399, + "grad_norm": 0.2850160393505762, + "learning_rate": 0.00017641271176442876, + "loss": 1.0966, + "step": 1281 + }, + { + "epoch": 0.2466496317450774, + "grad_norm": 0.3070292820238925, + "learning_rate": 0.00017637249590368877, + "loss": 1.0818, + "step": 1282 + }, + { + "epoch": 0.24684202615361492, + "grad_norm": 0.28431255931479377, + "learning_rate": 0.00017633225038075142, + "loss": 1.0456, + "step": 1283 + }, + { + "epoch": 0.2470344205621524, + "grad_norm": 0.46514505409715884, + "learning_rate": 0.00017629197521124755, + "loss": 1.1113, + "step": 1284 + }, + { + "epoch": 0.2472268149706899, + "grad_norm": 0.3219692065437815, + "learning_rate": 0.00017625167041081965, + "loss": 1.1805, + "step": 1285 + }, + { + "epoch": 0.24741920937922743, + "grad_norm": 0.3857745502111839, + "learning_rate": 0.00017621133599512163, + "loss": 1.1315, + "step": 1286 + }, + { + "epoch": 0.24761160378776492, + "grad_norm": 0.30358705433041244, + "learning_rate": 0.00017617097197981889, + "loss": 1.1414, + "step": 1287 + }, + { + "epoch": 0.24780399819630242, + "grad_norm": 0.3239256918533029, + "learning_rate": 0.0001761305783805883, + "loss": 1.1976, + "step": 1288 + }, + { + "epoch": 0.2479963926048399, + "grad_norm": 0.28888282191266984, + "learning_rate": 0.00017609015521311835, + "loss": 1.0658, + "step": 1289 + }, + { + "epoch": 0.24818878701337743, + "grad_norm": 0.29897663650648887, + "learning_rate": 
0.0001760497024931089, + "loss": 1.1095, + "step": 1290 + }, + { + "epoch": 0.24838118142191493, + "grad_norm": 0.36423970435303815, + "learning_rate": 0.00017600922023627136, + "loss": 1.129, + "step": 1291 + }, + { + "epoch": 0.24857357583045242, + "grad_norm": 0.3246330646747353, + "learning_rate": 0.0001759687084583285, + "loss": 1.1871, + "step": 1292 + }, + { + "epoch": 0.24876597023898994, + "grad_norm": 0.27683582059501827, + "learning_rate": 0.0001759281671750147, + "loss": 1.0987, + "step": 1293 + }, + { + "epoch": 0.24895836464752744, + "grad_norm": 0.3391467350342309, + "learning_rate": 0.00017588759640207563, + "loss": 1.0394, + "step": 1294 + }, + { + "epoch": 0.24915075905606493, + "grad_norm": 0.28629417027256443, + "learning_rate": 0.00017584699615526858, + "loss": 1.1892, + "step": 1295 + }, + { + "epoch": 0.24934315346460242, + "grad_norm": 0.29617680549837705, + "learning_rate": 0.00017580636645036225, + "loss": 1.0634, + "step": 1296 + }, + { + "epoch": 0.24953554787313995, + "grad_norm": 0.3241373057473967, + "learning_rate": 0.0001757657073031367, + "loss": 1.1073, + "step": 1297 + }, + { + "epoch": 0.24972794228167744, + "grad_norm": 0.2741305975028379, + "learning_rate": 0.00017572501872938342, + "loss": 1.1571, + "step": 1298 + }, + { + "epoch": 0.24992033669021493, + "grad_norm": 0.3541371188862645, + "learning_rate": 0.0001756843007449055, + "loss": 1.1162, + "step": 1299 + }, + { + "epoch": 0.2501127310987524, + "grad_norm": 0.2936939680371704, + "learning_rate": 0.00017564355336551727, + "loss": 1.0505, + "step": 1300 + }, + { + "epoch": 0.25030512550728995, + "grad_norm": 0.2949885721381335, + "learning_rate": 0.00017560277660704453, + "loss": 1.112, + "step": 1301 + }, + { + "epoch": 0.25049751991582747, + "grad_norm": 0.2750557636829198, + "learning_rate": 0.00017556197048532456, + "loss": 1.1058, + "step": 1302 + }, + { + "epoch": 0.25068991432436494, + "grad_norm": 0.264142176765815, + "learning_rate": 0.00017552113501620594, + "loss": 1.1379, + "step": 1303 + }, + { + "epoch": 0.25088230873290246, + "grad_norm": 0.42570119279794666, + "learning_rate": 0.00017548027021554874, + "loss": 1.1599, + "step": 1304 + }, + { + "epoch": 0.2510747031414399, + "grad_norm": 0.2707421360257628, + "learning_rate": 0.0001754393760992243, + "loss": 1.1376, + "step": 1305 + }, + { + "epoch": 0.25126709754997745, + "grad_norm": 0.34846977453024036, + "learning_rate": 0.00017539845268311547, + "loss": 1.0918, + "step": 1306 + }, + { + "epoch": 0.25145949195851497, + "grad_norm": 0.3149632413586983, + "learning_rate": 0.00017535749998311647, + "loss": 1.0166, + "step": 1307 + }, + { + "epoch": 0.25165188636705244, + "grad_norm": 0.35554953079039586, + "learning_rate": 0.0001753165180151328, + "loss": 1.0824, + "step": 1308 + }, + { + "epoch": 0.25184428077558996, + "grad_norm": 0.2896831942210788, + "learning_rate": 0.0001752755067950814, + "loss": 1.1177, + "step": 1309 + }, + { + "epoch": 0.2520366751841275, + "grad_norm": 0.26323499493106395, + "learning_rate": 0.0001752344663388906, + "loss": 1.1737, + "step": 1310 + }, + { + "epoch": 0.25222906959266495, + "grad_norm": 0.3186714366459116, + "learning_rate": 0.00017519339666249997, + "loss": 1.0434, + "step": 1311 + }, + { + "epoch": 0.25242146400120247, + "grad_norm": 0.3465213490943624, + "learning_rate": 0.00017515229778186054, + "loss": 1.0519, + "step": 1312 + }, + { + "epoch": 0.25261385840974, + "grad_norm": 0.334036168381196, + "learning_rate": 0.0001751111697129346, + "loss": 1.1315, + "step": 1313 + }, + { + 
"epoch": 0.25280625281827745, + "grad_norm": 0.3621019937457814, + "learning_rate": 0.00017507001247169587, + "loss": 1.1823, + "step": 1314 + }, + { + "epoch": 0.252998647226815, + "grad_norm": 0.26110746967417664, + "learning_rate": 0.00017502882607412933, + "loss": 1.0819, + "step": 1315 + }, + { + "epoch": 0.2531910416353525, + "grad_norm": 0.33215603669557836, + "learning_rate": 0.00017498761053623128, + "loss": 1.0696, + "step": 1316 + }, + { + "epoch": 0.25338343604388996, + "grad_norm": 0.3089901410957134, + "learning_rate": 0.0001749463658740094, + "loss": 1.1566, + "step": 1317 + }, + { + "epoch": 0.2535758304524275, + "grad_norm": 0.2676814671588705, + "learning_rate": 0.00017490509210348263, + "loss": 1.1228, + "step": 1318 + }, + { + "epoch": 0.25376822486096495, + "grad_norm": 0.315852099385859, + "learning_rate": 0.0001748637892406812, + "loss": 1.1903, + "step": 1319 + }, + { + "epoch": 0.2539606192695025, + "grad_norm": 0.31970930854664487, + "learning_rate": 0.0001748224573016467, + "loss": 1.0864, + "step": 1320 + }, + { + "epoch": 0.25415301367804, + "grad_norm": 0.27790099021632264, + "learning_rate": 0.00017478109630243195, + "loss": 1.0916, + "step": 1321 + }, + { + "epoch": 0.25434540808657746, + "grad_norm": 0.30558296220478814, + "learning_rate": 0.0001747397062591011, + "loss": 1.1287, + "step": 1322 + }, + { + "epoch": 0.254537802495115, + "grad_norm": 0.2702814127579277, + "learning_rate": 0.00017469828718772958, + "loss": 1.1779, + "step": 1323 + }, + { + "epoch": 0.2547301969036525, + "grad_norm": 0.297505295627331, + "learning_rate": 0.00017465683910440405, + "loss": 1.1328, + "step": 1324 + }, + { + "epoch": 0.25492259131219, + "grad_norm": 0.26769827486566683, + "learning_rate": 0.00017461536202522247, + "loss": 1.1379, + "step": 1325 + }, + { + "epoch": 0.2551149857207275, + "grad_norm": 0.355058893755735, + "learning_rate": 0.0001745738559662941, + "loss": 1.1411, + "step": 1326 + }, + { + "epoch": 0.255307380129265, + "grad_norm": 0.309347595436928, + "learning_rate": 0.00017453232094373936, + "loss": 1.1696, + "step": 1327 + }, + { + "epoch": 0.2554997745378025, + "grad_norm": 0.4476075045732932, + "learning_rate": 0.00017449075697369005, + "loss": 1.1806, + "step": 1328 + }, + { + "epoch": 0.25569216894634, + "grad_norm": 0.3406162517718127, + "learning_rate": 0.00017444916407228904, + "loss": 1.186, + "step": 1329 + }, + { + "epoch": 0.2558845633548775, + "grad_norm": 0.24096468980054248, + "learning_rate": 0.0001744075422556906, + "loss": 1.1641, + "step": 1330 + }, + { + "epoch": 0.256076957763415, + "grad_norm": 0.30086294001288766, + "learning_rate": 0.00017436589154006012, + "loss": 1.0826, + "step": 1331 + }, + { + "epoch": 0.2562693521719525, + "grad_norm": 0.28676636236735553, + "learning_rate": 0.00017432421194157432, + "loss": 1.1188, + "step": 1332 + }, + { + "epoch": 0.25646174658049, + "grad_norm": 0.34417627273508167, + "learning_rate": 0.000174282503476421, + "loss": 1.0832, + "step": 1333 + }, + { + "epoch": 0.2566541409890275, + "grad_norm": 0.35048610341297504, + "learning_rate": 0.00017424076616079934, + "loss": 1.1534, + "step": 1334 + }, + { + "epoch": 0.256846535397565, + "grad_norm": 0.3979590056134015, + "learning_rate": 0.00017419900001091953, + "loss": 1.1599, + "step": 1335 + }, + { + "epoch": 0.2570389298061025, + "grad_norm": 0.33763286483264704, + "learning_rate": 0.00017415720504300314, + "loss": 1.0098, + "step": 1336 + }, + { + "epoch": 0.25723132421464, + "grad_norm": 0.3576401448908834, + "learning_rate": 
0.00017411538127328283, + "loss": 1.1871, + "step": 1337 + }, + { + "epoch": 0.25742371862317753, + "grad_norm": 0.35276211463307466, + "learning_rate": 0.00017407352871800244, + "loss": 1.0371, + "step": 1338 + }, + { + "epoch": 0.257616113031715, + "grad_norm": 0.29221167111412355, + "learning_rate": 0.00017403164739341706, + "loss": 1.1255, + "step": 1339 + }, + { + "epoch": 0.2578085074402525, + "grad_norm": 0.36273868784627933, + "learning_rate": 0.0001739897373157929, + "loss": 1.1714, + "step": 1340 + }, + { + "epoch": 0.25800090184879004, + "grad_norm": 0.2755318015574914, + "learning_rate": 0.00017394779850140734, + "loss": 1.1595, + "step": 1341 + }, + { + "epoch": 0.2581932962573275, + "grad_norm": 0.28706153346378777, + "learning_rate": 0.00017390583096654896, + "loss": 1.0942, + "step": 1342 + }, + { + "epoch": 0.25838569066586503, + "grad_norm": 0.3775394537185136, + "learning_rate": 0.00017386383472751743, + "loss": 1.1813, + "step": 1343 + }, + { + "epoch": 0.25857808507440255, + "grad_norm": 0.3323319561680617, + "learning_rate": 0.00017382180980062364, + "loss": 1.0759, + "step": 1344 + }, + { + "epoch": 0.25877047948294, + "grad_norm": 0.255644721439666, + "learning_rate": 0.00017377975620218953, + "loss": 1.0823, + "step": 1345 + }, + { + "epoch": 0.25896287389147754, + "grad_norm": 0.2841415414848638, + "learning_rate": 0.00017373767394854834, + "loss": 1.1767, + "step": 1346 + }, + { + "epoch": 0.259155268300015, + "grad_norm": 0.3228337200242913, + "learning_rate": 0.00017369556305604422, + "loss": 1.0592, + "step": 1347 + }, + { + "epoch": 0.25934766270855253, + "grad_norm": 0.26735867803353425, + "learning_rate": 0.00017365342354103264, + "loss": 1.1682, + "step": 1348 + }, + { + "epoch": 0.25954005711709005, + "grad_norm": 0.29059786890977607, + "learning_rate": 0.00017361125541988002, + "loss": 1.1384, + "step": 1349 + }, + { + "epoch": 0.2597324515256275, + "grad_norm": 0.2771992032868704, + "learning_rate": 0.00017356905870896406, + "loss": 1.1535, + "step": 1350 + }, + { + "epoch": 0.25992484593416504, + "grad_norm": 0.24527673864737073, + "learning_rate": 0.0001735268334246734, + "loss": 1.1916, + "step": 1351 + }, + { + "epoch": 0.26011724034270256, + "grad_norm": 0.28637376823554156, + "learning_rate": 0.0001734845795834079, + "loss": 1.1167, + "step": 1352 + }, + { + "epoch": 0.26030963475124, + "grad_norm": 0.29057489302019385, + "learning_rate": 0.00017344229720157845, + "loss": 1.043, + "step": 1353 + }, + { + "epoch": 0.26050202915977755, + "grad_norm": 0.3455368633759316, + "learning_rate": 0.00017339998629560706, + "loss": 1.1397, + "step": 1354 + }, + { + "epoch": 0.26069442356831507, + "grad_norm": 0.32360951955328404, + "learning_rate": 0.00017335764688192678, + "loss": 1.0493, + "step": 1355 + }, + { + "epoch": 0.26088681797685254, + "grad_norm": 0.34124170919272556, + "learning_rate": 0.0001733152789769817, + "loss": 1.0926, + "step": 1356 + }, + { + "epoch": 0.26107921238539006, + "grad_norm": 0.353349847992069, + "learning_rate": 0.00017327288259722713, + "loss": 1.0655, + "step": 1357 + }, + { + "epoch": 0.2612716067939275, + "grad_norm": 0.2622086953275031, + "learning_rate": 0.00017323045775912926, + "loss": 1.193, + "step": 1358 + }, + { + "epoch": 0.26146400120246505, + "grad_norm": 0.39105781315914395, + "learning_rate": 0.00017318800447916542, + "loss": 1.1149, + "step": 1359 + }, + { + "epoch": 0.26165639561100257, + "grad_norm": 0.30976968640013536, + "learning_rate": 0.00017314552277382402, + "loss": 1.0476, + "step": 1360 + }, + { + 
"epoch": 0.26184879001954003, + "grad_norm": 0.26619682359698926, + "learning_rate": 0.00017310301265960446, + "loss": 1.1216, + "step": 1361 + }, + { + "epoch": 0.26204118442807756, + "grad_norm": 0.3209847711273689, + "learning_rate": 0.00017306047415301706, + "loss": 1.0611, + "step": 1362 + }, + { + "epoch": 0.2622335788366151, + "grad_norm": 0.2725241445577398, + "learning_rate": 0.00017301790727058345, + "loss": 1.0797, + "step": 1363 + }, + { + "epoch": 0.26242597324515254, + "grad_norm": 0.44269612569447203, + "learning_rate": 0.000172975312028836, + "loss": 1.0151, + "step": 1364 + }, + { + "epoch": 0.26261836765369007, + "grad_norm": 0.3214987078674056, + "learning_rate": 0.00017293268844431828, + "loss": 1.1846, + "step": 1365 + }, + { + "epoch": 0.2628107620622276, + "grad_norm": 0.2755490965984195, + "learning_rate": 0.00017289003653358473, + "loss": 1.1428, + "step": 1366 + }, + { + "epoch": 0.26300315647076505, + "grad_norm": 0.3236620776652681, + "learning_rate": 0.00017284735631320093, + "loss": 1.0628, + "step": 1367 + }, + { + "epoch": 0.2631955508793026, + "grad_norm": 0.43103719426319725, + "learning_rate": 0.00017280464779974335, + "loss": 1.1225, + "step": 1368 + }, + { + "epoch": 0.2633879452878401, + "grad_norm": 0.29182915995559056, + "learning_rate": 0.0001727619110097995, + "loss": 1.1525, + "step": 1369 + }, + { + "epoch": 0.26358033969637756, + "grad_norm": 0.3429150854124409, + "learning_rate": 0.00017271914595996782, + "loss": 1.1199, + "step": 1370 + }, + { + "epoch": 0.2637727341049151, + "grad_norm": 0.2637758803009464, + "learning_rate": 0.00017267635266685782, + "loss": 1.0769, + "step": 1371 + }, + { + "epoch": 0.26396512851345255, + "grad_norm": 0.2935984788774275, + "learning_rate": 0.0001726335311470899, + "loss": 1.0605, + "step": 1372 + }, + { + "epoch": 0.2641575229219901, + "grad_norm": 0.3023912442325294, + "learning_rate": 0.00017259068141729542, + "loss": 1.1168, + "step": 1373 + }, + { + "epoch": 0.2643499173305276, + "grad_norm": 0.3315555055672322, + "learning_rate": 0.00017254780349411675, + "loss": 1.1239, + "step": 1374 + }, + { + "epoch": 0.26454231173906506, + "grad_norm": 0.3352022510656215, + "learning_rate": 0.00017250489739420718, + "loss": 1.0892, + "step": 1375 + }, + { + "epoch": 0.2647347061476026, + "grad_norm": 0.37205721319154095, + "learning_rate": 0.00017246196313423093, + "loss": 1.0428, + "step": 1376 + }, + { + "epoch": 0.2649271005561401, + "grad_norm": 0.3437422788419956, + "learning_rate": 0.00017241900073086317, + "loss": 1.0495, + "step": 1377 + }, + { + "epoch": 0.26511949496467757, + "grad_norm": 0.2800951366405879, + "learning_rate": 0.00017237601020079002, + "loss": 1.1672, + "step": 1378 + }, + { + "epoch": 0.2653118893732151, + "grad_norm": 0.33395863707201673, + "learning_rate": 0.0001723329915607085, + "loss": 1.1359, + "step": 1379 + }, + { + "epoch": 0.2655042837817526, + "grad_norm": 0.27940732767555193, + "learning_rate": 0.00017228994482732651, + "loss": 1.2608, + "step": 1380 + }, + { + "epoch": 0.2656966781902901, + "grad_norm": 0.4140561343379944, + "learning_rate": 0.000172246870017363, + "loss": 1.0091, + "step": 1381 + }, + { + "epoch": 0.2658890725988276, + "grad_norm": 0.31906365134682535, + "learning_rate": 0.00017220376714754764, + "loss": 1.031, + "step": 1382 + }, + { + "epoch": 0.2660814670073651, + "grad_norm": 0.2629626181918077, + "learning_rate": 0.00017216063623462112, + "loss": 1.1009, + "step": 1383 + }, + { + "epoch": 0.2662738614159026, + "grad_norm": 0.3459677194379836, + 
"learning_rate": 0.00017211747729533502, + "loss": 1.1798, + "step": 1384 + }, + { + "epoch": 0.2664662558244401, + "grad_norm": 0.26964009886145485, + "learning_rate": 0.00017207429034645175, + "loss": 1.1205, + "step": 1385 + }, + { + "epoch": 0.2666586502329776, + "grad_norm": 0.28465305019450465, + "learning_rate": 0.0001720310754047446, + "loss": 1.1774, + "step": 1386 + }, + { + "epoch": 0.2668510446415151, + "grad_norm": 0.3119888541964351, + "learning_rate": 0.00017198783248699779, + "loss": 1.1608, + "step": 1387 + }, + { + "epoch": 0.2670434390500526, + "grad_norm": 0.3944044220649418, + "learning_rate": 0.00017194456161000635, + "loss": 1.1544, + "step": 1388 + }, + { + "epoch": 0.2672358334585901, + "grad_norm": 0.3697790963912528, + "learning_rate": 0.0001719012627905762, + "loss": 1.0404, + "step": 1389 + }, + { + "epoch": 0.2674282278671276, + "grad_norm": 0.28279454414641186, + "learning_rate": 0.00017185793604552408, + "loss": 1.012, + "step": 1390 + }, + { + "epoch": 0.26762062227566513, + "grad_norm": 0.29364721233012775, + "learning_rate": 0.00017181458139167764, + "loss": 1.0722, + "step": 1391 + }, + { + "epoch": 0.2678130166842026, + "grad_norm": 0.30657863225904525, + "learning_rate": 0.00017177119884587535, + "loss": 1.1353, + "step": 1392 + }, + { + "epoch": 0.2680054110927401, + "grad_norm": 0.3001832172375664, + "learning_rate": 0.0001717277884249664, + "loss": 1.1595, + "step": 1393 + }, + { + "epoch": 0.26819780550127764, + "grad_norm": 0.2867900108041073, + "learning_rate": 0.00017168435014581094, + "loss": 1.1331, + "step": 1394 + }, + { + "epoch": 0.2683901999098151, + "grad_norm": 0.3194360701346741, + "learning_rate": 0.0001716408840252799, + "loss": 1.1501, + "step": 1395 + }, + { + "epoch": 0.26858259431835263, + "grad_norm": 0.3434192596775916, + "learning_rate": 0.00017159739008025505, + "loss": 1.0014, + "step": 1396 + }, + { + "epoch": 0.26877498872689015, + "grad_norm": 0.25958055228442156, + "learning_rate": 0.0001715538683276289, + "loss": 1.1398, + "step": 1397 + }, + { + "epoch": 0.2689673831354276, + "grad_norm": 0.2800696009487632, + "learning_rate": 0.0001715103187843048, + "loss": 1.1148, + "step": 1398 + }, + { + "epoch": 0.26915977754396514, + "grad_norm": 0.2666934809970182, + "learning_rate": 0.0001714667414671969, + "loss": 1.1001, + "step": 1399 + }, + { + "epoch": 0.2693521719525026, + "grad_norm": 0.3300122963342882, + "learning_rate": 0.0001714231363932301, + "loss": 1.1367, + "step": 1400 + }, + { + "epoch": 0.2695445663610401, + "grad_norm": 0.288944250780435, + "learning_rate": 0.00017137950357934016, + "loss": 1.1034, + "step": 1401 + }, + { + "epoch": 0.26973696076957765, + "grad_norm": 0.3945039390178446, + "learning_rate": 0.00017133584304247353, + "loss": 1.2764, + "step": 1402 + }, + { + "epoch": 0.2699293551781151, + "grad_norm": 0.34469031247437065, + "learning_rate": 0.00017129215479958745, + "loss": 1.1197, + "step": 1403 + }, + { + "epoch": 0.27012174958665264, + "grad_norm": 0.3067648446670519, + "learning_rate": 0.00017124843886765, + "loss": 1.0654, + "step": 1404 + }, + { + "epoch": 0.27031414399519016, + "grad_norm": 0.27869117175923364, + "learning_rate": 0.00017120469526363982, + "loss": 1.1095, + "step": 1405 + }, + { + "epoch": 0.2705065384037276, + "grad_norm": 0.33802316017498774, + "learning_rate": 0.00017116092400454652, + "loss": 1.0426, + "step": 1406 + }, + { + "epoch": 0.27069893281226515, + "grad_norm": 0.3155777695709668, + "learning_rate": 0.00017111712510737035, + "loss": 1.1854, + "step": 1407 + 
}, + { + "epoch": 0.27089132722080267, + "grad_norm": 0.30751416378092516, + "learning_rate": 0.00017107329858912225, + "loss": 1.0578, + "step": 1408 + }, + { + "epoch": 0.27108372162934014, + "grad_norm": 0.3518317013774291, + "learning_rate": 0.00017102944446682395, + "loss": 1.0176, + "step": 1409 + }, + { + "epoch": 0.27127611603787766, + "grad_norm": 0.2340350896227295, + "learning_rate": 0.00017098556275750787, + "loss": 1.0734, + "step": 1410 + }, + { + "epoch": 0.2714685104464152, + "grad_norm": 0.2890537821398866, + "learning_rate": 0.00017094165347821723, + "loss": 1.0906, + "step": 1411 + }, + { + "epoch": 0.27166090485495265, + "grad_norm": 0.3395019582990811, + "learning_rate": 0.00017089771664600582, + "loss": 1.1152, + "step": 1412 + }, + { + "epoch": 0.27185329926349017, + "grad_norm": 0.3373935525833467, + "learning_rate": 0.00017085375227793818, + "loss": 1.0581, + "step": 1413 + }, + { + "epoch": 0.27204569367202763, + "grad_norm": 0.29701743997662994, + "learning_rate": 0.00017080976039108965, + "loss": 1.0521, + "step": 1414 + }, + { + "epoch": 0.27223808808056515, + "grad_norm": 0.2540218390388427, + "learning_rate": 0.00017076574100254613, + "loss": 1.1097, + "step": 1415 + }, + { + "epoch": 0.2724304824891027, + "grad_norm": 0.3182432027755049, + "learning_rate": 0.00017072169412940422, + "loss": 1.0806, + "step": 1416 + }, + { + "epoch": 0.27262287689764014, + "grad_norm": 0.3914213197454, + "learning_rate": 0.00017067761978877121, + "loss": 1.0403, + "step": 1417 + }, + { + "epoch": 0.27281527130617766, + "grad_norm": 0.3269191412985446, + "learning_rate": 0.00017063351799776513, + "loss": 1.119, + "step": 1418 + }, + { + "epoch": 0.2730076657147152, + "grad_norm": 0.3803367678562637, + "learning_rate": 0.00017058938877351456, + "loss": 1.1058, + "step": 1419 + }, + { + "epoch": 0.27320006012325265, + "grad_norm": 0.30849186310063365, + "learning_rate": 0.00017054523213315878, + "loss": 1.1424, + "step": 1420 + }, + { + "epoch": 0.2733924545317902, + "grad_norm": 0.2703485857002882, + "learning_rate": 0.00017050104809384774, + "loss": 1.1049, + "step": 1421 + }, + { + "epoch": 0.2735848489403277, + "grad_norm": 0.2921528574845525, + "learning_rate": 0.000170456836672742, + "loss": 1.0599, + "step": 1422 + }, + { + "epoch": 0.27377724334886516, + "grad_norm": 0.3076258718874411, + "learning_rate": 0.00017041259788701281, + "loss": 1.1367, + "step": 1423 + }, + { + "epoch": 0.2739696377574027, + "grad_norm": 0.30124214318469533, + "learning_rate": 0.0001703683317538419, + "loss": 1.1517, + "step": 1424 + }, + { + "epoch": 0.27416203216594015, + "grad_norm": 0.31177408413252455, + "learning_rate": 0.00017032403829042182, + "loss": 1.1415, + "step": 1425 + }, + { + "epoch": 0.27435442657447767, + "grad_norm": 0.30004432972090933, + "learning_rate": 0.00017027971751395563, + "loss": 1.1988, + "step": 1426 + }, + { + "epoch": 0.2745468209830152, + "grad_norm": 0.3106744326241377, + "learning_rate": 0.00017023536944165698, + "loss": 1.1546, + "step": 1427 + }, + { + "epoch": 0.27473921539155266, + "grad_norm": 0.2851602238507599, + "learning_rate": 0.00017019099409075014, + "loss": 1.086, + "step": 1428 + }, + { + "epoch": 0.2749316098000902, + "grad_norm": 0.2634909899667367, + "learning_rate": 0.00017014659147847002, + "loss": 1.1056, + "step": 1429 + }, + { + "epoch": 0.2751240042086277, + "grad_norm": 0.3188866896136964, + "learning_rate": 0.00017010216162206208, + "loss": 1.1633, + "step": 1430 + }, + { + "epoch": 0.27531639861716517, + "grad_norm": 
0.28836837788970254, + "learning_rate": 0.00017005770453878234, + "loss": 1.1182, + "step": 1431 + }, + { + "epoch": 0.2755087930257027, + "grad_norm": 0.2975450328661655, + "learning_rate": 0.00017001322024589742, + "loss": 1.0936, + "step": 1432 + }, + { + "epoch": 0.2757011874342402, + "grad_norm": 0.3382751692849956, + "learning_rate": 0.00016996870876068452, + "loss": 1.1345, + "step": 1433 + }, + { + "epoch": 0.2758935818427777, + "grad_norm": 0.34107771338697157, + "learning_rate": 0.00016992417010043142, + "loss": 1.1076, + "step": 1434 + }, + { + "epoch": 0.2760859762513152, + "grad_norm": 0.27712803587893164, + "learning_rate": 0.00016987960428243638, + "loss": 1.0613, + "step": 1435 + }, + { + "epoch": 0.2762783706598527, + "grad_norm": 0.34252903826358966, + "learning_rate": 0.00016983501132400824, + "loss": 1.1542, + "step": 1436 + }, + { + "epoch": 0.2764707650683902, + "grad_norm": 0.3017579731220398, + "learning_rate": 0.00016979039124246642, + "loss": 1.0011, + "step": 1437 + }, + { + "epoch": 0.2766631594769277, + "grad_norm": 0.3650463571416283, + "learning_rate": 0.00016974574405514083, + "loss": 1.0774, + "step": 1438 + }, + { + "epoch": 0.2768555538854652, + "grad_norm": 0.34567173216330377, + "learning_rate": 0.00016970106977937192, + "loss": 1.1283, + "step": 1439 + }, + { + "epoch": 0.2770479482940027, + "grad_norm": 0.3008617277171387, + "learning_rate": 0.0001696563684325107, + "loss": 1.091, + "step": 1440 + }, + { + "epoch": 0.2772403427025402, + "grad_norm": 0.3210892263127466, + "learning_rate": 0.00016961164003191862, + "loss": 1.0207, + "step": 1441 + }, + { + "epoch": 0.2774327371110777, + "grad_norm": 0.4125838840831622, + "learning_rate": 0.00016956688459496765, + "loss": 1.1289, + "step": 1442 + }, + { + "epoch": 0.2776251315196152, + "grad_norm": 0.28647034974871216, + "learning_rate": 0.00016952210213904038, + "loss": 1.1797, + "step": 1443 + }, + { + "epoch": 0.27781752592815273, + "grad_norm": 0.28494862305196333, + "learning_rate": 0.0001694772926815297, + "loss": 1.0654, + "step": 1444 + }, + { + "epoch": 0.2780099203366902, + "grad_norm": 0.2839765423330028, + "learning_rate": 0.00016943245623983916, + "loss": 1.0816, + "step": 1445 + }, + { + "epoch": 0.2782023147452277, + "grad_norm": 0.31956143988872515, + "learning_rate": 0.00016938759283138268, + "loss": 1.172, + "step": 1446 + }, + { + "epoch": 0.27839470915376524, + "grad_norm": 0.34606961562792615, + "learning_rate": 0.00016934270247358468, + "loss": 1.1393, + "step": 1447 + }, + { + "epoch": 0.2785871035623027, + "grad_norm": 0.2799991187980808, + "learning_rate": 0.00016929778518388007, + "loss": 1.073, + "step": 1448 + }, + { + "epoch": 0.27877949797084023, + "grad_norm": 0.2910570538009604, + "learning_rate": 0.00016925284097971425, + "loss": 1.0928, + "step": 1449 + }, + { + "epoch": 0.27897189237937775, + "grad_norm": 0.2681833976207475, + "learning_rate": 0.00016920786987854294, + "loss": 1.0833, + "step": 1450 + }, + { + "epoch": 0.2791642867879152, + "grad_norm": 0.3004191624630394, + "learning_rate": 0.00016916287189783252, + "loss": 1.0586, + "step": 1451 + }, + { + "epoch": 0.27935668119645274, + "grad_norm": 0.2871753365160926, + "learning_rate": 0.00016911784705505957, + "loss": 1.0896, + "step": 1452 + }, + { + "epoch": 0.2795490756049902, + "grad_norm": 0.3046531488285525, + "learning_rate": 0.0001690727953677113, + "loss": 1.0628, + "step": 1453 + }, + { + "epoch": 0.2797414700135277, + "grad_norm": 0.2907399602928292, + "learning_rate": 0.00016902771685328523, + "loss": 
1.1626, + "step": 1454 + }, + { + "epoch": 0.27993386442206525, + "grad_norm": 0.3062179049396391, + "learning_rate": 0.00016898261152928931, + "loss": 1.1066, + "step": 1455 + }, + { + "epoch": 0.2801262588306027, + "grad_norm": 0.3351688303157574, + "learning_rate": 0.00016893747941324196, + "loss": 1.0735, + "step": 1456 + }, + { + "epoch": 0.28031865323914024, + "grad_norm": 0.3098324602502187, + "learning_rate": 0.000168892320522672, + "loss": 1.0838, + "step": 1457 + }, + { + "epoch": 0.28051104764767776, + "grad_norm": 0.3733944528578597, + "learning_rate": 0.00016884713487511858, + "loss": 1.19, + "step": 1458 + }, + { + "epoch": 0.2807034420562152, + "grad_norm": 0.2856005012087916, + "learning_rate": 0.0001688019224881313, + "loss": 1.0703, + "step": 1459 + }, + { + "epoch": 0.28089583646475275, + "grad_norm": 0.36357730146748884, + "learning_rate": 0.00016875668337927013, + "loss": 1.2372, + "step": 1460 + }, + { + "epoch": 0.28108823087329027, + "grad_norm": 0.31883751969703866, + "learning_rate": 0.00016871141756610544, + "loss": 1.1341, + "step": 1461 + }, + { + "epoch": 0.28128062528182773, + "grad_norm": 0.24909921113461664, + "learning_rate": 0.0001686661250662179, + "loss": 1.0997, + "step": 1462 + }, + { + "epoch": 0.28147301969036526, + "grad_norm": 0.28181778666966495, + "learning_rate": 0.0001686208058971986, + "loss": 1.0444, + "step": 1463 + }, + { + "epoch": 0.2816654140989028, + "grad_norm": 0.28495260405584993, + "learning_rate": 0.00016857546007664905, + "loss": 1.1328, + "step": 1464 + }, + { + "epoch": 0.28185780850744024, + "grad_norm": 0.27798452060783974, + "learning_rate": 0.000168530087622181, + "loss": 1.213, + "step": 1465 + }, + { + "epoch": 0.28205020291597777, + "grad_norm": 0.49235965559192735, + "learning_rate": 0.0001684846885514166, + "loss": 1.175, + "step": 1466 + }, + { + "epoch": 0.28224259732451523, + "grad_norm": 0.3576291386200579, + "learning_rate": 0.00016843926288198828, + "loss": 1.0901, + "step": 1467 + }, + { + "epoch": 0.28243499173305275, + "grad_norm": 0.29159212153274483, + "learning_rate": 0.0001683938106315389, + "loss": 1.1877, + "step": 1468 + }, + { + "epoch": 0.2826273861415903, + "grad_norm": 0.2723514191399268, + "learning_rate": 0.0001683483318177216, + "loss": 1.0679, + "step": 1469 + }, + { + "epoch": 0.28281978055012774, + "grad_norm": 0.30860933842493043, + "learning_rate": 0.00016830282645819976, + "loss": 1.1087, + "step": 1470 + }, + { + "epoch": 0.28301217495866526, + "grad_norm": 0.33556153547966977, + "learning_rate": 0.0001682572945706472, + "loss": 1.1703, + "step": 1471 + }, + { + "epoch": 0.2832045693672028, + "grad_norm": 0.3015032914858447, + "learning_rate": 0.00016821173617274793, + "loss": 1.0502, + "step": 1472 + }, + { + "epoch": 0.28339696377574025, + "grad_norm": 0.2953371253542578, + "learning_rate": 0.00016816615128219635, + "loss": 1.1347, + "step": 1473 + }, + { + "epoch": 0.2835893581842778, + "grad_norm": 0.3176017935032959, + "learning_rate": 0.0001681205399166971, + "loss": 1.1705, + "step": 1474 + }, + { + "epoch": 0.2837817525928153, + "grad_norm": 0.29309108886207547, + "learning_rate": 0.00016807490209396506, + "loss": 1.1541, + "step": 1475 + }, + { + "epoch": 0.28397414700135276, + "grad_norm": 0.39354989586670625, + "learning_rate": 0.00016802923783172552, + "loss": 1.0775, + "step": 1476 + }, + { + "epoch": 0.2841665414098903, + "grad_norm": 0.29954011805284947, + "learning_rate": 0.0001679835471477139, + "loss": 1.0235, + "step": 1477 + }, + { + "epoch": 0.28435893581842775, + 
"grad_norm": 0.3035630327059971, + "learning_rate": 0.00016793783005967592, + "loss": 1.0487, + "step": 1478 + }, + { + "epoch": 0.28455133022696527, + "grad_norm": 0.31938507727732696, + "learning_rate": 0.0001678920865853676, + "loss": 1.0572, + "step": 1479 + }, + { + "epoch": 0.2847437246355028, + "grad_norm": 0.29441542933487236, + "learning_rate": 0.00016784631674255518, + "loss": 1.1836, + "step": 1480 + }, + { + "epoch": 0.28493611904404026, + "grad_norm": 0.3107622872292956, + "learning_rate": 0.00016780052054901512, + "loss": 1.1321, + "step": 1481 + }, + { + "epoch": 0.2851285134525778, + "grad_norm": 0.35573137406520317, + "learning_rate": 0.00016775469802253414, + "loss": 1.1253, + "step": 1482 + }, + { + "epoch": 0.2853209078611153, + "grad_norm": 0.2765597068152107, + "learning_rate": 0.0001677088491809092, + "loss": 1.0699, + "step": 1483 + }, + { + "epoch": 0.28551330226965277, + "grad_norm": 0.3639595288307644, + "learning_rate": 0.00016766297404194745, + "loss": 1.1564, + "step": 1484 + }, + { + "epoch": 0.2857056966781903, + "grad_norm": 0.2972230833788457, + "learning_rate": 0.00016761707262346625, + "loss": 1.0802, + "step": 1485 + }, + { + "epoch": 0.2858980910867278, + "grad_norm": 0.2585359595866178, + "learning_rate": 0.0001675711449432932, + "loss": 1.066, + "step": 1486 + }, + { + "epoch": 0.2860904854952653, + "grad_norm": 0.3416958588595029, + "learning_rate": 0.00016752519101926607, + "loss": 1.1818, + "step": 1487 + }, + { + "epoch": 0.2862828799038028, + "grad_norm": 0.24818602492656874, + "learning_rate": 0.00016747921086923283, + "loss": 1.1695, + "step": 1488 + }, + { + "epoch": 0.2864752743123403, + "grad_norm": 0.26713942087382575, + "learning_rate": 0.00016743320451105168, + "loss": 1.0488, + "step": 1489 + }, + { + "epoch": 0.2866676687208778, + "grad_norm": 0.3527701470941162, + "learning_rate": 0.00016738717196259094, + "loss": 1.144, + "step": 1490 + }, + { + "epoch": 0.2868600631294153, + "grad_norm": 0.3293371939936096, + "learning_rate": 0.0001673411132417291, + "loss": 1.0061, + "step": 1491 + }, + { + "epoch": 0.2870524575379528, + "grad_norm": 0.3556116428133735, + "learning_rate": 0.0001672950283663548, + "loss": 1.1294, + "step": 1492 + }, + { + "epoch": 0.2872448519464903, + "grad_norm": 0.2748832749209894, + "learning_rate": 0.00016724891735436697, + "loss": 1.1753, + "step": 1493 + }, + { + "epoch": 0.2874372463550278, + "grad_norm": 0.31851666387671945, + "learning_rate": 0.00016720278022367452, + "loss": 1.1382, + "step": 1494 + }, + { + "epoch": 0.2876296407635653, + "grad_norm": 0.5268915186796314, + "learning_rate": 0.00016715661699219663, + "loss": 1.0654, + "step": 1495 + }, + { + "epoch": 0.2878220351721028, + "grad_norm": 0.34015706564092485, + "learning_rate": 0.00016711042767786257, + "loss": 1.119, + "step": 1496 + }, + { + "epoch": 0.28801442958064033, + "grad_norm": 0.3380177753706035, + "learning_rate": 0.00016706421229861166, + "loss": 1.0756, + "step": 1497 + }, + { + "epoch": 0.2882068239891778, + "grad_norm": 0.251871619832251, + "learning_rate": 0.00016701797087239354, + "loss": 1.1766, + "step": 1498 + }, + { + "epoch": 0.2883992183977153, + "grad_norm": 0.2497903719085726, + "learning_rate": 0.0001669717034171677, + "loss": 1.0662, + "step": 1499 + }, + { + "epoch": 0.28859161280625284, + "grad_norm": 0.271168615319396, + "learning_rate": 0.000166925409950904, + "loss": 1.1289, + "step": 1500 + }, + { + "epoch": 0.2887840072147903, + "grad_norm": 0.28216074211396697, + "learning_rate": 0.00016687909049158228, + 
"loss": 1.1391, + "step": 1501 + }, + { + "epoch": 0.2889764016233278, + "grad_norm": 0.34081625162184925, + "learning_rate": 0.00016683274505719246, + "loss": 1.1392, + "step": 1502 + }, + { + "epoch": 0.28916879603186535, + "grad_norm": 0.3632388174196172, + "learning_rate": 0.00016678637366573454, + "loss": 1.0236, + "step": 1503 + }, + { + "epoch": 0.2893611904404028, + "grad_norm": 0.3221984456151925, + "learning_rate": 0.0001667399763352187, + "loss": 1.105, + "step": 1504 + }, + { + "epoch": 0.28955358484894034, + "grad_norm": 0.31769912921531707, + "learning_rate": 0.0001666935530836651, + "loss": 1.186, + "step": 1505 + }, + { + "epoch": 0.2897459792574778, + "grad_norm": 0.34271585635347096, + "learning_rate": 0.00016664710392910395, + "loss": 0.9809, + "step": 1506 + }, + { + "epoch": 0.2899383736660153, + "grad_norm": 0.3357288852011628, + "learning_rate": 0.00016660062888957563, + "loss": 1.1471, + "step": 1507 + }, + { + "epoch": 0.29013076807455285, + "grad_norm": 0.31661361899122564, + "learning_rate": 0.00016655412798313051, + "loss": 1.0597, + "step": 1508 + }, + { + "epoch": 0.2903231624830903, + "grad_norm": 0.30237753675077755, + "learning_rate": 0.00016650760122782895, + "loss": 0.9679, + "step": 1509 + }, + { + "epoch": 0.29051555689162784, + "grad_norm": 0.31153421406024145, + "learning_rate": 0.00016646104864174145, + "loss": 1.1629, + "step": 1510 + }, + { + "epoch": 0.29070795130016536, + "grad_norm": 0.32212144065978987, + "learning_rate": 0.0001664144702429485, + "loss": 1.0928, + "step": 1511 + }, + { + "epoch": 0.2909003457087028, + "grad_norm": 0.47514370505666315, + "learning_rate": 0.0001663678660495406, + "loss": 1.0917, + "step": 1512 + }, + { + "epoch": 0.29109274011724035, + "grad_norm": 0.327981366932905, + "learning_rate": 0.0001663212360796183, + "loss": 1.1701, + "step": 1513 + }, + { + "epoch": 0.29128513452577787, + "grad_norm": 0.2578675540893025, + "learning_rate": 0.0001662745803512921, + "loss": 1.1149, + "step": 1514 + }, + { + "epoch": 0.29147752893431533, + "grad_norm": 0.33134955839770963, + "learning_rate": 0.00016622789888268258, + "loss": 1.1508, + "step": 1515 + }, + { + "epoch": 0.29166992334285285, + "grad_norm": 0.29587883752941313, + "learning_rate": 0.00016618119169192026, + "loss": 1.1707, + "step": 1516 + }, + { + "epoch": 0.2918623177513904, + "grad_norm": 0.3259080926822911, + "learning_rate": 0.00016613445879714572, + "loss": 1.1833, + "step": 1517 + }, + { + "epoch": 0.29205471215992784, + "grad_norm": 0.2759774004048962, + "learning_rate": 0.00016608770021650943, + "loss": 1.0774, + "step": 1518 + }, + { + "epoch": 0.29224710656846536, + "grad_norm": 0.34534654655783553, + "learning_rate": 0.00016604091596817192, + "loss": 1.0763, + "step": 1519 + }, + { + "epoch": 0.29243950097700283, + "grad_norm": 0.32048012748563287, + "learning_rate": 0.00016599410607030365, + "loss": 1.1655, + "step": 1520 + }, + { + "epoch": 0.29263189538554035, + "grad_norm": 0.27284069611106254, + "learning_rate": 0.00016594727054108498, + "loss": 1.1008, + "step": 1521 + }, + { + "epoch": 0.2928242897940779, + "grad_norm": 0.2842901508160404, + "learning_rate": 0.0001659004093987064, + "loss": 1.0221, + "step": 1522 + }, + { + "epoch": 0.29301668420261534, + "grad_norm": 0.3000877091087916, + "learning_rate": 0.00016585352266136814, + "loss": 1.1417, + "step": 1523 + }, + { + "epoch": 0.29320907861115286, + "grad_norm": 0.34940099331165325, + "learning_rate": 0.00016580661034728053, + "loss": 1.0477, + "step": 1524 + }, + { + "epoch": 
0.2934014730196904, + "grad_norm": 0.28169491807370783, + "learning_rate": 0.00016575967247466376, + "loss": 1.0167, + "step": 1525 + }, + { + "epoch": 0.29359386742822785, + "grad_norm": 0.28410687744689694, + "learning_rate": 0.0001657127090617479, + "loss": 1.0746, + "step": 1526 + }, + { + "epoch": 0.29378626183676537, + "grad_norm": 0.33459806989878466, + "learning_rate": 0.0001656657201267731, + "loss": 1.083, + "step": 1527 + }, + { + "epoch": 0.2939786562453029, + "grad_norm": 0.3048899533795747, + "learning_rate": 0.00016561870568798924, + "loss": 0.9888, + "step": 1528 + }, + { + "epoch": 0.29417105065384036, + "grad_norm": 0.3321949905668031, + "learning_rate": 0.00016557166576365622, + "loss": 1.163, + "step": 1529 + }, + { + "epoch": 0.2943634450623779, + "grad_norm": 0.26981622413587947, + "learning_rate": 0.00016552460037204384, + "loss": 1.0693, + "step": 1530 + }, + { + "epoch": 0.2945558394709154, + "grad_norm": 0.31459955220857305, + "learning_rate": 0.00016547750953143167, + "loss": 1.0609, + "step": 1531 + }, + { + "epoch": 0.29474823387945287, + "grad_norm": 0.3003298648748653, + "learning_rate": 0.00016543039326010928, + "loss": 1.1169, + "step": 1532 + }, + { + "epoch": 0.2949406282879904, + "grad_norm": 0.2875135056200567, + "learning_rate": 0.00016538325157637614, + "loss": 1.0714, + "step": 1533 + }, + { + "epoch": 0.29513302269652786, + "grad_norm": 0.3318638867723934, + "learning_rate": 0.00016533608449854147, + "loss": 1.1392, + "step": 1534 + }, + { + "epoch": 0.2953254171050654, + "grad_norm": 0.3259864783176585, + "learning_rate": 0.00016528889204492448, + "loss": 1.0244, + "step": 1535 + }, + { + "epoch": 0.2955178115136029, + "grad_norm": 0.3374964952180507, + "learning_rate": 0.00016524167423385413, + "loss": 1.1173, + "step": 1536 + }, + { + "epoch": 0.29571020592214037, + "grad_norm": 0.28719876444284703, + "learning_rate": 0.00016519443108366927, + "loss": 1.234, + "step": 1537 + }, + { + "epoch": 0.2959026003306779, + "grad_norm": 0.2739730710761506, + "learning_rate": 0.00016514716261271866, + "loss": 1.0707, + "step": 1538 + }, + { + "epoch": 0.2960949947392154, + "grad_norm": 0.30130410795227114, + "learning_rate": 0.00016509986883936074, + "loss": 1.101, + "step": 1539 + }, + { + "epoch": 0.2962873891477529, + "grad_norm": 0.35028803854119056, + "learning_rate": 0.00016505254978196388, + "loss": 1.059, + "step": 1540 + }, + { + "epoch": 0.2964797835562904, + "grad_norm": 0.32535297219762993, + "learning_rate": 0.00016500520545890634, + "loss": 1.1654, + "step": 1541 + }, + { + "epoch": 0.2966721779648279, + "grad_norm": 0.30817740055072923, + "learning_rate": 0.00016495783588857605, + "loss": 1.1192, + "step": 1542 + }, + { + "epoch": 0.2968645723733654, + "grad_norm": 0.478210489089417, + "learning_rate": 0.0001649104410893708, + "loss": 1.0413, + "step": 1543 + }, + { + "epoch": 0.2970569667819029, + "grad_norm": 0.3068352514438494, + "learning_rate": 0.0001648630210796982, + "loss": 1.1003, + "step": 1544 + }, + { + "epoch": 0.2972493611904404, + "grad_norm": 0.3406848942790489, + "learning_rate": 0.0001648155758779756, + "loss": 1.0016, + "step": 1545 + }, + { + "epoch": 0.2974417555989779, + "grad_norm": 0.2808769192019626, + "learning_rate": 0.0001647681055026302, + "loss": 1.0166, + "step": 1546 + }, + { + "epoch": 0.2976341500075154, + "grad_norm": 0.329848316331849, + "learning_rate": 0.000164720609972099, + "loss": 1.0867, + "step": 1547 + }, + { + "epoch": 0.2978265444160529, + "grad_norm": 0.2809503536357732, + "learning_rate": 
0.00016467308930482864, + "loss": 1.1006, + "step": 1548 + }, + { + "epoch": 0.2980189388245904, + "grad_norm": 0.3328033422354526, + "learning_rate": 0.00016462554351927557, + "loss": 0.9857, + "step": 1549 + }, + { + "epoch": 0.29821133323312793, + "grad_norm": 0.35623490764987126, + "learning_rate": 0.00016457797263390612, + "loss": 1.2333, + "step": 1550 + }, + { + "epoch": 0.2984037276416654, + "grad_norm": 0.31260021843495356, + "learning_rate": 0.00016453037666719624, + "loss": 1.131, + "step": 1551 + }, + { + "epoch": 0.2985961220502029, + "grad_norm": 0.30209859915913373, + "learning_rate": 0.00016448275563763162, + "loss": 1.1302, + "step": 1552 + }, + { + "epoch": 0.29878851645874044, + "grad_norm": 0.34068880877156577, + "learning_rate": 0.0001644351095637078, + "loss": 1.0522, + "step": 1553 + }, + { + "epoch": 0.2989809108672779, + "grad_norm": 0.2717034027673641, + "learning_rate": 0.00016438743846392985, + "loss": 1.1712, + "step": 1554 + }, + { + "epoch": 0.2991733052758154, + "grad_norm": 0.3103989627495267, + "learning_rate": 0.00016433974235681274, + "loss": 1.1326, + "step": 1555 + }, + { + "epoch": 0.29936569968435295, + "grad_norm": 0.2402098857090824, + "learning_rate": 0.0001642920212608811, + "loss": 1.1839, + "step": 1556 + }, + { + "epoch": 0.2995580940928904, + "grad_norm": 0.34889125013744243, + "learning_rate": 0.00016424427519466924, + "loss": 1.1347, + "step": 1557 + }, + { + "epoch": 0.29975048850142794, + "grad_norm": 0.363983432710772, + "learning_rate": 0.00016419650417672118, + "loss": 1.0968, + "step": 1558 + }, + { + "epoch": 0.2999428829099654, + "grad_norm": 0.31039175436138483, + "learning_rate": 0.00016414870822559064, + "loss": 1.2207, + "step": 1559 + }, + { + "epoch": 0.3001352773185029, + "grad_norm": 0.3055765865425427, + "learning_rate": 0.00016410088735984102, + "loss": 1.0434, + "step": 1560 + }, + { + "epoch": 0.30032767172704045, + "grad_norm": 0.2667455286187899, + "learning_rate": 0.00016405304159804534, + "loss": 1.0608, + "step": 1561 + }, + { + "epoch": 0.3005200661355779, + "grad_norm": 0.29871428015617846, + "learning_rate": 0.00016400517095878643, + "loss": 1.0646, + "step": 1562 + }, + { + "epoch": 0.30071246054411543, + "grad_norm": 0.42130675647760185, + "learning_rate": 0.00016395727546065665, + "loss": 0.9163, + "step": 1563 + }, + { + "epoch": 0.30090485495265296, + "grad_norm": 0.3271331110157056, + "learning_rate": 0.00016390935512225805, + "loss": 1.1656, + "step": 1564 + }, + { + "epoch": 0.3010972493611904, + "grad_norm": 0.3434655383691047, + "learning_rate": 0.00016386140996220232, + "loss": 1.0943, + "step": 1565 + }, + { + "epoch": 0.30128964376972794, + "grad_norm": 0.3017120632775669, + "learning_rate": 0.00016381343999911086, + "loss": 1.2189, + "step": 1566 + }, + { + "epoch": 0.30148203817826547, + "grad_norm": 0.2531338468220288, + "learning_rate": 0.00016376544525161465, + "loss": 1.1832, + "step": 1567 + }, + { + "epoch": 0.30167443258680293, + "grad_norm": 0.30054357809784965, + "learning_rate": 0.00016371742573835426, + "loss": 1.0188, + "step": 1568 + }, + { + "epoch": 0.30186682699534045, + "grad_norm": 0.2859045038413787, + "learning_rate": 0.0001636693814779799, + "loss": 1.0915, + "step": 1569 + }, + { + "epoch": 0.302059221403878, + "grad_norm": 0.3255328867349781, + "learning_rate": 0.00016362131248915144, + "loss": 1.0922, + "step": 1570 + }, + { + "epoch": 0.30225161581241544, + "grad_norm": 0.31721420954452095, + "learning_rate": 0.00016357321879053834, + "loss": 1.0463, + "step": 1571 + }, + { 
+ "epoch": 0.30244401022095296, + "grad_norm": 0.308636085373308, + "learning_rate": 0.00016352510040081963, + "loss": 1.1799, + "step": 1572 + }, + { + "epoch": 0.30263640462949043, + "grad_norm": 0.3396430145652912, + "learning_rate": 0.00016347695733868388, + "loss": 1.087, + "step": 1573 + }, + { + "epoch": 0.30282879903802795, + "grad_norm": 0.3150167575930741, + "learning_rate": 0.00016342878962282938, + "loss": 1.0466, + "step": 1574 + }, + { + "epoch": 0.3030211934465655, + "grad_norm": 0.23547476718079235, + "learning_rate": 0.00016338059727196387, + "loss": 1.1615, + "step": 1575 + }, + { + "epoch": 0.30321358785510294, + "grad_norm": 0.2981953981187636, + "learning_rate": 0.0001633323803048047, + "loss": 1.0787, + "step": 1576 + }, + { + "epoch": 0.30340598226364046, + "grad_norm": 0.33129995102859633, + "learning_rate": 0.0001632841387400788, + "loss": 1.1471, + "step": 1577 + }, + { + "epoch": 0.303598376672178, + "grad_norm": 0.30979639418539157, + "learning_rate": 0.00016323587259652267, + "loss": 1.1005, + "step": 1578 + }, + { + "epoch": 0.30379077108071545, + "grad_norm": 0.29595478584885476, + "learning_rate": 0.00016318758189288226, + "loss": 1.0032, + "step": 1579 + }, + { + "epoch": 0.30398316548925297, + "grad_norm": 0.36623837203171844, + "learning_rate": 0.00016313926664791314, + "loss": 1.1667, + "step": 1580 + }, + { + "epoch": 0.3041755598977905, + "grad_norm": 0.2983930115362985, + "learning_rate": 0.00016309092688038046, + "loss": 1.0953, + "step": 1581 + }, + { + "epoch": 0.30436795430632796, + "grad_norm": 0.3146073937564928, + "learning_rate": 0.00016304256260905872, + "loss": 0.9952, + "step": 1582 + }, + { + "epoch": 0.3045603487148655, + "grad_norm": 0.29550557175646847, + "learning_rate": 0.00016299417385273214, + "loss": 1.1821, + "step": 1583 + }, + { + "epoch": 0.304752743123403, + "grad_norm": 0.3038707634019499, + "learning_rate": 0.00016294576063019428, + "loss": 1.1121, + "step": 1584 + }, + { + "epoch": 0.30494513753194047, + "grad_norm": 0.3293523071770354, + "learning_rate": 0.00016289732296024834, + "loss": 1.106, + "step": 1585 + }, + { + "epoch": 0.305137531940478, + "grad_norm": 0.29149109941744755, + "learning_rate": 0.00016284886086170698, + "loss": 1.1194, + "step": 1586 + }, + { + "epoch": 0.30532992634901546, + "grad_norm": 0.4592677812140502, + "learning_rate": 0.00016280037435339217, + "loss": 1.1474, + "step": 1587 + }, + { + "epoch": 0.305522320757553, + "grad_norm": 0.3203653400906737, + "learning_rate": 0.00016275186345413568, + "loss": 1.1605, + "step": 1588 + }, + { + "epoch": 0.3057147151660905, + "grad_norm": 0.36307461152220283, + "learning_rate": 0.00016270332818277847, + "loss": 1.0303, + "step": 1589 + }, + { + "epoch": 0.30590710957462797, + "grad_norm": 0.44069341364426956, + "learning_rate": 0.00016265476855817116, + "loss": 1.0347, + "step": 1590 + }, + { + "epoch": 0.3060995039831655, + "grad_norm": 0.29995668120186636, + "learning_rate": 0.00016260618459917368, + "loss": 1.0528, + "step": 1591 + }, + { + "epoch": 0.306291898391703, + "grad_norm": 0.3451138109635149, + "learning_rate": 0.00016255757632465553, + "loss": 1.1118, + "step": 1592 + }, + { + "epoch": 0.3064842928002405, + "grad_norm": 0.2711304810937573, + "learning_rate": 0.00016250894375349558, + "loss": 1.101, + "step": 1593 + }, + { + "epoch": 0.306676687208778, + "grad_norm": 0.38704399296599323, + "learning_rate": 0.00016246028690458216, + "loss": 1.1577, + "step": 1594 + }, + { + "epoch": 0.3068690816173155, + "grad_norm": 0.4133066487034265, + 
"learning_rate": 0.00016241160579681308, + "loss": 1.1894, + "step": 1595 + }, + { + "epoch": 0.307061476025853, + "grad_norm": 0.28992392284211643, + "learning_rate": 0.0001623629004490954, + "loss": 1.1236, + "step": 1596 + }, + { + "epoch": 0.3072538704343905, + "grad_norm": 0.33565142715411495, + "learning_rate": 0.00016231417088034586, + "loss": 1.1304, + "step": 1597 + }, + { + "epoch": 0.30744626484292803, + "grad_norm": 0.26501111413113954, + "learning_rate": 0.0001622654171094904, + "loss": 1.0423, + "step": 1598 + }, + { + "epoch": 0.3076386592514655, + "grad_norm": 0.36617546738271656, + "learning_rate": 0.00016221663915546436, + "loss": 1.0918, + "step": 1599 + }, + { + "epoch": 0.307831053660003, + "grad_norm": 0.389302797482364, + "learning_rate": 0.00016216783703721266, + "loss": 1.0707, + "step": 1600 + }, + { + "epoch": 0.3080234480685405, + "grad_norm": 0.2773217172383021, + "learning_rate": 0.00016211901077368935, + "loss": 1.0718, + "step": 1601 + }, + { + "epoch": 0.308215842477078, + "grad_norm": 0.28523774796025136, + "learning_rate": 0.0001620701603838581, + "loss": 1.1037, + "step": 1602 + }, + { + "epoch": 0.3084082368856155, + "grad_norm": 0.22574790952974697, + "learning_rate": 0.00016202128588669177, + "loss": 1.1081, + "step": 1603 + }, + { + "epoch": 0.308600631294153, + "grad_norm": 0.2764104018829215, + "learning_rate": 0.00016197238730117271, + "loss": 1.1793, + "step": 1604 + }, + { + "epoch": 0.3087930257026905, + "grad_norm": 0.34309387709313244, + "learning_rate": 0.00016192346464629246, + "loss": 1.1452, + "step": 1605 + }, + { + "epoch": 0.30898542011122804, + "grad_norm": 0.31290656503701925, + "learning_rate": 0.00016187451794105214, + "loss": 1.1104, + "step": 1606 + }, + { + "epoch": 0.3091778145197655, + "grad_norm": 0.288157637887267, + "learning_rate": 0.00016182554720446202, + "loss": 1.0873, + "step": 1607 + }, + { + "epoch": 0.309370208928303, + "grad_norm": 0.2934921643373651, + "learning_rate": 0.00016177655245554177, + "loss": 1.1279, + "step": 1608 + }, + { + "epoch": 0.30956260333684055, + "grad_norm": 0.2972520581216308, + "learning_rate": 0.00016172753371332038, + "loss": 1.1243, + "step": 1609 + }, + { + "epoch": 0.309754997745378, + "grad_norm": 0.29628968296112, + "learning_rate": 0.00016167849099683624, + "loss": 1.0601, + "step": 1610 + }, + { + "epoch": 0.30994739215391554, + "grad_norm": 0.3353959738162529, + "learning_rate": 0.00016162942432513687, + "loss": 1.1675, + "step": 1611 + }, + { + "epoch": 0.310139786562453, + "grad_norm": 0.2632999349649381, + "learning_rate": 0.00016158033371727924, + "loss": 1.1154, + "step": 1612 + }, + { + "epoch": 0.3103321809709905, + "grad_norm": 0.2552675225240679, + "learning_rate": 0.00016153121919232962, + "loss": 1.0847, + "step": 1613 + }, + { + "epoch": 0.31052457537952805, + "grad_norm": 0.35422271810160016, + "learning_rate": 0.00016148208076936348, + "loss": 1.079, + "step": 1614 + }, + { + "epoch": 0.3107169697880655, + "grad_norm": 0.27509491454965834, + "learning_rate": 0.0001614329184674656, + "loss": 1.1359, + "step": 1615 + }, + { + "epoch": 0.31090936419660303, + "grad_norm": 0.30725065984078526, + "learning_rate": 0.00016138373230573013, + "loss": 1.0937, + "step": 1616 + }, + { + "epoch": 0.31110175860514055, + "grad_norm": 0.26424568384200414, + "learning_rate": 0.00016133452230326033, + "loss": 1.0895, + "step": 1617 + }, + { + "epoch": 0.311294153013678, + "grad_norm": 0.33726879817609995, + "learning_rate": 0.00016128528847916883, + "loss": 1.0272, + "step": 1618 + }, 
+ { + "epoch": 0.31148654742221554, + "grad_norm": 0.2984932843827208, + "learning_rate": 0.00016123603085257745, + "loss": 1.1446, + "step": 1619 + }, + { + "epoch": 0.31167894183075306, + "grad_norm": 0.2503779323015421, + "learning_rate": 0.00016118674944261732, + "loss": 1.1713, + "step": 1620 + }, + { + "epoch": 0.31187133623929053, + "grad_norm": 0.2882919565900987, + "learning_rate": 0.0001611374442684288, + "loss": 1.1407, + "step": 1621 + }, + { + "epoch": 0.31206373064782805, + "grad_norm": 0.37607656355574237, + "learning_rate": 0.00016108811534916136, + "loss": 1.0204, + "step": 1622 + }, + { + "epoch": 0.3122561250563656, + "grad_norm": 0.30051842653071015, + "learning_rate": 0.00016103876270397386, + "loss": 1.1044, + "step": 1623 + }, + { + "epoch": 0.31244851946490304, + "grad_norm": 0.3351953421145429, + "learning_rate": 0.0001609893863520343, + "loss": 1.1175, + "step": 1624 + }, + { + "epoch": 0.31264091387344056, + "grad_norm": 0.3709485804872838, + "learning_rate": 0.0001609399863125198, + "loss": 1.1519, + "step": 1625 + }, + { + "epoch": 0.31283330828197803, + "grad_norm": 0.27105795360778984, + "learning_rate": 0.00016089056260461688, + "loss": 1.1809, + "step": 1626 + }, + { + "epoch": 0.31302570269051555, + "grad_norm": 0.3249737759395512, + "learning_rate": 0.00016084111524752105, + "loss": 1.1303, + "step": 1627 + }, + { + "epoch": 0.31321809709905307, + "grad_norm": 0.30231451672539145, + "learning_rate": 0.00016079164426043718, + "loss": 1.1199, + "step": 1628 + }, + { + "epoch": 0.31341049150759054, + "grad_norm": 0.3404165721034413, + "learning_rate": 0.0001607421496625791, + "loss": 1.0667, + "step": 1629 + }, + { + "epoch": 0.31360288591612806, + "grad_norm": 0.3473400020118858, + "learning_rate": 0.00016069263147317013, + "loss": 1.2436, + "step": 1630 + }, + { + "epoch": 0.3137952803246656, + "grad_norm": 0.2635881180537359, + "learning_rate": 0.00016064308971144238, + "loss": 1.0311, + "step": 1631 + }, + { + "epoch": 0.31398767473320305, + "grad_norm": 0.3097943727334572, + "learning_rate": 0.00016059352439663739, + "loss": 1.1602, + "step": 1632 + }, + { + "epoch": 0.31418006914174057, + "grad_norm": 0.24570331745693133, + "learning_rate": 0.00016054393554800576, + "loss": 1.1309, + "step": 1633 + }, + { + "epoch": 0.3143724635502781, + "grad_norm": 0.38554881372537647, + "learning_rate": 0.0001604943231848072, + "loss": 1.1888, + "step": 1634 + }, + { + "epoch": 0.31456485795881556, + "grad_norm": 0.2820389162483143, + "learning_rate": 0.00016044468732631057, + "loss": 1.1445, + "step": 1635 + }, + { + "epoch": 0.3147572523673531, + "grad_norm": 0.27371254530078315, + "learning_rate": 0.00016039502799179394, + "loss": 0.9879, + "step": 1636 + }, + { + "epoch": 0.3149496467758906, + "grad_norm": 0.30841342432884855, + "learning_rate": 0.00016034534520054433, + "loss": 1.1096, + "step": 1637 + }, + { + "epoch": 0.31514204118442807, + "grad_norm": 0.31856710401081456, + "learning_rate": 0.000160295638971858, + "loss": 1.0031, + "step": 1638 + }, + { + "epoch": 0.3153344355929656, + "grad_norm": 0.3274701548655172, + "learning_rate": 0.0001602459093250403, + "loss": 1.0453, + "step": 1639 + }, + { + "epoch": 0.31552683000150306, + "grad_norm": 0.33298410955415725, + "learning_rate": 0.0001601961562794056, + "loss": 1.1194, + "step": 1640 + }, + { + "epoch": 0.3157192244100406, + "grad_norm": 0.33392940564080026, + "learning_rate": 0.0001601463798542775, + "loss": 1.0302, + "step": 1641 + }, + { + "epoch": 0.3159116188185781, + "grad_norm": 
0.3143650994119334, + "learning_rate": 0.00016009658006898848, + "loss": 1.1557, + "step": 1642 + }, + { + "epoch": 0.31610401322711557, + "grad_norm": 0.3514850475319936, + "learning_rate": 0.00016004675694288026, + "loss": 0.9993, + "step": 1643 + }, + { + "epoch": 0.3162964076356531, + "grad_norm": 0.30575928311211603, + "learning_rate": 0.0001599969104953036, + "loss": 1.1048, + "step": 1644 + }, + { + "epoch": 0.3164888020441906, + "grad_norm": 0.28060796344719513, + "learning_rate": 0.0001599470407456182, + "loss": 1.0582, + "step": 1645 + }, + { + "epoch": 0.3166811964527281, + "grad_norm": 0.26270948000019373, + "learning_rate": 0.00015989714771319299, + "loss": 1.1035, + "step": 1646 + }, + { + "epoch": 0.3168735908612656, + "grad_norm": 0.3512780011667153, + "learning_rate": 0.00015984723141740576, + "loss": 1.0329, + "step": 1647 + }, + { + "epoch": 0.3170659852698031, + "grad_norm": 0.34227776507782676, + "learning_rate": 0.0001597972918776435, + "loss": 1.1087, + "step": 1648 + }, + { + "epoch": 0.3172583796783406, + "grad_norm": 0.2899848389035279, + "learning_rate": 0.00015974732911330208, + "loss": 1.0792, + "step": 1649 + }, + { + "epoch": 0.3174507740868781, + "grad_norm": 0.2894716805556298, + "learning_rate": 0.00015969734314378654, + "loss": 1.1692, + "step": 1650 + }, + { + "epoch": 0.31764316849541563, + "grad_norm": 0.33311694375190615, + "learning_rate": 0.00015964733398851077, + "loss": 1.0278, + "step": 1651 + }, + { + "epoch": 0.3178355629039531, + "grad_norm": 0.2288311725280411, + "learning_rate": 0.00015959730166689783, + "loss": 1.1078, + "step": 1652 + }, + { + "epoch": 0.3180279573124906, + "grad_norm": 0.3153840508885981, + "learning_rate": 0.00015954724619837967, + "loss": 1.1157, + "step": 1653 + }, + { + "epoch": 0.3182203517210281, + "grad_norm": 0.3111843945999688, + "learning_rate": 0.00015949716760239722, + "loss": 1.1882, + "step": 1654 + }, + { + "epoch": 0.3184127461295656, + "grad_norm": 0.32217455834873554, + "learning_rate": 0.00015944706589840046, + "loss": 1.1693, + "step": 1655 + }, + { + "epoch": 0.3186051405381031, + "grad_norm": 0.34036596071969843, + "learning_rate": 0.00015939694110584832, + "loss": 1.1236, + "step": 1656 + }, + { + "epoch": 0.3187975349466406, + "grad_norm": 0.3013483252959257, + "learning_rate": 0.00015934679324420872, + "loss": 1.1389, + "step": 1657 + }, + { + "epoch": 0.3189899293551781, + "grad_norm": 0.2923871347311417, + "learning_rate": 0.00015929662233295843, + "loss": 1.0541, + "step": 1658 + }, + { + "epoch": 0.31918232376371564, + "grad_norm": 0.31867701210043714, + "learning_rate": 0.00015924642839158332, + "loss": 1.0832, + "step": 1659 + }, + { + "epoch": 0.3193747181722531, + "grad_norm": 0.39856036220128727, + "learning_rate": 0.0001591962114395781, + "loss": 1.0644, + "step": 1660 + }, + { + "epoch": 0.3195671125807906, + "grad_norm": 0.34993384178261716, + "learning_rate": 0.00015914597149644652, + "loss": 1.1121, + "step": 1661 + }, + { + "epoch": 0.31975950698932815, + "grad_norm": 0.2818685497718032, + "learning_rate": 0.00015909570858170112, + "loss": 1.0815, + "step": 1662 + }, + { + "epoch": 0.3199519013978656, + "grad_norm": 0.3257483342553277, + "learning_rate": 0.00015904542271486346, + "loss": 1.1076, + "step": 1663 + }, + { + "epoch": 0.32014429580640313, + "grad_norm": 0.4620397042101499, + "learning_rate": 0.00015899511391546402, + "loss": 1.0697, + "step": 1664 + }, + { + "epoch": 0.32033669021494066, + "grad_norm": 0.28726484021356385, + "learning_rate": 0.00015894478220304214, + 
"loss": 1.0706, + "step": 1665 + }, + { + "epoch": 0.3205290846234781, + "grad_norm": 0.38799554637874106, + "learning_rate": 0.00015889442759714603, + "loss": 1.1153, + "step": 1666 + }, + { + "epoch": 0.32072147903201564, + "grad_norm": 0.28135786722388934, + "learning_rate": 0.00015884405011733292, + "loss": 1.078, + "step": 1667 + }, + { + "epoch": 0.3209138734405531, + "grad_norm": 0.31900256597094473, + "learning_rate": 0.0001587936497831688, + "loss": 1.1211, + "step": 1668 + }, + { + "epoch": 0.32110626784909063, + "grad_norm": 0.24625409818982552, + "learning_rate": 0.00015874322661422856, + "loss": 1.1542, + "step": 1669 + }, + { + "epoch": 0.32129866225762815, + "grad_norm": 0.3620634302619436, + "learning_rate": 0.000158692780630096, + "loss": 1.0907, + "step": 1670 + }, + { + "epoch": 0.3214910566661656, + "grad_norm": 0.27307794748105374, + "learning_rate": 0.0001586423118503638, + "loss": 1.0856, + "step": 1671 + }, + { + "epoch": 0.32168345107470314, + "grad_norm": 0.3385126516913844, + "learning_rate": 0.0001585918202946334, + "loss": 1.1228, + "step": 1672 + }, + { + "epoch": 0.32187584548324066, + "grad_norm": 0.24589945573280625, + "learning_rate": 0.00015854130598251512, + "loss": 1.0938, + "step": 1673 + }, + { + "epoch": 0.32206823989177813, + "grad_norm": 0.29583788903140473, + "learning_rate": 0.0001584907689336282, + "loss": 1.1006, + "step": 1674 + }, + { + "epoch": 0.32226063430031565, + "grad_norm": 0.2552304059083672, + "learning_rate": 0.0001584402091676006, + "loss": 1.1358, + "step": 1675 + }, + { + "epoch": 0.3224530287088532, + "grad_norm": 0.29829569302398823, + "learning_rate": 0.00015838962670406916, + "loss": 1.1205, + "step": 1676 + }, + { + "epoch": 0.32264542311739064, + "grad_norm": 0.31524272566734557, + "learning_rate": 0.00015833902156267956, + "loss": 1.0217, + "step": 1677 + }, + { + "epoch": 0.32283781752592816, + "grad_norm": 0.30083031982222824, + "learning_rate": 0.00015828839376308618, + "loss": 1.1027, + "step": 1678 + }, + { + "epoch": 0.3230302119344656, + "grad_norm": 0.35217746987846116, + "learning_rate": 0.00015823774332495235, + "loss": 1.0706, + "step": 1679 + }, + { + "epoch": 0.32322260634300315, + "grad_norm": 0.26813025465998713, + "learning_rate": 0.0001581870702679501, + "loss": 1.1434, + "step": 1680 + }, + { + "epoch": 0.32341500075154067, + "grad_norm": 0.2921191933598302, + "learning_rate": 0.0001581363746117602, + "loss": 1.0904, + "step": 1681 + }, + { + "epoch": 0.32360739516007814, + "grad_norm": 0.2670587227394991, + "learning_rate": 0.00015808565637607237, + "loss": 1.0912, + "step": 1682 + }, + { + "epoch": 0.32379978956861566, + "grad_norm": 0.28722073590857866, + "learning_rate": 0.00015803491558058488, + "loss": 1.0297, + "step": 1683 + }, + { + "epoch": 0.3239921839771532, + "grad_norm": 0.27207316853786107, + "learning_rate": 0.00015798415224500492, + "loss": 1.071, + "step": 1684 + }, + { + "epoch": 0.32418457838569065, + "grad_norm": 0.360211668310971, + "learning_rate": 0.00015793336638904838, + "loss": 1.1801, + "step": 1685 + }, + { + "epoch": 0.32437697279422817, + "grad_norm": 0.28057419264824257, + "learning_rate": 0.0001578825580324399, + "loss": 1.0872, + "step": 1686 + }, + { + "epoch": 0.3245693672027657, + "grad_norm": 0.3115687098893564, + "learning_rate": 0.0001578317271949129, + "loss": 1.0582, + "step": 1687 + }, + { + "epoch": 0.32476176161130316, + "grad_norm": 0.3239013387661759, + "learning_rate": 0.00015778087389620938, + "loss": 1.1821, + "step": 1688 + }, + { + "epoch": 
0.3249541560198407, + "grad_norm": 0.30370621431040346, + "learning_rate": 0.00015772999815608028, + "loss": 1.1513, + "step": 1689 + }, + { + "epoch": 0.3251465504283782, + "grad_norm": 0.3388270445755482, + "learning_rate": 0.00015767909999428513, + "loss": 1.048, + "step": 1690 + }, + { + "epoch": 0.32533894483691567, + "grad_norm": 0.5014467130105696, + "learning_rate": 0.00015762817943059217, + "loss": 1.109, + "step": 1691 + }, + { + "epoch": 0.3255313392454532, + "grad_norm": 0.3101457911798254, + "learning_rate": 0.00015757723648477837, + "loss": 1.1951, + "step": 1692 + }, + { + "epoch": 0.32572373365399065, + "grad_norm": 0.31983341580099617, + "learning_rate": 0.0001575262711766294, + "loss": 1.0513, + "step": 1693 + }, + { + "epoch": 0.3259161280625282, + "grad_norm": 0.32115860558200393, + "learning_rate": 0.00015747528352593956, + "loss": 1.1377, + "step": 1694 + }, + { + "epoch": 0.3261085224710657, + "grad_norm": 0.26693164410380726, + "learning_rate": 0.0001574242735525119, + "loss": 1.0741, + "step": 1695 + }, + { + "epoch": 0.32630091687960316, + "grad_norm": 0.29261104426639334, + "learning_rate": 0.00015737324127615806, + "loss": 1.1571, + "step": 1696 + }, + { + "epoch": 0.3264933112881407, + "grad_norm": 0.32067235043976067, + "learning_rate": 0.00015732218671669844, + "loss": 1.1029, + "step": 1697 + }, + { + "epoch": 0.3266857056966782, + "grad_norm": 0.2906248562989904, + "learning_rate": 0.00015727110989396202, + "loss": 1.1475, + "step": 1698 + }, + { + "epoch": 0.3268781001052157, + "grad_norm": 0.2867376906048281, + "learning_rate": 0.00015722001082778646, + "loss": 1.1471, + "step": 1699 + }, + { + "epoch": 0.3270704945137532, + "grad_norm": 0.27682117457405814, + "learning_rate": 0.00015716888953801804, + "loss": 1.0052, + "step": 1700 + }, + { + "epoch": 0.3272628889222907, + "grad_norm": 0.34681205850819985, + "learning_rate": 0.00015711774604451167, + "loss": 1.1095, + "step": 1701 + }, + { + "epoch": 0.3274552833308282, + "grad_norm": 0.2947816197773247, + "learning_rate": 0.0001570665803671309, + "loss": 1.1064, + "step": 1702 + }, + { + "epoch": 0.3276476777393657, + "grad_norm": 0.32184530624521973, + "learning_rate": 0.00015701539252574792, + "loss": 1.0734, + "step": 1703 + }, + { + "epoch": 0.3278400721479032, + "grad_norm": 0.2540830853998571, + "learning_rate": 0.00015696418254024344, + "loss": 1.1088, + "step": 1704 + }, + { + "epoch": 0.3280324665564407, + "grad_norm": 0.3484982565630624, + "learning_rate": 0.00015691295043050688, + "loss": 1.068, + "step": 1705 + }, + { + "epoch": 0.3282248609649782, + "grad_norm": 0.2719189864386847, + "learning_rate": 0.0001568616962164362, + "loss": 1.1032, + "step": 1706 + }, + { + "epoch": 0.3284172553735157, + "grad_norm": 0.2705620834447423, + "learning_rate": 0.0001568104199179379, + "loss": 1.0381, + "step": 1707 + }, + { + "epoch": 0.3286096497820532, + "grad_norm": 0.45316165264254743, + "learning_rate": 0.00015675912155492712, + "loss": 1.0574, + "step": 1708 + }, + { + "epoch": 0.3288020441905907, + "grad_norm": 0.2893803363115719, + "learning_rate": 0.00015670780114732756, + "loss": 1.0125, + "step": 1709 + }, + { + "epoch": 0.3289944385991282, + "grad_norm": 0.2836083015471957, + "learning_rate": 0.00015665645871507151, + "loss": 1.1448, + "step": 1710 + }, + { + "epoch": 0.3291868330076657, + "grad_norm": 0.33308071873020717, + "learning_rate": 0.00015660509427809974, + "loss": 1.1653, + "step": 1711 + }, + { + "epoch": 0.32937922741620324, + "grad_norm": 0.32566723301114353, + "learning_rate": 
0.0001565537078563616, + "loss": 1.1916, + "step": 1712 + }, + { + "epoch": 0.3295716218247407, + "grad_norm": 0.32567450087158395, + "learning_rate": 0.000156502299469815, + "loss": 1.12, + "step": 1713 + }, + { + "epoch": 0.3297640162332782, + "grad_norm": 0.2915856448588911, + "learning_rate": 0.00015645086913842636, + "loss": 1.0872, + "step": 1714 + }, + { + "epoch": 0.32995641064181574, + "grad_norm": 0.2921176902493695, + "learning_rate": 0.00015639941688217065, + "loss": 1.0837, + "step": 1715 + }, + { + "epoch": 0.3301488050503532, + "grad_norm": 0.2832387107531186, + "learning_rate": 0.00015634794272103127, + "loss": 1.0409, + "step": 1716 + }, + { + "epoch": 0.33034119945889073, + "grad_norm": 0.3914146956229172, + "learning_rate": 0.0001562964466750003, + "loss": 1.0827, + "step": 1717 + }, + { + "epoch": 0.33053359386742825, + "grad_norm": 0.40159459338146336, + "learning_rate": 0.0001562449287640781, + "loss": 1.1236, + "step": 1718 + }, + { + "epoch": 0.3307259882759657, + "grad_norm": 0.29390354481910025, + "learning_rate": 0.00015619338900827367, + "loss": 1.1759, + "step": 1719 + }, + { + "epoch": 0.33091838268450324, + "grad_norm": 0.32539541465338706, + "learning_rate": 0.00015614182742760448, + "loss": 1.1608, + "step": 1720 + }, + { + "epoch": 0.3311107770930407, + "grad_norm": 0.36937310139837315, + "learning_rate": 0.00015609024404209643, + "loss": 1.1444, + "step": 1721 + }, + { + "epoch": 0.33130317150157823, + "grad_norm": 0.285692421423644, + "learning_rate": 0.00015603863887178393, + "loss": 1.1304, + "step": 1722 + }, + { + "epoch": 0.33149556591011575, + "grad_norm": 0.33567645622433384, + "learning_rate": 0.00015598701193670982, + "loss": 1.1621, + "step": 1723 + }, + { + "epoch": 0.3316879603186532, + "grad_norm": 0.2936346138536046, + "learning_rate": 0.0001559353632569254, + "loss": 1.0891, + "step": 1724 + }, + { + "epoch": 0.33188035472719074, + "grad_norm": 0.3329249851761191, + "learning_rate": 0.00015588369285249047, + "loss": 1.1677, + "step": 1725 + }, + { + "epoch": 0.33207274913572826, + "grad_norm": 0.25552820385507224, + "learning_rate": 0.00015583200074347316, + "loss": 1.2004, + "step": 1726 + }, + { + "epoch": 0.33226514354426573, + "grad_norm": 0.33284428871176047, + "learning_rate": 0.0001557802869499501, + "loss": 1.1082, + "step": 1727 + }, + { + "epoch": 0.33245753795280325, + "grad_norm": 0.2794737019628236, + "learning_rate": 0.00015572855149200638, + "loss": 1.1239, + "step": 1728 + }, + { + "epoch": 0.33264993236134077, + "grad_norm": 0.2536638971841869, + "learning_rate": 0.0001556767943897354, + "loss": 1.0684, + "step": 1729 + }, + { + "epoch": 0.33284232676987824, + "grad_norm": 0.2849290901393974, + "learning_rate": 0.00015562501566323907, + "loss": 1.1189, + "step": 1730 + }, + { + "epoch": 0.33303472117841576, + "grad_norm": 0.41521589835162076, + "learning_rate": 0.0001555732153326276, + "loss": 1.1012, + "step": 1731 + }, + { + "epoch": 0.3332271155869532, + "grad_norm": 0.37598631027050283, + "learning_rate": 0.00015552139341801967, + "loss": 1.0954, + "step": 1732 + }, + { + "epoch": 0.33341950999549075, + "grad_norm": 0.3044128262013889, + "learning_rate": 0.00015546954993954227, + "loss": 1.1882, + "step": 1733 + }, + { + "epoch": 0.33361190440402827, + "grad_norm": 0.3680301285876295, + "learning_rate": 0.00015541768491733092, + "loss": 1.0749, + "step": 1734 + }, + { + "epoch": 0.33380429881256574, + "grad_norm": 0.3144030411264564, + "learning_rate": 0.00015536579837152926, + "loss": 1.059, + "step": 1735 + }, + { + 
"epoch": 0.33399669322110326, + "grad_norm": 0.2706787135809171, + "learning_rate": 0.00015531389032228955, + "loss": 1.0583, + "step": 1736 + }, + { + "epoch": 0.3341890876296408, + "grad_norm": 0.30436458206492695, + "learning_rate": 0.00015526196078977217, + "loss": 1.155, + "step": 1737 + }, + { + "epoch": 0.33438148203817825, + "grad_norm": 0.2969114220531906, + "learning_rate": 0.00015521000979414602, + "loss": 1.0697, + "step": 1738 + }, + { + "epoch": 0.33457387644671577, + "grad_norm": 0.392924646839991, + "learning_rate": 0.00015515803735558826, + "loss": 1.1072, + "step": 1739 + }, + { + "epoch": 0.3347662708552533, + "grad_norm": 0.30896485465806317, + "learning_rate": 0.00015510604349428436, + "loss": 1.1143, + "step": 1740 + }, + { + "epoch": 0.33495866526379076, + "grad_norm": 0.3167503249554492, + "learning_rate": 0.00015505402823042818, + "loss": 1.0169, + "step": 1741 + }, + { + "epoch": 0.3351510596723283, + "grad_norm": 0.2964547561570499, + "learning_rate": 0.00015500199158422178, + "loss": 1.0048, + "step": 1742 + }, + { + "epoch": 0.3353434540808658, + "grad_norm": 0.2723709433325831, + "learning_rate": 0.0001549499335758757, + "loss": 1.077, + "step": 1743 + }, + { + "epoch": 0.33553584848940327, + "grad_norm": 0.30477874378702324, + "learning_rate": 0.00015489785422560857, + "loss": 1.2128, + "step": 1744 + }, + { + "epoch": 0.3357282428979408, + "grad_norm": 0.3686132621298599, + "learning_rate": 0.00015484575355364743, + "loss": 1.1092, + "step": 1745 + }, + { + "epoch": 0.33592063730647825, + "grad_norm": 0.29269390666926565, + "learning_rate": 0.00015479363158022764, + "loss": 1.1855, + "step": 1746 + }, + { + "epoch": 0.3361130317150158, + "grad_norm": 0.2747907752684848, + "learning_rate": 0.0001547414883255927, + "loss": 1.073, + "step": 1747 + }, + { + "epoch": 0.3363054261235533, + "grad_norm": 0.4225370662095908, + "learning_rate": 0.0001546893238099945, + "loss": 1.1993, + "step": 1748 + }, + { + "epoch": 0.33649782053209076, + "grad_norm": 0.30229365873123076, + "learning_rate": 0.0001546371380536931, + "loss": 1.0499, + "step": 1749 + }, + { + "epoch": 0.3366902149406283, + "grad_norm": 0.3224313881088821, + "learning_rate": 0.00015458493107695686, + "loss": 1.0478, + "step": 1750 + }, + { + "epoch": 0.3368826093491658, + "grad_norm": 0.3033038838014268, + "learning_rate": 0.00015453270290006238, + "loss": 1.0132, + "step": 1751 + }, + { + "epoch": 0.3370750037577033, + "grad_norm": 0.27948211864946315, + "learning_rate": 0.00015448045354329447, + "loss": 1.1255, + "step": 1752 + }, + { + "epoch": 0.3372673981662408, + "grad_norm": 0.27273257626454467, + "learning_rate": 0.00015442818302694618, + "loss": 1.1071, + "step": 1753 + }, + { + "epoch": 0.3374597925747783, + "grad_norm": 0.34740510402338404, + "learning_rate": 0.0001543758913713188, + "loss": 1.1502, + "step": 1754 + }, + { + "epoch": 0.3376521869833158, + "grad_norm": 0.38611403755365, + "learning_rate": 0.00015432357859672177, + "loss": 1.1175, + "step": 1755 + }, + { + "epoch": 0.3378445813918533, + "grad_norm": 0.28403726016976716, + "learning_rate": 0.00015427124472347278, + "loss": 1.0419, + "step": 1756 + }, + { + "epoch": 0.3380369758003908, + "grad_norm": 0.26934138562037263, + "learning_rate": 0.0001542188897718977, + "loss": 1.0526, + "step": 1757 + }, + { + "epoch": 0.3382293702089283, + "grad_norm": 0.38889725227347716, + "learning_rate": 0.00015416651376233063, + "loss": 1.0532, + "step": 1758 + }, + { + "epoch": 0.3384217646174658, + "grad_norm": 0.2974947687548203, + 
"learning_rate": 0.00015411411671511377, + "loss": 1.1613, + "step": 1759 + }, + { + "epoch": 0.3386141590260033, + "grad_norm": 0.2875969969750952, + "learning_rate": 0.00015406169865059749, + "loss": 1.0862, + "step": 1760 + }, + { + "epoch": 0.3388065534345408, + "grad_norm": 0.3539143594883471, + "learning_rate": 0.00015400925958914042, + "loss": 1.0877, + "step": 1761 + }, + { + "epoch": 0.3389989478430783, + "grad_norm": 0.2854611950246478, + "learning_rate": 0.00015395679955110925, + "loss": 1.0127, + "step": 1762 + }, + { + "epoch": 0.3391913422516158, + "grad_norm": 0.30701325381728817, + "learning_rate": 0.00015390431855687896, + "loss": 1.1032, + "step": 1763 + }, + { + "epoch": 0.3393837366601533, + "grad_norm": 0.34027250974421197, + "learning_rate": 0.00015385181662683244, + "loss": 1.0645, + "step": 1764 + }, + { + "epoch": 0.33957613106869083, + "grad_norm": 0.2755512030754894, + "learning_rate": 0.00015379929378136087, + "loss": 1.1812, + "step": 1765 + }, + { + "epoch": 0.3397685254772283, + "grad_norm": 0.2721827475655045, + "learning_rate": 0.00015374675004086355, + "loss": 1.1167, + "step": 1766 + }, + { + "epoch": 0.3399609198857658, + "grad_norm": 0.3153367350088956, + "learning_rate": 0.00015369418542574782, + "loss": 1.0111, + "step": 1767 + }, + { + "epoch": 0.34015331429430334, + "grad_norm": 0.3169366995461158, + "learning_rate": 0.00015364159995642917, + "loss": 1.0217, + "step": 1768 + }, + { + "epoch": 0.3403457087028408, + "grad_norm": 0.32611725661062746, + "learning_rate": 0.00015358899365333124, + "loss": 1.1728, + "step": 1769 + }, + { + "epoch": 0.34053810311137833, + "grad_norm": 0.3338928906087186, + "learning_rate": 0.00015353636653688563, + "loss": 1.1639, + "step": 1770 + }, + { + "epoch": 0.34073049751991585, + "grad_norm": 0.32998448488016646, + "learning_rate": 0.0001534837186275322, + "loss": 1.0466, + "step": 1771 + }, + { + "epoch": 0.3409228919284533, + "grad_norm": 0.28215468049046394, + "learning_rate": 0.00015343104994571875, + "loss": 1.0381, + "step": 1772 + }, + { + "epoch": 0.34111528633699084, + "grad_norm": 0.27450755758158707, + "learning_rate": 0.0001533783605119012, + "loss": 1.1175, + "step": 1773 + }, + { + "epoch": 0.3413076807455283, + "grad_norm": 0.30931338530116964, + "learning_rate": 0.00015332565034654344, + "loss": 1.1249, + "step": 1774 + }, + { + "epoch": 0.34150007515406583, + "grad_norm": 0.38731023149783406, + "learning_rate": 0.00015327291947011762, + "loss": 1.0035, + "step": 1775 + }, + { + "epoch": 0.34169246956260335, + "grad_norm": 0.24864118699835444, + "learning_rate": 0.00015322016790310372, + "loss": 1.0852, + "step": 1776 + }, + { + "epoch": 0.3418848639711408, + "grad_norm": 0.405763130348696, + "learning_rate": 0.00015316739566598986, + "loss": 1.0656, + "step": 1777 + }, + { + "epoch": 0.34207725837967834, + "grad_norm": 0.31255940910451313, + "learning_rate": 0.00015311460277927217, + "loss": 1.1618, + "step": 1778 + }, + { + "epoch": 0.34226965278821586, + "grad_norm": 0.25263431760291793, + "learning_rate": 0.0001530617892634548, + "loss": 1.0342, + "step": 1779 + }, + { + "epoch": 0.3424620471967533, + "grad_norm": 0.30834828150676286, + "learning_rate": 0.0001530089551390499, + "loss": 1.0734, + "step": 1780 + }, + { + "epoch": 0.34265444160529085, + "grad_norm": 0.3297408612407537, + "learning_rate": 0.0001529561004265777, + "loss": 1.0902, + "step": 1781 + }, + { + "epoch": 0.34284683601382837, + "grad_norm": 0.26787666509708885, + "learning_rate": 0.00015290322514656626, + "loss": 1.1457, + 
"step": 1782 + }, + { + "epoch": 0.34303923042236584, + "grad_norm": 0.2627916678647937, + "learning_rate": 0.00015285032931955177, + "loss": 1.0799, + "step": 1783 + }, + { + "epoch": 0.34323162483090336, + "grad_norm": 0.36148219950596344, + "learning_rate": 0.0001527974129660784, + "loss": 1.1444, + "step": 1784 + }, + { + "epoch": 0.3434240192394409, + "grad_norm": 0.330209573855822, + "learning_rate": 0.0001527444761066982, + "loss": 1.0294, + "step": 1785 + }, + { + "epoch": 0.34361641364797835, + "grad_norm": 0.3197845304826271, + "learning_rate": 0.00015269151876197125, + "loss": 1.148, + "step": 1786 + }, + { + "epoch": 0.34380880805651587, + "grad_norm": 0.2790777484992655, + "learning_rate": 0.00015263854095246557, + "loss": 1.0771, + "step": 1787 + }, + { + "epoch": 0.34400120246505334, + "grad_norm": 0.23711577021571012, + "learning_rate": 0.00015258554269875717, + "loss": 1.0897, + "step": 1788 + }, + { + "epoch": 0.34419359687359086, + "grad_norm": 0.25859153045757366, + "learning_rate": 0.00015253252402142988, + "loss": 1.0724, + "step": 1789 + }, + { + "epoch": 0.3443859912821284, + "grad_norm": 0.34452803862415393, + "learning_rate": 0.00015247948494107565, + "loss": 1.093, + "step": 1790 + }, + { + "epoch": 0.34457838569066584, + "grad_norm": 0.3112044740418616, + "learning_rate": 0.00015242642547829417, + "loss": 1.1033, + "step": 1791 + }, + { + "epoch": 0.34477078009920337, + "grad_norm": 0.3302994700873457, + "learning_rate": 0.0001523733456536931, + "loss": 1.1365, + "step": 1792 + }, + { + "epoch": 0.3449631745077409, + "grad_norm": 0.31761706764435255, + "learning_rate": 0.00015232024548788813, + "loss": 1.1523, + "step": 1793 + }, + { + "epoch": 0.34515556891627835, + "grad_norm": 0.3238261409758097, + "learning_rate": 0.00015226712500150268, + "loss": 1.0589, + "step": 1794 + }, + { + "epoch": 0.3453479633248159, + "grad_norm": 0.32046292439174884, + "learning_rate": 0.00015221398421516816, + "loss": 1.0966, + "step": 1795 + }, + { + "epoch": 0.3455403577333534, + "grad_norm": 0.351197956693725, + "learning_rate": 0.0001521608231495238, + "loss": 1.1068, + "step": 1796 + }, + { + "epoch": 0.34573275214189086, + "grad_norm": 0.3723271805530417, + "learning_rate": 0.0001521076418252168, + "loss": 1.0115, + "step": 1797 + }, + { + "epoch": 0.3459251465504284, + "grad_norm": 0.2827945169050502, + "learning_rate": 0.00015205444026290216, + "loss": 1.0237, + "step": 1798 + }, + { + "epoch": 0.34611754095896585, + "grad_norm": 0.32931992868338894, + "learning_rate": 0.00015200121848324275, + "loss": 1.1826, + "step": 1799 + }, + { + "epoch": 0.3463099353675034, + "grad_norm": 0.2623493266489867, + "learning_rate": 0.00015194797650690926, + "loss": 0.9022, + "step": 1800 + }, + { + "epoch": 0.3465023297760409, + "grad_norm": 0.3204710753398161, + "learning_rate": 0.0001518947143545803, + "loss": 1.1384, + "step": 1801 + }, + { + "epoch": 0.34669472418457836, + "grad_norm": 0.28588826461643324, + "learning_rate": 0.0001518414320469423, + "loss": 1.0765, + "step": 1802 + }, + { + "epoch": 0.3468871185931159, + "grad_norm": 0.297112028634499, + "learning_rate": 0.00015178812960468945, + "loss": 1.0037, + "step": 1803 + }, + { + "epoch": 0.3470795130016534, + "grad_norm": 0.2640300049904744, + "learning_rate": 0.0001517348070485238, + "loss": 1.0881, + "step": 1804 + }, + { + "epoch": 0.34727190741019087, + "grad_norm": 0.3509622595181104, + "learning_rate": 0.00015168146439915525, + "loss": 1.182, + "step": 1805 + }, + { + "epoch": 0.3474643018187284, + "grad_norm": 
0.31591870890387047, + "learning_rate": 0.00015162810167730143, + "loss": 1.1381, + "step": 1806 + }, + { + "epoch": 0.3476566962272659, + "grad_norm": 0.30513087254168086, + "learning_rate": 0.00015157471890368785, + "loss": 1.0488, + "step": 1807 + }, + { + "epoch": 0.3478490906358034, + "grad_norm": 0.28825378971561394, + "learning_rate": 0.0001515213160990477, + "loss": 0.9969, + "step": 1808 + }, + { + "epoch": 0.3480414850443409, + "grad_norm": 0.32706413159062864, + "learning_rate": 0.00015146789328412212, + "loss": 1.0874, + "step": 1809 + }, + { + "epoch": 0.3482338794528784, + "grad_norm": 0.32015900627141747, + "learning_rate": 0.00015141445047965984, + "loss": 1.0852, + "step": 1810 + }, + { + "epoch": 0.3484262738614159, + "grad_norm": 0.27149440893241417, + "learning_rate": 0.0001513609877064174, + "loss": 1.1216, + "step": 1811 + }, + { + "epoch": 0.3486186682699534, + "grad_norm": 0.289906861337127, + "learning_rate": 0.0001513075049851592, + "loss": 1.0222, + "step": 1812 + }, + { + "epoch": 0.3488110626784909, + "grad_norm": 0.26056187957917737, + "learning_rate": 0.0001512540023366573, + "loss": 1.0619, + "step": 1813 + }, + { + "epoch": 0.3490034570870284, + "grad_norm": 0.31682954136910635, + "learning_rate": 0.00015120047978169144, + "loss": 1.1224, + "step": 1814 + }, + { + "epoch": 0.3491958514955659, + "grad_norm": 0.3374126923949515, + "learning_rate": 0.00015114693734104927, + "loss": 1.0508, + "step": 1815 + }, + { + "epoch": 0.3493882459041034, + "grad_norm": 0.26288242574283976, + "learning_rate": 0.00015109337503552595, + "loss": 1.11, + "step": 1816 + }, + { + "epoch": 0.3495806403126409, + "grad_norm": 0.34899136002242825, + "learning_rate": 0.00015103979288592453, + "loss": 1.1022, + "step": 1817 + }, + { + "epoch": 0.34977303472117843, + "grad_norm": 0.25210360826426065, + "learning_rate": 0.0001509861909130557, + "loss": 1.0325, + "step": 1818 + }, + { + "epoch": 0.3499654291297159, + "grad_norm": 0.31008390775494704, + "learning_rate": 0.00015093256913773786, + "loss": 1.0619, + "step": 1819 + }, + { + "epoch": 0.3501578235382534, + "grad_norm": 0.32120459374616644, + "learning_rate": 0.000150878927580797, + "loss": 1.1243, + "step": 1820 + }, + { + "epoch": 0.35035021794679094, + "grad_norm": 0.312263341500782, + "learning_rate": 0.00015082526626306695, + "loss": 1.1324, + "step": 1821 + }, + { + "epoch": 0.3505426123553284, + "grad_norm": 0.299100340479796, + "learning_rate": 0.0001507715852053892, + "loss": 1.0759, + "step": 1822 + }, + { + "epoch": 0.35073500676386593, + "grad_norm": 0.24478580044375112, + "learning_rate": 0.00015071788442861276, + "loss": 1.0094, + "step": 1823 + }, + { + "epoch": 0.35092740117240345, + "grad_norm": 0.3220735804964353, + "learning_rate": 0.00015066416395359444, + "loss": 1.0594, + "step": 1824 + }, + { + "epoch": 0.3511197955809409, + "grad_norm": 0.32609772204265813, + "learning_rate": 0.00015061042380119864, + "loss": 1.1503, + "step": 1825 + }, + { + "epoch": 0.35131218998947844, + "grad_norm": 0.3235793516980581, + "learning_rate": 0.0001505566639922974, + "loss": 1.0348, + "step": 1826 + }, + { + "epoch": 0.3515045843980159, + "grad_norm": 0.2738300161583872, + "learning_rate": 0.00015050288454777046, + "loss": 1.0835, + "step": 1827 + }, + { + "epoch": 0.35169697880655343, + "grad_norm": 0.3092118576913086, + "learning_rate": 0.0001504490854885051, + "loss": 1.0572, + "step": 1828 + }, + { + "epoch": 0.35188937321509095, + "grad_norm": 0.2927776653833124, + "learning_rate": 0.00015039526683539625, + "loss": 
1.1122, + "step": 1829 + }, + { + "epoch": 0.3520817676236284, + "grad_norm": 0.33700553328467114, + "learning_rate": 0.0001503414286093465, + "loss": 1.0637, + "step": 1830 + }, + { + "epoch": 0.35227416203216594, + "grad_norm": 0.2629123021983572, + "learning_rate": 0.00015028757083126592, + "loss": 0.9553, + "step": 1831 + }, + { + "epoch": 0.35246655644070346, + "grad_norm": 0.3587618540457818, + "learning_rate": 0.00015023369352207229, + "loss": 1.1639, + "step": 1832 + }, + { + "epoch": 0.3526589508492409, + "grad_norm": 0.2597243625037536, + "learning_rate": 0.00015017979670269095, + "loss": 1.1174, + "step": 1833 + }, + { + "epoch": 0.35285134525777845, + "grad_norm": 0.2657473337966056, + "learning_rate": 0.0001501258803940548, + "loss": 1.0376, + "step": 1834 + }, + { + "epoch": 0.35304373966631597, + "grad_norm": 0.2800192506752259, + "learning_rate": 0.0001500719446171043, + "loss": 1.0763, + "step": 1835 + }, + { + "epoch": 0.35323613407485344, + "grad_norm": 0.35641513034473116, + "learning_rate": 0.00015001798939278752, + "loss": 1.1407, + "step": 1836 + }, + { + "epoch": 0.35342852848339096, + "grad_norm": 0.29347724709111284, + "learning_rate": 0.00014996401474205997, + "loss": 1.1292, + "step": 1837 + }, + { + "epoch": 0.3536209228919285, + "grad_norm": 0.39734301227070035, + "learning_rate": 0.00014991002068588484, + "loss": 1.0724, + "step": 1838 + }, + { + "epoch": 0.35381331730046595, + "grad_norm": 0.27775395129926944, + "learning_rate": 0.0001498560072452328, + "loss": 1.1082, + "step": 1839 + }, + { + "epoch": 0.35400571170900347, + "grad_norm": 0.3014670784927019, + "learning_rate": 0.00014980197444108205, + "loss": 1.0971, + "step": 1840 + }, + { + "epoch": 0.35419810611754093, + "grad_norm": 0.3378754581872636, + "learning_rate": 0.00014974792229441826, + "loss": 1.1237, + "step": 1841 + }, + { + "epoch": 0.35439050052607846, + "grad_norm": 0.27128828355443557, + "learning_rate": 0.00014969385082623472, + "loss": 1.0729, + "step": 1842 + }, + { + "epoch": 0.354582894934616, + "grad_norm": 0.3298426117789897, + "learning_rate": 0.00014963976005753215, + "loss": 1.0834, + "step": 1843 + }, + { + "epoch": 0.35477528934315344, + "grad_norm": 0.343659239962405, + "learning_rate": 0.00014958565000931876, + "loss": 0.9839, + "step": 1844 + }, + { + "epoch": 0.35496768375169097, + "grad_norm": 0.278930524450225, + "learning_rate": 0.00014953152070261026, + "loss": 1.0054, + "step": 1845 + }, + { + "epoch": 0.3551600781602285, + "grad_norm": 0.29318306702917285, + "learning_rate": 0.0001494773721584299, + "loss": 1.1171, + "step": 1846 + }, + { + "epoch": 0.35535247256876595, + "grad_norm": 0.30681604941453455, + "learning_rate": 0.0001494232043978083, + "loss": 1.1272, + "step": 1847 + }, + { + "epoch": 0.3555448669773035, + "grad_norm": 0.3147653987995692, + "learning_rate": 0.00014936901744178367, + "loss": 1.1555, + "step": 1848 + }, + { + "epoch": 0.355737261385841, + "grad_norm": 0.303426632944889, + "learning_rate": 0.00014931481131140147, + "loss": 1.0929, + "step": 1849 + }, + { + "epoch": 0.35592965579437846, + "grad_norm": 0.27165012128240573, + "learning_rate": 0.00014926058602771484, + "loss": 1.137, + "step": 1850 + }, + { + "epoch": 0.356122050202916, + "grad_norm": 0.3075775481675345, + "learning_rate": 0.00014920634161178425, + "loss": 1.1136, + "step": 1851 + }, + { + "epoch": 0.3563144446114535, + "grad_norm": 0.34846587137004936, + "learning_rate": 0.00014915207808467756, + "loss": 1.0719, + "step": 1852 + }, + { + "epoch": 0.356506839019991, + 
"grad_norm": 0.3766285481356455, + "learning_rate": 0.0001490977954674701, + "loss": 1.0861, + "step": 1853 + }, + { + "epoch": 0.3566992334285285, + "grad_norm": 0.2686058052969888, + "learning_rate": 0.00014904349378124467, + "loss": 1.1716, + "step": 1854 + }, + { + "epoch": 0.35689162783706596, + "grad_norm": 0.27299597606529447, + "learning_rate": 0.0001489891730470914, + "loss": 1.0009, + "step": 1855 + }, + { + "epoch": 0.3570840222456035, + "grad_norm": 0.37283271345094865, + "learning_rate": 0.00014893483328610777, + "loss": 1.0428, + "step": 1856 + }, + { + "epoch": 0.357276416654141, + "grad_norm": 0.4309161089859467, + "learning_rate": 0.0001488804745193988, + "loss": 1.0318, + "step": 1857 + }, + { + "epoch": 0.35746881106267847, + "grad_norm": 0.35053829244215123, + "learning_rate": 0.00014882609676807675, + "loss": 1.0359, + "step": 1858 + }, + { + "epoch": 0.357661205471216, + "grad_norm": 0.31613241719091206, + "learning_rate": 0.00014877170005326136, + "loss": 1.0896, + "step": 1859 + }, + { + "epoch": 0.3578535998797535, + "grad_norm": 0.432863418358156, + "learning_rate": 0.00014871728439607966, + "loss": 1.0949, + "step": 1860 + }, + { + "epoch": 0.358045994288291, + "grad_norm": 0.4319979447427512, + "learning_rate": 0.00014866284981766606, + "loss": 1.0643, + "step": 1861 + }, + { + "epoch": 0.3582383886968285, + "grad_norm": 0.3139741896531891, + "learning_rate": 0.00014860839633916236, + "loss": 1.1716, + "step": 1862 + }, + { + "epoch": 0.358430783105366, + "grad_norm": 0.31078885130802425, + "learning_rate": 0.0001485539239817176, + "loss": 1.0672, + "step": 1863 + }, + { + "epoch": 0.3586231775139035, + "grad_norm": 0.3291140952595944, + "learning_rate": 0.0001484994327664883, + "loss": 1.1039, + "step": 1864 + }, + { + "epoch": 0.358815571922441, + "grad_norm": 0.3377318125162411, + "learning_rate": 0.0001484449227146381, + "loss": 1.1793, + "step": 1865 + }, + { + "epoch": 0.3590079663309785, + "grad_norm": 0.29210983351750236, + "learning_rate": 0.0001483903938473382, + "loss": 1.1435, + "step": 1866 + }, + { + "epoch": 0.359200360739516, + "grad_norm": 0.29840851672761753, + "learning_rate": 0.00014833584618576695, + "loss": 1.1924, + "step": 1867 + }, + { + "epoch": 0.3593927551480535, + "grad_norm": 0.33487483726424955, + "learning_rate": 0.00014828127975111, + "loss": 1.0845, + "step": 1868 + }, + { + "epoch": 0.359585149556591, + "grad_norm": 0.286279541824635, + "learning_rate": 0.0001482266945645603, + "loss": 1.0551, + "step": 1869 + }, + { + "epoch": 0.3597775439651285, + "grad_norm": 0.2991117765472499, + "learning_rate": 0.00014817209064731817, + "loss": 1.1261, + "step": 1870 + }, + { + "epoch": 0.35996993837366603, + "grad_norm": 0.33380266102838413, + "learning_rate": 0.00014811746802059113, + "loss": 1.0044, + "step": 1871 + }, + { + "epoch": 0.3601623327822035, + "grad_norm": 0.344713321414533, + "learning_rate": 0.000148062826705594, + "loss": 1.0734, + "step": 1872 + }, + { + "epoch": 0.360354727190741, + "grad_norm": 0.2724585130564941, + "learning_rate": 0.00014800816672354877, + "loss": 1.0704, + "step": 1873 + }, + { + "epoch": 0.36054712159927854, + "grad_norm": 0.2642015365539861, + "learning_rate": 0.00014795348809568476, + "loss": 1.1128, + "step": 1874 + }, + { + "epoch": 0.360739516007816, + "grad_norm": 0.3534706986703852, + "learning_rate": 0.00014789879084323857, + "loss": 1.1588, + "step": 1875 + }, + { + "epoch": 0.36093191041635353, + "grad_norm": 0.25537674601980054, + "learning_rate": 0.00014784407498745394, + "loss": 
1.0076, + "step": 1876 + }, + { + "epoch": 0.36112430482489105, + "grad_norm": 0.27894783927574934, + "learning_rate": 0.0001477893405495819, + "loss": 1.0879, + "step": 1877 + }, + { + "epoch": 0.3613166992334285, + "grad_norm": 0.37599560464547327, + "learning_rate": 0.00014773458755088068, + "loss": 1.1295, + "step": 1878 + }, + { + "epoch": 0.36150909364196604, + "grad_norm": 0.36789010878467643, + "learning_rate": 0.00014767981601261567, + "loss": 1.0879, + "step": 1879 + }, + { + "epoch": 0.3617014880505035, + "grad_norm": 0.30677080578676985, + "learning_rate": 0.00014762502595605956, + "loss": 1.0759, + "step": 1880 + }, + { + "epoch": 0.361893882459041, + "grad_norm": 0.27532868669205685, + "learning_rate": 0.00014757021740249214, + "loss": 1.1114, + "step": 1881 + }, + { + "epoch": 0.36208627686757855, + "grad_norm": 0.28652231343501494, + "learning_rate": 0.00014751539037320043, + "loss": 0.9999, + "step": 1882 + }, + { + "epoch": 0.362278671276116, + "grad_norm": 0.2796722496990979, + "learning_rate": 0.00014746054488947862, + "loss": 1.0701, + "step": 1883 + }, + { + "epoch": 0.36247106568465354, + "grad_norm": 0.3076328280684436, + "learning_rate": 0.00014740568097262812, + "loss": 1.1187, + "step": 1884 + }, + { + "epoch": 0.36266346009319106, + "grad_norm": 0.280827738363737, + "learning_rate": 0.00014735079864395729, + "loss": 1.0844, + "step": 1885 + }, + { + "epoch": 0.3628558545017285, + "grad_norm": 0.26475922736819607, + "learning_rate": 0.00014729589792478192, + "loss": 1.048, + "step": 1886 + }, + { + "epoch": 0.36304824891026605, + "grad_norm": 0.3512053032736165, + "learning_rate": 0.00014724097883642482, + "loss": 1.1478, + "step": 1887 + }, + { + "epoch": 0.36324064331880357, + "grad_norm": 0.26768796331119143, + "learning_rate": 0.00014718604140021588, + "loss": 1.0983, + "step": 1888 + }, + { + "epoch": 0.36343303772734104, + "grad_norm": 0.3144776219272371, + "learning_rate": 0.0001471310856374922, + "loss": 1.0266, + "step": 1889 + }, + { + "epoch": 0.36362543213587856, + "grad_norm": 0.302157941849477, + "learning_rate": 0.0001470761115695979, + "loss": 1.015, + "step": 1890 + }, + { + "epoch": 0.3638178265444161, + "grad_norm": 0.22398903107315207, + "learning_rate": 0.00014702111921788437, + "loss": 1.1601, + "step": 1891 + }, + { + "epoch": 0.36401022095295354, + "grad_norm": 0.350950329568758, + "learning_rate": 0.00014696610860370996, + "loss": 1.0775, + "step": 1892 + }, + { + "epoch": 0.36420261536149107, + "grad_norm": 0.38003015568613585, + "learning_rate": 0.00014691107974844014, + "loss": 1.0482, + "step": 1893 + }, + { + "epoch": 0.36439500977002853, + "grad_norm": 0.30681054747715425, + "learning_rate": 0.0001468560326734475, + "loss": 1.0904, + "step": 1894 + }, + { + "epoch": 0.36458740417856605, + "grad_norm": 0.28091694354639735, + "learning_rate": 0.00014680096740011172, + "loss": 1.1858, + "step": 1895 + }, + { + "epoch": 0.3647797985871036, + "grad_norm": 0.291599722468472, + "learning_rate": 0.00014674588394981947, + "loss": 1.1751, + "step": 1896 + }, + { + "epoch": 0.36497219299564104, + "grad_norm": 0.2614752988961378, + "learning_rate": 0.00014669078234396453, + "loss": 1.0868, + "step": 1897 + }, + { + "epoch": 0.36516458740417856, + "grad_norm": 0.2989790066668176, + "learning_rate": 0.00014663566260394775, + "loss": 1.1482, + "step": 1898 + }, + { + "epoch": 0.3653569818127161, + "grad_norm": 0.3141103454325244, + "learning_rate": 0.00014658052475117702, + "loss": 1.1347, + "step": 1899 + }, + { + "epoch": 0.36554937622125355, + 
"grad_norm": 0.29283849849484106, + "learning_rate": 0.0001465253688070672, + "loss": 1.1036, + "step": 1900 + }, + { + "epoch": 0.3657417706297911, + "grad_norm": 0.26689509763726227, + "learning_rate": 0.00014647019479304028, + "loss": 1.0175, + "step": 1901 + }, + { + "epoch": 0.3659341650383286, + "grad_norm": 0.4247693096984541, + "learning_rate": 0.00014641500273052514, + "loss": 1.1299, + "step": 1902 + }, + { + "epoch": 0.36612655944686606, + "grad_norm": 0.33323301557293616, + "learning_rate": 0.0001463597926409578, + "loss": 1.0762, + "step": 1903 + }, + { + "epoch": 0.3663189538554036, + "grad_norm": 0.32793474311197524, + "learning_rate": 0.0001463045645457812, + "loss": 1.0773, + "step": 1904 + }, + { + "epoch": 0.3665113482639411, + "grad_norm": 0.34854491719274083, + "learning_rate": 0.0001462493184664453, + "loss": 1.0637, + "step": 1905 + }, + { + "epoch": 0.36670374267247857, + "grad_norm": 0.39320003066251424, + "learning_rate": 0.000146194054424407, + "loss": 1.18, + "step": 1906 + }, + { + "epoch": 0.3668961370810161, + "grad_norm": 0.3027938909935693, + "learning_rate": 0.00014613877244113032, + "loss": 1.0773, + "step": 1907 + }, + { + "epoch": 0.36708853148955356, + "grad_norm": 0.3029114678045782, + "learning_rate": 0.00014608347253808606, + "loss": 1.0866, + "step": 1908 + }, + { + "epoch": 0.3672809258980911, + "grad_norm": 0.284655055825206, + "learning_rate": 0.0001460281547367521, + "loss": 1.1031, + "step": 1909 + }, + { + "epoch": 0.3674733203066286, + "grad_norm": 0.36640965978321494, + "learning_rate": 0.0001459728190586132, + "loss": 1.0476, + "step": 1910 + }, + { + "epoch": 0.36766571471516607, + "grad_norm": 0.29089276480726395, + "learning_rate": 0.00014591746552516108, + "loss": 1.1077, + "step": 1911 + }, + { + "epoch": 0.3678581091237036, + "grad_norm": 0.290937824411561, + "learning_rate": 0.0001458620941578945, + "loss": 1.1023, + "step": 1912 + }, + { + "epoch": 0.3680505035322411, + "grad_norm": 0.2891360019158722, + "learning_rate": 0.00014580670497831903, + "loss": 1.1717, + "step": 1913 + }, + { + "epoch": 0.3682428979407786, + "grad_norm": 0.28263787625557674, + "learning_rate": 0.00014575129800794718, + "loss": 1.0419, + "step": 1914 + }, + { + "epoch": 0.3684352923493161, + "grad_norm": 0.30818924629002703, + "learning_rate": 0.00014569587326829835, + "loss": 1.089, + "step": 1915 + }, + { + "epoch": 0.3686276867578536, + "grad_norm": 0.29294698405037756, + "learning_rate": 0.0001456404307808989, + "loss": 1.1865, + "step": 1916 + }, + { + "epoch": 0.3688200811663911, + "grad_norm": 0.29451444432322943, + "learning_rate": 0.00014558497056728204, + "loss": 1.1114, + "step": 1917 + }, + { + "epoch": 0.3690124755749286, + "grad_norm": 0.315494682217328, + "learning_rate": 0.00014552949264898794, + "loss": 1.1967, + "step": 1918 + }, + { + "epoch": 0.36920486998346613, + "grad_norm": 0.2959856348141984, + "learning_rate": 0.00014547399704756348, + "loss": 1.0801, + "step": 1919 + }, + { + "epoch": 0.3693972643920036, + "grad_norm": 0.3163604706547772, + "learning_rate": 0.00014541848378456256, + "loss": 1.0565, + "step": 1920 + }, + { + "epoch": 0.3695896588005411, + "grad_norm": 0.3176996527827041, + "learning_rate": 0.00014536295288154592, + "loss": 1.0979, + "step": 1921 + }, + { + "epoch": 0.3697820532090786, + "grad_norm": 0.28799658236242526, + "learning_rate": 0.0001453074043600811, + "loss": 1.1257, + "step": 1922 + }, + { + "epoch": 0.3699744476176161, + "grad_norm": 0.31411010622190016, + "learning_rate": 0.00014525183824174248, + 
"loss": 0.9577, + "step": 1923 + }, + { + "epoch": 0.37016684202615363, + "grad_norm": 0.25421927880015555, + "learning_rate": 0.00014519625454811135, + "loss": 1.1084, + "step": 1924 + }, + { + "epoch": 0.3703592364346911, + "grad_norm": 0.32865048509904704, + "learning_rate": 0.00014514065330077576, + "loss": 1.1339, + "step": 1925 + }, + { + "epoch": 0.3705516308432286, + "grad_norm": 0.3242171630569951, + "learning_rate": 0.00014508503452133052, + "loss": 1.1099, + "step": 1926 + }, + { + "epoch": 0.37074402525176614, + "grad_norm": 0.31137280682818663, + "learning_rate": 0.00014502939823137745, + "loss": 1.1093, + "step": 1927 + }, + { + "epoch": 0.3709364196603036, + "grad_norm": 0.27958562705832646, + "learning_rate": 0.00014497374445252497, + "loss": 1.0681, + "step": 1928 + }, + { + "epoch": 0.37112881406884113, + "grad_norm": 0.3657785587537535, + "learning_rate": 0.00014491807320638834, + "loss": 1.1187, + "step": 1929 + }, + { + "epoch": 0.37132120847737865, + "grad_norm": 0.2527408477986618, + "learning_rate": 0.0001448623845145897, + "loss": 1.0952, + "step": 1930 + }, + { + "epoch": 0.3715136028859161, + "grad_norm": 0.2958644187223314, + "learning_rate": 0.00014480667839875786, + "loss": 1.1191, + "step": 1931 + }, + { + "epoch": 0.37170599729445364, + "grad_norm": 0.367129519008423, + "learning_rate": 0.00014475095488052843, + "loss": 1.0967, + "step": 1932 + }, + { + "epoch": 0.3718983917029911, + "grad_norm": 0.3093776216584789, + "learning_rate": 0.0001446952139815438, + "loss": 1.1141, + "step": 1933 + }, + { + "epoch": 0.3720907861115286, + "grad_norm": 0.3012883217871457, + "learning_rate": 0.00014463945572345307, + "loss": 1.0621, + "step": 1934 + }, + { + "epoch": 0.37228318052006615, + "grad_norm": 0.30749918181400143, + "learning_rate": 0.00014458368012791213, + "loss": 1.1066, + "step": 1935 + }, + { + "epoch": 0.3724755749286036, + "grad_norm": 0.3389656998187917, + "learning_rate": 0.00014452788721658355, + "loss": 1.1911, + "step": 1936 + }, + { + "epoch": 0.37266796933714114, + "grad_norm": 0.2576045603691585, + "learning_rate": 0.00014447207701113668, + "loss": 1.1183, + "step": 1937 + }, + { + "epoch": 0.37286036374567866, + "grad_norm": 0.3221752712214336, + "learning_rate": 0.00014441624953324755, + "loss": 1.1001, + "step": 1938 + }, + { + "epoch": 0.3730527581542161, + "grad_norm": 0.3028885729414909, + "learning_rate": 0.00014436040480459892, + "loss": 1.1018, + "step": 1939 + }, + { + "epoch": 0.37324515256275365, + "grad_norm": 0.26494023180236825, + "learning_rate": 0.0001443045428468802, + "loss": 1.0562, + "step": 1940 + }, + { + "epoch": 0.37343754697129117, + "grad_norm": 0.2864666905192389, + "learning_rate": 0.0001442486636817876, + "loss": 1.1579, + "step": 1941 + }, + { + "epoch": 0.37362994137982863, + "grad_norm": 0.27285037768947795, + "learning_rate": 0.00014419276733102388, + "loss": 1.1256, + "step": 1942 + }, + { + "epoch": 0.37382233578836616, + "grad_norm": 0.28600232582889934, + "learning_rate": 0.00014413685381629856, + "loss": 1.0829, + "step": 1943 + }, + { + "epoch": 0.3740147301969037, + "grad_norm": 0.2980899104623963, + "learning_rate": 0.0001440809231593278, + "loss": 1.0274, + "step": 1944 + }, + { + "epoch": 0.37420712460544114, + "grad_norm": 0.3226759823491168, + "learning_rate": 0.00014402497538183444, + "loss": 1.0545, + "step": 1945 + }, + { + "epoch": 0.37439951901397867, + "grad_norm": 0.2695213506096957, + "learning_rate": 0.00014396901050554793, + "loss": 1.0513, + "step": 1946 + }, + { + "epoch": 
0.37459191342251613, + "grad_norm": 0.38507862268510473, + "learning_rate": 0.0001439130285522044, + "loss": 1.0645, + "step": 1947 + }, + { + "epoch": 0.37478430783105365, + "grad_norm": 0.29164899375142106, + "learning_rate": 0.0001438570295435466, + "loss": 1.0832, + "step": 1948 + }, + { + "epoch": 0.3749767022395912, + "grad_norm": 0.31056433981839915, + "learning_rate": 0.00014380101350132388, + "loss": 1.1543, + "step": 1949 + }, + { + "epoch": 0.37516909664812864, + "grad_norm": 0.2680192797807722, + "learning_rate": 0.00014374498044729227, + "loss": 1.1139, + "step": 1950 + }, + { + "epoch": 0.37536149105666616, + "grad_norm": 0.2821569933474121, + "learning_rate": 0.00014368893040321427, + "loss": 1.0733, + "step": 1951 + }, + { + "epoch": 0.3755538854652037, + "grad_norm": 0.3201459023484589, + "learning_rate": 0.00014363286339085914, + "loss": 1.1127, + "step": 1952 + }, + { + "epoch": 0.37574627987374115, + "grad_norm": 0.29164255169627756, + "learning_rate": 0.0001435767794320027, + "loss": 1.0496, + "step": 1953 + }, + { + "epoch": 0.3759386742822787, + "grad_norm": 0.2906523835438739, + "learning_rate": 0.00014352067854842723, + "loss": 1.1041, + "step": 1954 + }, + { + "epoch": 0.3761310686908162, + "grad_norm": 0.2680536260721687, + "learning_rate": 0.0001434645607619217, + "loss": 1.1774, + "step": 1955 + }, + { + "epoch": 0.37632346309935366, + "grad_norm": 0.25914136042921976, + "learning_rate": 0.0001434084260942816, + "loss": 1.0613, + "step": 1956 + }, + { + "epoch": 0.3765158575078912, + "grad_norm": 0.28862573938948977, + "learning_rate": 0.000143352274567309, + "loss": 1.0751, + "step": 1957 + }, + { + "epoch": 0.3767082519164287, + "grad_norm": 0.3023902117097119, + "learning_rate": 0.00014329610620281253, + "loss": 1.0843, + "step": 1958 + }, + { + "epoch": 0.37690064632496617, + "grad_norm": 0.35296921790886787, + "learning_rate": 0.00014323992102260733, + "loss": 1.0453, + "step": 1959 + }, + { + "epoch": 0.3770930407335037, + "grad_norm": 0.2870603460152669, + "learning_rate": 0.00014318371904851503, + "loss": 1.0963, + "step": 1960 + }, + { + "epoch": 0.37728543514204116, + "grad_norm": 0.26613303087991497, + "learning_rate": 0.00014312750030236383, + "loss": 1.1107, + "step": 1961 + }, + { + "epoch": 0.3774778295505787, + "grad_norm": 0.3064795217516217, + "learning_rate": 0.0001430712648059885, + "loss": 1.0534, + "step": 1962 + }, + { + "epoch": 0.3776702239591162, + "grad_norm": 0.34496826674265324, + "learning_rate": 0.00014301501258123025, + "loss": 1.0758, + "step": 1963 + }, + { + "epoch": 0.37786261836765367, + "grad_norm": 0.34103971079027146, + "learning_rate": 0.0001429587436499367, + "loss": 1.0877, + "step": 1964 + }, + { + "epoch": 0.3780550127761912, + "grad_norm": 0.3143255351612261, + "learning_rate": 0.0001429024580339622, + "loss": 1.0223, + "step": 1965 + }, + { + "epoch": 0.3782474071847287, + "grad_norm": 0.3099969175356247, + "learning_rate": 0.00014284615575516738, + "loss": 1.1331, + "step": 1966 + }, + { + "epoch": 0.3784398015932662, + "grad_norm": 0.2995963604391879, + "learning_rate": 0.00014278983683541934, + "loss": 1.1212, + "step": 1967 + }, + { + "epoch": 0.3786321960018037, + "grad_norm": 0.26594334986357204, + "learning_rate": 0.00014273350129659172, + "loss": 1.0299, + "step": 1968 + }, + { + "epoch": 0.3788245904103412, + "grad_norm": 0.3663847970446549, + "learning_rate": 0.00014267714916056464, + "loss": 1.1724, + "step": 1969 + }, + { + "epoch": 0.3790169848188787, + "grad_norm": 0.2670572257110239, + 
"learning_rate": 0.0001426207804492246, + "loss": 1.1415, + "step": 1970 + }, + { + "epoch": 0.3792093792274162, + "grad_norm": 0.28044351715522375, + "learning_rate": 0.00014256439518446455, + "loss": 1.1028, + "step": 1971 + }, + { + "epoch": 0.37940177363595373, + "grad_norm": 0.3372370384754339, + "learning_rate": 0.00014250799338818388, + "loss": 1.1881, + "step": 1972 + }, + { + "epoch": 0.3795941680444912, + "grad_norm": 0.28017107256950113, + "learning_rate": 0.00014245157508228839, + "loss": 1.0563, + "step": 1973 + }, + { + "epoch": 0.3797865624530287, + "grad_norm": 0.298544765390121, + "learning_rate": 0.00014239514028869033, + "loss": 1.1141, + "step": 1974 + }, + { + "epoch": 0.3799789568615662, + "grad_norm": 0.30277588433873626, + "learning_rate": 0.00014233868902930826, + "loss": 1.1369, + "step": 1975 + }, + { + "epoch": 0.3801713512701037, + "grad_norm": 0.2943144115185054, + "learning_rate": 0.0001422822213260673, + "loss": 1.1295, + "step": 1976 + }, + { + "epoch": 0.38036374567864123, + "grad_norm": 0.28713604502386886, + "learning_rate": 0.00014222573720089874, + "loss": 1.0605, + "step": 1977 + }, + { + "epoch": 0.3805561400871787, + "grad_norm": 0.33591508231816775, + "learning_rate": 0.00014216923667574043, + "loss": 1.1793, + "step": 1978 + }, + { + "epoch": 0.3807485344957162, + "grad_norm": 0.34764835548957024, + "learning_rate": 0.00014211271977253653, + "loss": 1.0707, + "step": 1979 + }, + { + "epoch": 0.38094092890425374, + "grad_norm": 0.2954517975798876, + "learning_rate": 0.00014205618651323752, + "loss": 1.1271, + "step": 1980 + }, + { + "epoch": 0.3811333233127912, + "grad_norm": 0.3377212307135022, + "learning_rate": 0.00014199963691980026, + "loss": 1.1592, + "step": 1981 + }, + { + "epoch": 0.3813257177213287, + "grad_norm": 0.25759934385625344, + "learning_rate": 0.00014194307101418805, + "loss": 1.0455, + "step": 1982 + }, + { + "epoch": 0.38151811212986625, + "grad_norm": 0.26686972307314477, + "learning_rate": 0.00014188648881837033, + "loss": 1.1715, + "step": 1983 + }, + { + "epoch": 0.3817105065384037, + "grad_norm": 0.29984112996475654, + "learning_rate": 0.00014182989035432298, + "loss": 1.1334, + "step": 1984 + }, + { + "epoch": 0.38190290094694124, + "grad_norm": 0.35255728654768675, + "learning_rate": 0.00014177327564402826, + "loss": 1.0833, + "step": 1985 + }, + { + "epoch": 0.38209529535547876, + "grad_norm": 0.39304632772877357, + "learning_rate": 0.00014171664470947465, + "loss": 1.0845, + "step": 1986 + }, + { + "epoch": 0.3822876897640162, + "grad_norm": 0.3839754251032507, + "learning_rate": 0.0001416599975726569, + "loss": 1.1347, + "step": 1987 + }, + { + "epoch": 0.38248008417255375, + "grad_norm": 0.25505784855196545, + "learning_rate": 0.00014160333425557615, + "loss": 1.0058, + "step": 1988 + }, + { + "epoch": 0.3826724785810912, + "grad_norm": 0.3000442649687535, + "learning_rate": 0.00014154665478023976, + "loss": 1.1342, + "step": 1989 + }, + { + "epoch": 0.38286487298962874, + "grad_norm": 0.2576329171643482, + "learning_rate": 0.0001414899591686614, + "loss": 1.103, + "step": 1990 + }, + { + "epoch": 0.38305726739816626, + "grad_norm": 0.3656325396461047, + "learning_rate": 0.000141433247442861, + "loss": 0.9734, + "step": 1991 + }, + { + "epoch": 0.3832496618067037, + "grad_norm": 0.29380429146326276, + "learning_rate": 0.0001413765196248647, + "loss": 1.0384, + "step": 1992 + }, + { + "epoch": 0.38344205621524124, + "grad_norm": 0.30529503815508935, + "learning_rate": 0.00014131977573670498, + "loss": 1.1147, + 
"step": 1993 + }, + { + "epoch": 0.38363445062377877, + "grad_norm": 0.32160975147219234, + "learning_rate": 0.0001412630158004205, + "loss": 1.0596, + "step": 1994 + }, + { + "epoch": 0.38382684503231623, + "grad_norm": 0.3663337398837326, + "learning_rate": 0.00014120623983805616, + "loss": 1.0174, + "step": 1995 + }, + { + "epoch": 0.38401923944085375, + "grad_norm": 0.27165311841715334, + "learning_rate": 0.00014114944787166307, + "loss": 1.1311, + "step": 1996 + }, + { + "epoch": 0.3842116338493913, + "grad_norm": 0.28057625346601744, + "learning_rate": 0.00014109263992329857, + "loss": 1.1139, + "step": 1997 + }, + { + "epoch": 0.38440402825792874, + "grad_norm": 0.3152161600145858, + "learning_rate": 0.0001410358160150263, + "loss": 1.1688, + "step": 1998 + }, + { + "epoch": 0.38459642266646626, + "grad_norm": 0.2699550529752809, + "learning_rate": 0.0001409789761689159, + "loss": 1.1206, + "step": 1999 + }, + { + "epoch": 0.38478881707500373, + "grad_norm": 0.18316410270251204, + "learning_rate": 0.00014092212040704336, + "loss": 1.0787, + "step": 2000 + }, + { + "epoch": 0.38498121148354125, + "grad_norm": 0.35663042410706985, + "learning_rate": 0.0001408652487514908, + "loss": 1.1214, + "step": 2001 + }, + { + "epoch": 0.3851736058920788, + "grad_norm": 0.32414486296588063, + "learning_rate": 0.0001408083612243465, + "loss": 1.0169, + "step": 2002 + }, + { + "epoch": 0.38536600030061624, + "grad_norm": 0.32898446387455277, + "learning_rate": 0.00014075145784770495, + "loss": 1.0604, + "step": 2003 + }, + { + "epoch": 0.38555839470915376, + "grad_norm": 0.32755318138444894, + "learning_rate": 0.00014069453864366677, + "loss": 1.0501, + "step": 2004 + }, + { + "epoch": 0.3857507891176913, + "grad_norm": 0.29762364756747844, + "learning_rate": 0.00014063760363433866, + "loss": 1.1317, + "step": 2005 + }, + { + "epoch": 0.38594318352622875, + "grad_norm": 0.2886755174274723, + "learning_rate": 0.00014058065284183358, + "loss": 1.0229, + "step": 2006 + }, + { + "epoch": 0.38613557793476627, + "grad_norm": 0.33066756772643474, + "learning_rate": 0.00014052368628827058, + "loss": 1.1267, + "step": 2007 + }, + { + "epoch": 0.3863279723433038, + "grad_norm": 0.5108689803097722, + "learning_rate": 0.00014046670399577477, + "loss": 1.1445, + "step": 2008 + }, + { + "epoch": 0.38652036675184126, + "grad_norm": 0.367712119078728, + "learning_rate": 0.0001404097059864774, + "loss": 1.0834, + "step": 2009 + }, + { + "epoch": 0.3867127611603788, + "grad_norm": 0.29666278316430783, + "learning_rate": 0.00014035269228251587, + "loss": 1.0715, + "step": 2010 + }, + { + "epoch": 0.3869051555689163, + "grad_norm": 0.3410635961186643, + "learning_rate": 0.00014029566290603367, + "loss": 1.1426, + "step": 2011 + }, + { + "epoch": 0.38709754997745377, + "grad_norm": 0.3064763240114719, + "learning_rate": 0.00014023861787918032, + "loss": 1.0599, + "step": 2012 + }, + { + "epoch": 0.3872899443859913, + "grad_norm": 0.3203840538653993, + "learning_rate": 0.00014018155722411144, + "loss": 1.1161, + "step": 2013 + }, + { + "epoch": 0.38748233879452876, + "grad_norm": 0.3009858416073814, + "learning_rate": 0.00014012448096298874, + "loss": 1.0631, + "step": 2014 + }, + { + "epoch": 0.3876747332030663, + "grad_norm": 0.2715882261239675, + "learning_rate": 0.00014006738911798, + "loss": 1.1196, + "step": 2015 + }, + { + "epoch": 0.3878671276116038, + "grad_norm": 0.2632765294624663, + "learning_rate": 0.000140010281711259, + "loss": 1.0435, + "step": 2016 + }, + { + "epoch": 0.38805952202014127, + "grad_norm": 
0.30174660263090725, + "learning_rate": 0.00013995315876500565, + "loss": 1.0679, + "step": 2017 + }, + { + "epoch": 0.3882519164286788, + "grad_norm": 0.2976900181919206, + "learning_rate": 0.0001398960203014058, + "loss": 1.1669, + "step": 2018 + }, + { + "epoch": 0.3884443108372163, + "grad_norm": 0.3002693812398982, + "learning_rate": 0.0001398388663426514, + "loss": 1.0552, + "step": 2019 + }, + { + "epoch": 0.3886367052457538, + "grad_norm": 0.3357480216306192, + "learning_rate": 0.00013978169691094037, + "loss": 1.0776, + "step": 2020 + }, + { + "epoch": 0.3888290996542913, + "grad_norm": 0.29927868197483065, + "learning_rate": 0.00013972451202847664, + "loss": 1.0676, + "step": 2021 + }, + { + "epoch": 0.3890214940628288, + "grad_norm": 0.3286864164580379, + "learning_rate": 0.00013966731171747023, + "loss": 0.992, + "step": 2022 + }, + { + "epoch": 0.3892138884713663, + "grad_norm": 0.3038832491213965, + "learning_rate": 0.000139610096000137, + "loss": 1.1172, + "step": 2023 + }, + { + "epoch": 0.3894062828799038, + "grad_norm": 0.2627361070246382, + "learning_rate": 0.00013955286489869895, + "loss": 1.132, + "step": 2024 + }, + { + "epoch": 0.38959867728844133, + "grad_norm": 0.3438216465287393, + "learning_rate": 0.0001394956184353839, + "loss": 1.1049, + "step": 2025 + }, + { + "epoch": 0.3897910716969788, + "grad_norm": 0.343644278698167, + "learning_rate": 0.00013943835663242577, + "loss": 1.0887, + "step": 2026 + }, + { + "epoch": 0.3899834661055163, + "grad_norm": 0.2617840041858717, + "learning_rate": 0.00013938107951206439, + "loss": 1.0097, + "step": 2027 + }, + { + "epoch": 0.3901758605140538, + "grad_norm": 0.2723666708594852, + "learning_rate": 0.00013932378709654547, + "loss": 1.0521, + "step": 2028 + }, + { + "epoch": 0.3903682549225913, + "grad_norm": 0.32178469621356853, + "learning_rate": 0.00013926647940812082, + "loss": 1.1396, + "step": 2029 + }, + { + "epoch": 0.39056064933112883, + "grad_norm": 0.3511065750811782, + "learning_rate": 0.000139209156469048, + "loss": 1.2104, + "step": 2030 + }, + { + "epoch": 0.3907530437396663, + "grad_norm": 0.3060315973089918, + "learning_rate": 0.00013915181830159061, + "loss": 1.1405, + "step": 2031 + }, + { + "epoch": 0.3909454381482038, + "grad_norm": 0.29461087808642006, + "learning_rate": 0.00013909446492801818, + "loss": 0.9589, + "step": 2032 + }, + { + "epoch": 0.39113783255674134, + "grad_norm": 0.28331014386052233, + "learning_rate": 0.00013903709637060605, + "loss": 1.1054, + "step": 2033 + }, + { + "epoch": 0.3913302269652788, + "grad_norm": 0.317799359866807, + "learning_rate": 0.00013897971265163544, + "loss": 1.0282, + "step": 2034 + }, + { + "epoch": 0.3915226213738163, + "grad_norm": 0.3500943680259453, + "learning_rate": 0.00013892231379339368, + "loss": 1.0858, + "step": 2035 + }, + { + "epoch": 0.39171501578235385, + "grad_norm": 0.3042963158307611, + "learning_rate": 0.00013886489981817374, + "loss": 1.0279, + "step": 2036 + }, + { + "epoch": 0.3919074101908913, + "grad_norm": 0.39582282683952036, + "learning_rate": 0.00013880747074827455, + "loss": 1.1546, + "step": 2037 + }, + { + "epoch": 0.39209980459942884, + "grad_norm": 0.29684025299805616, + "learning_rate": 0.00013875002660600086, + "loss": 1.1034, + "step": 2038 + }, + { + "epoch": 0.39229219900796636, + "grad_norm": 0.3360600449603767, + "learning_rate": 0.00013869256741366338, + "loss": 1.1364, + "step": 2039 + }, + { + "epoch": 0.3924845934165038, + "grad_norm": 0.2939770535631643, + "learning_rate": 0.00013863509319357858, + "loss": 
1.0044, + "step": 2040 + }, + { + "epoch": 0.39267698782504135, + "grad_norm": 0.307410315423353, + "learning_rate": 0.00013857760396806875, + "loss": 1.0466, + "step": 2041 + }, + { + "epoch": 0.3928693822335788, + "grad_norm": 0.25265901953556863, + "learning_rate": 0.00013852009975946208, + "loss": 1.0157, + "step": 2042 + }, + { + "epoch": 0.39306177664211633, + "grad_norm": 0.24404275983052462, + "learning_rate": 0.00013846258059009253, + "loss": 1.066, + "step": 2043 + }, + { + "epoch": 0.39325417105065386, + "grad_norm": 0.3280647300136039, + "learning_rate": 0.0001384050464822999, + "loss": 1.101, + "step": 2044 + }, + { + "epoch": 0.3934465654591913, + "grad_norm": 0.3428250585144029, + "learning_rate": 0.0001383474974584297, + "loss": 1.0671, + "step": 2045 + }, + { + "epoch": 0.39363895986772884, + "grad_norm": 0.26512424373300364, + "learning_rate": 0.0001382899335408334, + "loss": 1.0308, + "step": 2046 + }, + { + "epoch": 0.39383135427626637, + "grad_norm": 0.4813952914573968, + "learning_rate": 0.00013823235475186814, + "loss": 1.0766, + "step": 2047 + }, + { + "epoch": 0.39402374868480383, + "grad_norm": 0.2894001090687884, + "learning_rate": 0.00013817476111389684, + "loss": 1.0691, + "step": 2048 + }, + { + "epoch": 0.39421614309334135, + "grad_norm": 0.3104660549042743, + "learning_rate": 0.00013811715264928825, + "loss": 1.1377, + "step": 2049 + }, + { + "epoch": 0.3944085375018789, + "grad_norm": 0.4111115117637331, + "learning_rate": 0.00013805952938041673, + "loss": 1.0415, + "step": 2050 + }, + { + "epoch": 0.39460093191041634, + "grad_norm": 0.3632355462202737, + "learning_rate": 0.00013800189132966257, + "loss": 1.1263, + "step": 2051 + }, + { + "epoch": 0.39479332631895386, + "grad_norm": 0.31626302678159074, + "learning_rate": 0.00013794423851941175, + "loss": 1.0975, + "step": 2052 + }, + { + "epoch": 0.39498572072749133, + "grad_norm": 0.2706800886709484, + "learning_rate": 0.00013788657097205592, + "loss": 1.1029, + "step": 2053 + }, + { + "epoch": 0.39517811513602885, + "grad_norm": 0.25725046537940016, + "learning_rate": 0.00013782888870999245, + "loss": 1.035, + "step": 2054 + }, + { + "epoch": 0.3953705095445664, + "grad_norm": 0.27834633660170705, + "learning_rate": 0.0001377711917556245, + "loss": 1.1839, + "step": 2055 + }, + { + "epoch": 0.39556290395310384, + "grad_norm": 0.42690148735716416, + "learning_rate": 0.00013771348013136097, + "loss": 1.1049, + "step": 2056 + }, + { + "epoch": 0.39575529836164136, + "grad_norm": 0.29549524956082585, + "learning_rate": 0.00013765575385961626, + "loss": 1.1342, + "step": 2057 + }, + { + "epoch": 0.3959476927701789, + "grad_norm": 0.24628504779021126, + "learning_rate": 0.0001375980129628107, + "loss": 1.1864, + "step": 2058 + }, + { + "epoch": 0.39614008717871635, + "grad_norm": 0.27842487613582945, + "learning_rate": 0.00013754025746337014, + "loss": 1.1022, + "step": 2059 + }, + { + "epoch": 0.39633248158725387, + "grad_norm": 0.28060256854335885, + "learning_rate": 0.00013748248738372615, + "loss": 1.0962, + "step": 2060 + }, + { + "epoch": 0.3965248759957914, + "grad_norm": 0.3190526824239674, + "learning_rate": 0.00013742470274631598, + "loss": 1.099, + "step": 2061 + }, + { + "epoch": 0.39671727040432886, + "grad_norm": 0.33726110717618146, + "learning_rate": 0.00013736690357358253, + "loss": 1.0868, + "step": 2062 + }, + { + "epoch": 0.3969096648128664, + "grad_norm": 0.3113877915706845, + "learning_rate": 0.00013730908988797427, + "loss": 1.0781, + "step": 2063 + }, + { + "epoch": 0.3971020592214039, 
+ "grad_norm": 0.2738932258230844, + "learning_rate": 0.00013725126171194543, + "loss": 1.0563, + "step": 2064 + }, + { + "epoch": 0.39729445362994137, + "grad_norm": 0.3452939684872177, + "learning_rate": 0.0001371934190679558, + "loss": 1.1556, + "step": 2065 + }, + { + "epoch": 0.3974868480384789, + "grad_norm": 0.22981383382739284, + "learning_rate": 0.00013713556197847074, + "loss": 1.1705, + "step": 2066 + }, + { + "epoch": 0.39767924244701636, + "grad_norm": 0.26028591749442753, + "learning_rate": 0.00013707769046596136, + "loss": 0.9801, + "step": 2067 + }, + { + "epoch": 0.3978716368555539, + "grad_norm": 0.2918806737392978, + "learning_rate": 0.00013701980455290425, + "loss": 1.0513, + "step": 2068 + }, + { + "epoch": 0.3980640312640914, + "grad_norm": 0.3202449645307156, + "learning_rate": 0.00013696190426178162, + "loss": 1.2073, + "step": 2069 + }, + { + "epoch": 0.39825642567262887, + "grad_norm": 0.3810834203542579, + "learning_rate": 0.00013690398961508127, + "loss": 1.0588, + "step": 2070 + }, + { + "epoch": 0.3984488200811664, + "grad_norm": 0.27711890794106275, + "learning_rate": 0.00013684606063529663, + "loss": 1.1198, + "step": 2071 + }, + { + "epoch": 0.3986412144897039, + "grad_norm": 0.3212684362809215, + "learning_rate": 0.00013678811734492657, + "loss": 0.9831, + "step": 2072 + }, + { + "epoch": 0.3988336088982414, + "grad_norm": 0.30844321733987484, + "learning_rate": 0.00013673015976647568, + "loss": 1.1314, + "step": 2073 + }, + { + "epoch": 0.3990260033067789, + "grad_norm": 0.3166019414123627, + "learning_rate": 0.000136672187922454, + "loss": 1.0714, + "step": 2074 + }, + { + "epoch": 0.3992183977153164, + "grad_norm": 0.38948413266360854, + "learning_rate": 0.00013661420183537704, + "loss": 1.0322, + "step": 2075 + }, + { + "epoch": 0.3994107921238539, + "grad_norm": 0.3153684778434652, + "learning_rate": 0.00013655620152776604, + "loss": 1.1494, + "step": 2076 + }, + { + "epoch": 0.3996031865323914, + "grad_norm": 0.26359494773756625, + "learning_rate": 0.0001364981870221476, + "loss": 1.1031, + "step": 2077 + }, + { + "epoch": 0.39979558094092893, + "grad_norm": 0.2615007390625749, + "learning_rate": 0.00013644015834105388, + "loss": 1.1718, + "step": 2078 + }, + { + "epoch": 0.3999879753494664, + "grad_norm": 0.2770276163713552, + "learning_rate": 0.00013638211550702256, + "loss": 1.08, + "step": 2079 + }, + { + "epoch": 0.4001803697580039, + "grad_norm": 0.2965165584442199, + "learning_rate": 0.0001363240585425968, + "loss": 1.0811, + "step": 2080 + }, + { + "epoch": 0.4003727641665414, + "grad_norm": 0.3613295323674013, + "learning_rate": 0.0001362659874703253, + "loss": 1.0728, + "step": 2081 + }, + { + "epoch": 0.4005651585750789, + "grad_norm": 0.2935570637582771, + "learning_rate": 0.00013620790231276214, + "loss": 0.9881, + "step": 2082 + }, + { + "epoch": 0.4007575529836164, + "grad_norm": 0.32688553000468895, + "learning_rate": 0.00013614980309246692, + "loss": 1.0613, + "step": 2083 + }, + { + "epoch": 0.4009499473921539, + "grad_norm": 0.36265883972935936, + "learning_rate": 0.00013609168983200472, + "loss": 1.1742, + "step": 2084 + }, + { + "epoch": 0.4011423418006914, + "grad_norm": 0.3485944680988614, + "learning_rate": 0.00013603356255394614, + "loss": 1.1153, + "step": 2085 + }, + { + "epoch": 0.40133473620922894, + "grad_norm": 0.28433976686267926, + "learning_rate": 0.00013597542128086703, + "loss": 1.1619, + "step": 2086 + }, + { + "epoch": 0.4015271306177664, + "grad_norm": 0.2900059030464672, + "learning_rate": 
0.00013591726603534885, + "loss": 1.0274, + "step": 2087 + }, + { + "epoch": 0.4017195250263039, + "grad_norm": 0.2886868131417094, + "learning_rate": 0.00013585909683997842, + "loss": 0.9886, + "step": 2088 + }, + { + "epoch": 0.40191191943484145, + "grad_norm": 0.3145366087777109, + "learning_rate": 0.00013580091371734798, + "loss": 1.18, + "step": 2089 + }, + { + "epoch": 0.4021043138433789, + "grad_norm": 0.29350833172164986, + "learning_rate": 0.00013574271669005518, + "loss": 1.0232, + "step": 2090 + }, + { + "epoch": 0.40229670825191644, + "grad_norm": 0.31273848002720694, + "learning_rate": 0.00013568450578070308, + "loss": 1.0609, + "step": 2091 + }, + { + "epoch": 0.40248910266045396, + "grad_norm": 0.3180627820275433, + "learning_rate": 0.00013562628101190013, + "loss": 1.1313, + "step": 2092 + }, + { + "epoch": 0.4026814970689914, + "grad_norm": 0.3302072039902513, + "learning_rate": 0.00013556804240626017, + "loss": 1.0451, + "step": 2093 + }, + { + "epoch": 0.40287389147752894, + "grad_norm": 0.30975872280331496, + "learning_rate": 0.0001355097899864024, + "loss": 1.1584, + "step": 2094 + }, + { + "epoch": 0.4030662858860664, + "grad_norm": 0.3376188769221276, + "learning_rate": 0.00013545152377495136, + "loss": 1.0282, + "step": 2095 + }, + { + "epoch": 0.40325868029460393, + "grad_norm": 0.35392391874138956, + "learning_rate": 0.00013539324379453698, + "loss": 1.1646, + "step": 2096 + }, + { + "epoch": 0.40345107470314145, + "grad_norm": 0.34748236877836686, + "learning_rate": 0.00013533495006779454, + "loss": 1.1262, + "step": 2097 + }, + { + "epoch": 0.4036434691116789, + "grad_norm": 0.3661899492574811, + "learning_rate": 0.00013527664261736471, + "loss": 1.102, + "step": 2098 + }, + { + "epoch": 0.40383586352021644, + "grad_norm": 0.32911861384737695, + "learning_rate": 0.00013521832146589334, + "loss": 1.1295, + "step": 2099 + }, + { + "epoch": 0.40402825792875396, + "grad_norm": 0.3158394153870188, + "learning_rate": 0.00013515998663603173, + "loss": 1.0142, + "step": 2100 + }, + { + "epoch": 0.40422065233729143, + "grad_norm": 0.3357591318688121, + "learning_rate": 0.00013510163815043646, + "loss": 1.0144, + "step": 2101 + }, + { + "epoch": 0.40441304674582895, + "grad_norm": 0.30856652318178, + "learning_rate": 0.00013504327603176944, + "loss": 1.0329, + "step": 2102 + }, + { + "epoch": 0.4046054411543665, + "grad_norm": 0.323932369333457, + "learning_rate": 0.0001349849003026978, + "loss": 1.0971, + "step": 2103 + }, + { + "epoch": 0.40479783556290394, + "grad_norm": 0.28865582851014965, + "learning_rate": 0.00013492651098589398, + "loss": 1.0984, + "step": 2104 + }, + { + "epoch": 0.40499022997144146, + "grad_norm": 0.25477100770261774, + "learning_rate": 0.0001348681081040358, + "loss": 1.1441, + "step": 2105 + }, + { + "epoch": 0.405182624379979, + "grad_norm": 0.32261191611519663, + "learning_rate": 0.0001348096916798062, + "loss": 1.0235, + "step": 2106 + }, + { + "epoch": 0.40537501878851645, + "grad_norm": 0.3260341592407033, + "learning_rate": 0.00013475126173589344, + "loss": 1.0906, + "step": 2107 + }, + { + "epoch": 0.40556741319705397, + "grad_norm": 0.4192487630739309, + "learning_rate": 0.00013469281829499107, + "loss": 1.1488, + "step": 2108 + }, + { + "epoch": 0.40575980760559144, + "grad_norm": 0.30952295609392133, + "learning_rate": 0.00013463436137979787, + "loss": 0.9873, + "step": 2109 + }, + { + "epoch": 0.40595220201412896, + "grad_norm": 0.3506024579995169, + "learning_rate": 0.00013457589101301774, + "loss": 1.1082, + "step": 2110 + }, + { 
+ "epoch": 0.4061445964226665, + "grad_norm": 0.3095130709917231, + "learning_rate": 0.00013451740721736002, + "loss": 1.0673, + "step": 2111 + }, + { + "epoch": 0.40633699083120395, + "grad_norm": 0.3069659735134276, + "learning_rate": 0.00013445891001553906, + "loss": 1.1357, + "step": 2112 + }, + { + "epoch": 0.40652938523974147, + "grad_norm": 0.35186699354292944, + "learning_rate": 0.00013440039943027451, + "loss": 1.0993, + "step": 2113 + }, + { + "epoch": 0.406721779648279, + "grad_norm": 0.28273978795415033, + "learning_rate": 0.00013434187548429124, + "loss": 1.0602, + "step": 2114 + }, + { + "epoch": 0.40691417405681646, + "grad_norm": 0.33752178931361904, + "learning_rate": 0.00013428333820031922, + "loss": 1.0236, + "step": 2115 + }, + { + "epoch": 0.407106568465354, + "grad_norm": 0.32474596242932924, + "learning_rate": 0.00013422478760109373, + "loss": 1.0567, + "step": 2116 + }, + { + "epoch": 0.4072989628738915, + "grad_norm": 0.2654352102177828, + "learning_rate": 0.00013416622370935507, + "loss": 1.0653, + "step": 2117 + }, + { + "epoch": 0.40749135728242897, + "grad_norm": 0.3202465990290772, + "learning_rate": 0.00013410764654784885, + "loss": 1.0901, + "step": 2118 + }, + { + "epoch": 0.4076837516909665, + "grad_norm": 0.26980647741216307, + "learning_rate": 0.00013404905613932574, + "loss": 1.0907, + "step": 2119 + }, + { + "epoch": 0.40787614609950396, + "grad_norm": 0.2576526288710288, + "learning_rate": 0.00013399045250654152, + "loss": 1.056, + "step": 2120 + }, + { + "epoch": 0.4080685405080415, + "grad_norm": 0.3072191706241032, + "learning_rate": 0.00013393183567225725, + "loss": 1.1635, + "step": 2121 + }, + { + "epoch": 0.408260934916579, + "grad_norm": 0.2840373706199005, + "learning_rate": 0.000133873205659239, + "loss": 1.0244, + "step": 2122 + }, + { + "epoch": 0.40845332932511647, + "grad_norm": 0.3330726525517525, + "learning_rate": 0.000133814562490258, + "loss": 1.0733, + "step": 2123 + }, + { + "epoch": 0.408645723733654, + "grad_norm": 0.27620918895514857, + "learning_rate": 0.00013375590618809054, + "loss": 0.9979, + "step": 2124 + }, + { + "epoch": 0.4088381181421915, + "grad_norm": 0.30359819970538376, + "learning_rate": 0.00013369723677551814, + "loss": 1.1688, + "step": 2125 + }, + { + "epoch": 0.409030512550729, + "grad_norm": 0.3656900264728042, + "learning_rate": 0.00013363855427532724, + "loss": 1.0821, + "step": 2126 + }, + { + "epoch": 0.4092229069592665, + "grad_norm": 0.25745020351525233, + "learning_rate": 0.00013357985871030947, + "loss": 1.1077, + "step": 2127 + }, + { + "epoch": 0.409415301367804, + "grad_norm": 0.3120952803249164, + "learning_rate": 0.00013352115010326155, + "loss": 0.9936, + "step": 2128 + }, + { + "epoch": 0.4096076957763415, + "grad_norm": 0.245152501357541, + "learning_rate": 0.00013346242847698517, + "loss": 0.9881, + "step": 2129 + }, + { + "epoch": 0.409800090184879, + "grad_norm": 0.38143067763378435, + "learning_rate": 0.00013340369385428713, + "loss": 1.0871, + "step": 2130 + }, + { + "epoch": 0.40999248459341653, + "grad_norm": 0.2904231850863169, + "learning_rate": 0.00013334494625797936, + "loss": 1.0056, + "step": 2131 + }, + { + "epoch": 0.410184879001954, + "grad_norm": 0.31836978508375513, + "learning_rate": 0.00013328618571087868, + "loss": 1.088, + "step": 2132 + }, + { + "epoch": 0.4103772734104915, + "grad_norm": 0.29390994405276855, + "learning_rate": 0.000133227412235807, + "loss": 1.0907, + "step": 2133 + }, + { + "epoch": 0.410569667819029, + "grad_norm": 0.31321200852450837, + 
"learning_rate": 0.0001331686258555913, + "loss": 1.1064, + "step": 2134 + }, + { + "epoch": 0.4107620622275665, + "grad_norm": 0.3157078732535461, + "learning_rate": 0.00013310982659306352, + "loss": 1.1283, + "step": 2135 + }, + { + "epoch": 0.410954456636104, + "grad_norm": 0.3155262313137496, + "learning_rate": 0.00013305101447106063, + "loss": 1.152, + "step": 2136 + }, + { + "epoch": 0.4111468510446415, + "grad_norm": 0.2691636910706811, + "learning_rate": 0.00013299218951242457, + "loss": 1.0996, + "step": 2137 + }, + { + "epoch": 0.411339245453179, + "grad_norm": 0.2629553413783702, + "learning_rate": 0.00013293335174000226, + "loss": 1.0581, + "step": 2138 + }, + { + "epoch": 0.41153163986171654, + "grad_norm": 0.30474586519998226, + "learning_rate": 0.0001328745011766456, + "loss": 1.1373, + "step": 2139 + }, + { + "epoch": 0.411724034270254, + "grad_norm": 0.2756118623251005, + "learning_rate": 0.00013281563784521154, + "loss": 1.0728, + "step": 2140 + }, + { + "epoch": 0.4119164286787915, + "grad_norm": 0.29252663895150366, + "learning_rate": 0.00013275676176856185, + "loss": 1.0617, + "step": 2141 + }, + { + "epoch": 0.41210882308732905, + "grad_norm": 0.3100632327702502, + "learning_rate": 0.00013269787296956334, + "loss": 1.0852, + "step": 2142 + }, + { + "epoch": 0.4123012174958665, + "grad_norm": 0.3108105143767478, + "learning_rate": 0.00013263897147108776, + "loss": 1.0525, + "step": 2143 + }, + { + "epoch": 0.41249361190440403, + "grad_norm": 0.30276024043743577, + "learning_rate": 0.00013258005729601177, + "loss": 1.0607, + "step": 2144 + }, + { + "epoch": 0.41268600631294156, + "grad_norm": 0.2579126470279425, + "learning_rate": 0.00013252113046721692, + "loss": 1.0512, + "step": 2145 + }, + { + "epoch": 0.412878400721479, + "grad_norm": 0.30291625598100125, + "learning_rate": 0.00013246219100758973, + "loss": 1.1413, + "step": 2146 + }, + { + "epoch": 0.41307079513001654, + "grad_norm": 0.2533645600740764, + "learning_rate": 0.00013240323894002167, + "loss": 1.0573, + "step": 2147 + }, + { + "epoch": 0.413263189538554, + "grad_norm": 0.27270643604806544, + "learning_rate": 0.00013234427428740895, + "loss": 1.1247, + "step": 2148 + }, + { + "epoch": 0.41345558394709153, + "grad_norm": 0.34434617161862013, + "learning_rate": 0.00013228529707265277, + "loss": 1.1597, + "step": 2149 + }, + { + "epoch": 0.41364797835562905, + "grad_norm": 0.3370993543549416, + "learning_rate": 0.00013222630731865928, + "loss": 1.0698, + "step": 2150 + }, + { + "epoch": 0.4138403727641665, + "grad_norm": 0.2915587633582487, + "learning_rate": 0.00013216730504833935, + "loss": 1.1213, + "step": 2151 + }, + { + "epoch": 0.41403276717270404, + "grad_norm": 0.2888318846028617, + "learning_rate": 0.00013210829028460883, + "loss": 0.9937, + "step": 2152 + }, + { + "epoch": 0.41422516158124156, + "grad_norm": 0.2933541912930708, + "learning_rate": 0.00013204926305038832, + "loss": 1.0192, + "step": 2153 + }, + { + "epoch": 0.41441755598977903, + "grad_norm": 0.26981190053274784, + "learning_rate": 0.00013199022336860333, + "loss": 1.0394, + "step": 2154 + }, + { + "epoch": 0.41460995039831655, + "grad_norm": 0.30895432972898323, + "learning_rate": 0.00013193117126218424, + "loss": 1.0329, + "step": 2155 + }, + { + "epoch": 0.4148023448068541, + "grad_norm": 0.39618929841086603, + "learning_rate": 0.00013187210675406617, + "loss": 1.0169, + "step": 2156 + }, + { + "epoch": 0.41499473921539154, + "grad_norm": 0.33622540211306556, + "learning_rate": 0.0001318130298671891, + "loss": 1.0878, + "step": 
2157 + }, + { + "epoch": 0.41518713362392906, + "grad_norm": 0.3489650635266243, + "learning_rate": 0.00013175394062449778, + "loss": 1.0964, + "step": 2158 + }, + { + "epoch": 0.4153795280324666, + "grad_norm": 0.3579289855885925, + "learning_rate": 0.00013169483904894183, + "loss": 1.1028, + "step": 2159 + }, + { + "epoch": 0.41557192244100405, + "grad_norm": 0.2952716836493573, + "learning_rate": 0.00013163572516347564, + "loss": 1.0469, + "step": 2160 + }, + { + "epoch": 0.41576431684954157, + "grad_norm": 0.2959757813840895, + "learning_rate": 0.00013157659899105833, + "loss": 1.0566, + "step": 2161 + }, + { + "epoch": 0.41595671125807904, + "grad_norm": 0.2896443710761163, + "learning_rate": 0.0001315174605546538, + "loss": 1.0327, + "step": 2162 + }, + { + "epoch": 0.41614910566661656, + "grad_norm": 0.3863883469690799, + "learning_rate": 0.0001314583098772308, + "loss": 1.0188, + "step": 2163 + }, + { + "epoch": 0.4163415000751541, + "grad_norm": 0.36560905326018345, + "learning_rate": 0.00013139914698176273, + "loss": 1.1049, + "step": 2164 + }, + { + "epoch": 0.41653389448369155, + "grad_norm": 0.30308140765113695, + "learning_rate": 0.00013133997189122777, + "loss": 1.1425, + "step": 2165 + }, + { + "epoch": 0.41672628889222907, + "grad_norm": 0.294607906034493, + "learning_rate": 0.00013128078462860886, + "loss": 1.0192, + "step": 2166 + }, + { + "epoch": 0.4169186833007666, + "grad_norm": 0.32690085651940637, + "learning_rate": 0.00013122158521689369, + "loss": 1.0381, + "step": 2167 + }, + { + "epoch": 0.41711107770930406, + "grad_norm": 0.3327760192353365, + "learning_rate": 0.00013116237367907454, + "loss": 1.1548, + "step": 2168 + }, + { + "epoch": 0.4173034721178416, + "grad_norm": 0.25986497352220245, + "learning_rate": 0.00013110315003814855, + "loss": 0.9944, + "step": 2169 + }, + { + "epoch": 0.4174958665263791, + "grad_norm": 0.27472485431242855, + "learning_rate": 0.00013104391431711747, + "loss": 1.0126, + "step": 2170 + }, + { + "epoch": 0.41768826093491657, + "grad_norm": 0.33484886386237683, + "learning_rate": 0.0001309846665389878, + "loss": 1.1238, + "step": 2171 + }, + { + "epoch": 0.4178806553434541, + "grad_norm": 0.34784220257763554, + "learning_rate": 0.0001309254067267707, + "loss": 1.0242, + "step": 2172 + }, + { + "epoch": 0.4180730497519916, + "grad_norm": 0.39300942378334563, + "learning_rate": 0.00013086613490348198, + "loss": 1.05, + "step": 2173 + }, + { + "epoch": 0.4182654441605291, + "grad_norm": 0.28142923000143394, + "learning_rate": 0.00013080685109214207, + "loss": 1.0786, + "step": 2174 + }, + { + "epoch": 0.4184578385690666, + "grad_norm": 0.2798105651501539, + "learning_rate": 0.00013074755531577626, + "loss": 1.0239, + "step": 2175 + }, + { + "epoch": 0.41865023297760406, + "grad_norm": 0.3211164673409022, + "learning_rate": 0.00013068824759741426, + "loss": 1.0228, + "step": 2176 + }, + { + "epoch": 0.4188426273861416, + "grad_norm": 0.3178939655693238, + "learning_rate": 0.0001306289279600905, + "loss": 1.0643, + "step": 2177 + }, + { + "epoch": 0.4190350217946791, + "grad_norm": 0.28732793536172013, + "learning_rate": 0.00013056959642684403, + "loss": 1.072, + "step": 2178 + }, + { + "epoch": 0.4192274162032166, + "grad_norm": 0.27632839963144906, + "learning_rate": 0.0001305102530207186, + "loss": 1.0273, + "step": 2179 + }, + { + "epoch": 0.4194198106117541, + "grad_norm": 0.27648238248740514, + "learning_rate": 0.00013045089776476246, + "loss": 1.038, + "step": 2180 + }, + { + "epoch": 0.4196122050202916, + "grad_norm": 
0.32175572376340095, + "learning_rate": 0.0001303915306820285, + "loss": 1.1307, + "step": 2181 + }, + { + "epoch": 0.4198045994288291, + "grad_norm": 0.2710366545829388, + "learning_rate": 0.00013033215179557422, + "loss": 1.0766, + "step": 2182 + }, + { + "epoch": 0.4199969938373666, + "grad_norm": 0.3124927983215582, + "learning_rate": 0.0001302727611284617, + "loss": 1.0717, + "step": 2183 + }, + { + "epoch": 0.4201893882459041, + "grad_norm": 0.31881710467528623, + "learning_rate": 0.00013021335870375765, + "loss": 1.036, + "step": 2184 + }, + { + "epoch": 0.4203817826544416, + "grad_norm": 0.26895695690636184, + "learning_rate": 0.00013015394454453315, + "loss": 1.0754, + "step": 2185 + }, + { + "epoch": 0.4205741770629791, + "grad_norm": 0.3077667451973139, + "learning_rate": 0.00013009451867386412, + "loss": 1.0987, + "step": 2186 + }, + { + "epoch": 0.4207665714715166, + "grad_norm": 0.3970956054123128, + "learning_rate": 0.00013003508111483075, + "loss": 1.0918, + "step": 2187 + }, + { + "epoch": 0.4209589658800541, + "grad_norm": 0.2570532522133091, + "learning_rate": 0.000129975631890518, + "loss": 1.1186, + "step": 2188 + }, + { + "epoch": 0.4211513602885916, + "grad_norm": 0.2918391916324925, + "learning_rate": 0.00012991617102401523, + "loss": 1.1065, + "step": 2189 + }, + { + "epoch": 0.4213437546971291, + "grad_norm": 0.2923584323407831, + "learning_rate": 0.00012985669853841634, + "loss": 1.1537, + "step": 2190 + }, + { + "epoch": 0.4215361491056666, + "grad_norm": 0.34178013657291517, + "learning_rate": 0.0001297972144568198, + "loss": 1.0527, + "step": 2191 + }, + { + "epoch": 0.42172854351420413, + "grad_norm": 0.3430974974207724, + "learning_rate": 0.00012973771880232853, + "loss": 1.0659, + "step": 2192 + }, + { + "epoch": 0.4219209379227416, + "grad_norm": 0.29206749455891645, + "learning_rate": 0.00012967821159804994, + "loss": 1.1654, + "step": 2193 + }, + { + "epoch": 0.4221133323312791, + "grad_norm": 0.37926292406973056, + "learning_rate": 0.00012961869286709594, + "loss": 1.0792, + "step": 2194 + }, + { + "epoch": 0.42230572673981664, + "grad_norm": 0.3137026053850834, + "learning_rate": 0.0001295591626325829, + "loss": 1.0235, + "step": 2195 + }, + { + "epoch": 0.4224981211483541, + "grad_norm": 0.3351652435566971, + "learning_rate": 0.00012949962091763175, + "loss": 1.045, + "step": 2196 + }, + { + "epoch": 0.42269051555689163, + "grad_norm": 0.31202787933585263, + "learning_rate": 0.00012944006774536773, + "loss": 1.0712, + "step": 2197 + }, + { + "epoch": 0.42288290996542915, + "grad_norm": 0.30090767954904274, + "learning_rate": 0.00012938050313892062, + "loss": 1.0942, + "step": 2198 + }, + { + "epoch": 0.4230753043739666, + "grad_norm": 0.2947107674452307, + "learning_rate": 0.00012932092712142468, + "loss": 1.0435, + "step": 2199 + }, + { + "epoch": 0.42326769878250414, + "grad_norm": 0.3211457650056529, + "learning_rate": 0.00012926133971601848, + "loss": 1.0717, + "step": 2200 + }, + { + "epoch": 0.4234600931910416, + "grad_norm": 0.3864264924279052, + "learning_rate": 0.00012920174094584514, + "loss": 1.0121, + "step": 2201 + }, + { + "epoch": 0.42365248759957913, + "grad_norm": 0.2881779388433144, + "learning_rate": 0.00012914213083405212, + "loss": 1.0607, + "step": 2202 + }, + { + "epoch": 0.42384488200811665, + "grad_norm": 0.3191534191694544, + "learning_rate": 0.00012908250940379124, + "loss": 1.0203, + "step": 2203 + }, + { + "epoch": 0.4240372764166541, + "grad_norm": 0.2507204338262169, + "learning_rate": 0.00012902287667821883, + "loss": 
1.0637, + "step": 2204 + }, + { + "epoch": 0.42422967082519164, + "grad_norm": 0.3312245656267057, + "learning_rate": 0.0001289632326804956, + "loss": 1.0996, + "step": 2205 + }, + { + "epoch": 0.42442206523372916, + "grad_norm": 0.47611959534223114, + "learning_rate": 0.0001289035774337865, + "loss": 1.1233, + "step": 2206 + }, + { + "epoch": 0.42461445964226663, + "grad_norm": 0.3415957553888742, + "learning_rate": 0.00012884391096126097, + "loss": 1.1295, + "step": 2207 + }, + { + "epoch": 0.42480685405080415, + "grad_norm": 0.2644265852993455, + "learning_rate": 0.00012878423328609281, + "loss": 1.1342, + "step": 2208 + }, + { + "epoch": 0.42499924845934167, + "grad_norm": 0.25000667995646575, + "learning_rate": 0.00012872454443146013, + "loss": 1.0312, + "step": 2209 + }, + { + "epoch": 0.42519164286787914, + "grad_norm": 0.2782220891682969, + "learning_rate": 0.00012866484442054538, + "loss": 1.2075, + "step": 2210 + }, + { + "epoch": 0.42538403727641666, + "grad_norm": 0.29977025592498524, + "learning_rate": 0.00012860513327653535, + "loss": 1.1097, + "step": 2211 + }, + { + "epoch": 0.4255764316849542, + "grad_norm": 0.2785145450124637, + "learning_rate": 0.0001285454110226212, + "loss": 1.0123, + "step": 2212 + }, + { + "epoch": 0.42576882609349165, + "grad_norm": 0.2928195632179793, + "learning_rate": 0.00012848567768199831, + "loss": 1.099, + "step": 2213 + }, + { + "epoch": 0.42596122050202917, + "grad_norm": 0.2753655286622883, + "learning_rate": 0.00012842593327786646, + "loss": 1.0469, + "step": 2214 + }, + { + "epoch": 0.42615361491056664, + "grad_norm": 0.29230091117996665, + "learning_rate": 0.0001283661778334297, + "loss": 1.0303, + "step": 2215 + }, + { + "epoch": 0.42634600931910416, + "grad_norm": 0.34605414720153976, + "learning_rate": 0.00012830641137189628, + "loss": 1.0997, + "step": 2216 + }, + { + "epoch": 0.4265384037276417, + "grad_norm": 0.33858539393511694, + "learning_rate": 0.0001282466339164789, + "loss": 1.0509, + "step": 2217 + }, + { + "epoch": 0.42673079813617915, + "grad_norm": 0.28442241589571476, + "learning_rate": 0.00012818684549039438, + "loss": 0.9711, + "step": 2218 + }, + { + "epoch": 0.42692319254471667, + "grad_norm": 0.36171167729167664, + "learning_rate": 0.00012812704611686387, + "loss": 1.092, + "step": 2219 + }, + { + "epoch": 0.4271155869532542, + "grad_norm": 0.27227430750327375, + "learning_rate": 0.00012806723581911274, + "loss": 1.0676, + "step": 2220 + }, + { + "epoch": 0.42730798136179166, + "grad_norm": 0.2895761499410537, + "learning_rate": 0.00012800741462037064, + "loss": 1.0051, + "step": 2221 + }, + { + "epoch": 0.4275003757703292, + "grad_norm": 0.3211312693193202, + "learning_rate": 0.00012794758254387146, + "loss": 1.1371, + "step": 2222 + }, + { + "epoch": 0.4276927701788667, + "grad_norm": 0.36817870262920066, + "learning_rate": 0.00012788773961285323, + "loss": 1.1009, + "step": 2223 + }, + { + "epoch": 0.42788516458740417, + "grad_norm": 0.3125801670143624, + "learning_rate": 0.00012782788585055828, + "loss": 1.1223, + "step": 2224 + }, + { + "epoch": 0.4280775589959417, + "grad_norm": 0.40226080161062644, + "learning_rate": 0.00012776802128023316, + "loss": 1.0634, + "step": 2225 + }, + { + "epoch": 0.4282699534044792, + "grad_norm": 0.31390023413359386, + "learning_rate": 0.0001277081459251285, + "loss": 1.0165, + "step": 2226 + }, + { + "epoch": 0.4284623478130167, + "grad_norm": 0.28459209722056183, + "learning_rate": 0.0001276482598084993, + "loss": 1.0337, + "step": 2227 + }, + { + "epoch": 0.4286547422215542, 
+ "grad_norm": 0.27392770914379844, + "learning_rate": 0.00012758836295360455, + "loss": 1.0162, + "step": 2228 + }, + { + "epoch": 0.42884713663009166, + "grad_norm": 0.25426950612596344, + "learning_rate": 0.0001275284553837075, + "loss": 1.0915, + "step": 2229 + }, + { + "epoch": 0.4290395310386292, + "grad_norm": 0.3277360810777147, + "learning_rate": 0.00012746853712207568, + "loss": 1.0708, + "step": 2230 + }, + { + "epoch": 0.4292319254471667, + "grad_norm": 0.28727196793326465, + "learning_rate": 0.00012740860819198047, + "loss": 1.077, + "step": 2231 + }, + { + "epoch": 0.4294243198557042, + "grad_norm": 0.35653124835448086, + "learning_rate": 0.0001273486686166977, + "loss": 1.0594, + "step": 2232 + }, + { + "epoch": 0.4296167142642417, + "grad_norm": 0.323352696347872, + "learning_rate": 0.00012728871841950718, + "loss": 1.1271, + "step": 2233 + }, + { + "epoch": 0.4298091086727792, + "grad_norm": 0.28579166378157655, + "learning_rate": 0.00012722875762369288, + "loss": 1.0781, + "step": 2234 + }, + { + "epoch": 0.4300015030813167, + "grad_norm": 0.3066729640014907, + "learning_rate": 0.00012716878625254288, + "loss": 1.0445, + "step": 2235 + }, + { + "epoch": 0.4301938974898542, + "grad_norm": 0.27755961078088953, + "learning_rate": 0.00012710880432934932, + "loss": 1.1186, + "step": 2236 + }, + { + "epoch": 0.4303862918983917, + "grad_norm": 0.3035864349115272, + "learning_rate": 0.0001270488118774086, + "loss": 1.1121, + "step": 2237 + }, + { + "epoch": 0.4305786863069292, + "grad_norm": 0.3069508843058301, + "learning_rate": 0.000126988808920021, + "loss": 1.0509, + "step": 2238 + }, + { + "epoch": 0.4307710807154667, + "grad_norm": 0.32117458606384475, + "learning_rate": 0.000126928795480491, + "loss": 1.0658, + "step": 2239 + }, + { + "epoch": 0.43096347512400424, + "grad_norm": 0.27043276401306465, + "learning_rate": 0.00012686877158212713, + "loss": 1.1762, + "step": 2240 + }, + { + "epoch": 0.4311558695325417, + "grad_norm": 0.2657502421700463, + "learning_rate": 0.00012680873724824198, + "loss": 1.1385, + "step": 2241 + }, + { + "epoch": 0.4313482639410792, + "grad_norm": 0.3114313159268892, + "learning_rate": 0.00012674869250215223, + "loss": 1.0139, + "step": 2242 + }, + { + "epoch": 0.4315406583496167, + "grad_norm": 0.2555846734850437, + "learning_rate": 0.00012668863736717856, + "loss": 1.1475, + "step": 2243 + }, + { + "epoch": 0.4317330527581542, + "grad_norm": 0.46484680610905427, + "learning_rate": 0.00012662857186664559, + "loss": 1.1227, + "step": 2244 + }, + { + "epoch": 0.43192544716669173, + "grad_norm": 0.252117588429946, + "learning_rate": 0.0001265684960238822, + "loss": 1.0652, + "step": 2245 + }, + { + "epoch": 0.4321178415752292, + "grad_norm": 0.3076639643450356, + "learning_rate": 0.0001265084098622211, + "loss": 1.0734, + "step": 2246 + }, + { + "epoch": 0.4323102359837667, + "grad_norm": 0.3018468849588766, + "learning_rate": 0.00012644831340499906, + "loss": 0.9336, + "step": 2247 + }, + { + "epoch": 0.43250263039230424, + "grad_norm": 0.3231050162300759, + "learning_rate": 0.00012638820667555683, + "loss": 0.9482, + "step": 2248 + }, + { + "epoch": 0.4326950248008417, + "grad_norm": 0.3196641442863389, + "learning_rate": 0.00012632808969723926, + "loss": 1.0116, + "step": 2249 + }, + { + "epoch": 0.43288741920937923, + "grad_norm": 0.3421544817356094, + "learning_rate": 0.000126267962493395, + "loss": 1.0357, + "step": 2250 + }, + { + "epoch": 0.43307981361791675, + "grad_norm": 0.24498736165941662, + "learning_rate": 0.0001262078250873768, + 
"loss": 1.0457, + "step": 2251 + }, + { + "epoch": 0.4332722080264542, + "grad_norm": 0.32777409315260503, + "learning_rate": 0.00012614767750254128, + "loss": 1.03, + "step": 2252 + }, + { + "epoch": 0.43346460243499174, + "grad_norm": 0.3166004917987926, + "learning_rate": 0.00012608751976224915, + "loss": 1.1015, + "step": 2253 + }, + { + "epoch": 0.4336569968435292, + "grad_norm": 0.34767394458823764, + "learning_rate": 0.00012602735188986498, + "loss": 1.1033, + "step": 2254 + }, + { + "epoch": 0.43384939125206673, + "grad_norm": 0.3214150449244712, + "learning_rate": 0.0001259671739087572, + "loss": 1.0629, + "step": 2255 + }, + { + "epoch": 0.43404178566060425, + "grad_norm": 0.37944127412277756, + "learning_rate": 0.00012590698584229835, + "loss": 1.0673, + "step": 2256 + }, + { + "epoch": 0.4342341800691417, + "grad_norm": 0.3065780828408335, + "learning_rate": 0.00012584678771386466, + "loss": 1.0475, + "step": 2257 + }, + { + "epoch": 0.43442657447767924, + "grad_norm": 0.32283017123688185, + "learning_rate": 0.0001257865795468365, + "loss": 1.1555, + "step": 2258 + }, + { + "epoch": 0.43461896888621676, + "grad_norm": 0.24658988097817017, + "learning_rate": 0.000125726361364598, + "loss": 1.0975, + "step": 2259 + }, + { + "epoch": 0.4348113632947542, + "grad_norm": 0.31873477475517975, + "learning_rate": 0.00012566613319053713, + "loss": 1.0741, + "step": 2260 + }, + { + "epoch": 0.43500375770329175, + "grad_norm": 0.30204281269961986, + "learning_rate": 0.00012560589504804592, + "loss": 1.0209, + "step": 2261 + }, + { + "epoch": 0.43519615211182927, + "grad_norm": 0.3447200061536308, + "learning_rate": 0.00012554564696052011, + "loss": 1.1049, + "step": 2262 + }, + { + "epoch": 0.43538854652036674, + "grad_norm": 0.2833123678050803, + "learning_rate": 0.00012548538895135942, + "loss": 1.1173, + "step": 2263 + }, + { + "epoch": 0.43558094092890426, + "grad_norm": 0.31331049698206237, + "learning_rate": 0.00012542512104396728, + "loss": 0.992, + "step": 2264 + }, + { + "epoch": 0.4357733353374418, + "grad_norm": 0.2669592456599427, + "learning_rate": 0.00012536484326175113, + "loss": 1.0341, + "step": 2265 + }, + { + "epoch": 0.43596572974597925, + "grad_norm": 0.331217984626546, + "learning_rate": 0.00012530455562812214, + "loss": 0.9767, + "step": 2266 + }, + { + "epoch": 0.43615812415451677, + "grad_norm": 0.35354982838335797, + "learning_rate": 0.0001252442581664953, + "loss": 1.1244, + "step": 2267 + }, + { + "epoch": 0.43635051856305423, + "grad_norm": 0.2606668891950679, + "learning_rate": 0.0001251839509002895, + "loss": 1.1665, + "step": 2268 + }, + { + "epoch": 0.43654291297159176, + "grad_norm": 0.2870406769574763, + "learning_rate": 0.00012512363385292738, + "loss": 1.0324, + "step": 2269 + }, + { + "epoch": 0.4367353073801293, + "grad_norm": 0.3217397222066845, + "learning_rate": 0.00012506330704783534, + "loss": 1.1431, + "step": 2270 + }, + { + "epoch": 0.43692770178866674, + "grad_norm": 0.2657327436360576, + "learning_rate": 0.00012500297050844366, + "loss": 1.0666, + "step": 2271 + }, + { + "epoch": 0.43712009619720427, + "grad_norm": 0.36698107717755923, + "learning_rate": 0.00012494262425818636, + "loss": 0.9727, + "step": 2272 + }, + { + "epoch": 0.4373124906057418, + "grad_norm": 0.3092521592366532, + "learning_rate": 0.00012488226832050117, + "loss": 1.1505, + "step": 2273 + }, + { + "epoch": 0.43750488501427925, + "grad_norm": 0.30366464728963216, + "learning_rate": 0.00012482190271882973, + "loss": 1.1118, + "step": 2274 + }, + { + "epoch": 
0.4376972794228168, + "grad_norm": 0.35101809678043827, + "learning_rate": 0.00012476152747661727, + "loss": 1.1599, + "step": 2275 + }, + { + "epoch": 0.4378896738313543, + "grad_norm": 0.3078381708471384, + "learning_rate": 0.00012470114261731288, + "loss": 1.0856, + "step": 2276 + }, + { + "epoch": 0.43808206823989176, + "grad_norm": 0.28536122652740276, + "learning_rate": 0.0001246407481643693, + "loss": 1.0407, + "step": 2277 + }, + { + "epoch": 0.4382744626484293, + "grad_norm": 0.31125943751135476, + "learning_rate": 0.0001245803441412431, + "loss": 1.0388, + "step": 2278 + }, + { + "epoch": 0.4384668570569668, + "grad_norm": 0.3637167595517368, + "learning_rate": 0.00012451993057139443, + "loss": 1.0565, + "step": 2279 + }, + { + "epoch": 0.4386592514655043, + "grad_norm": 0.3042062479369799, + "learning_rate": 0.0001244595074782873, + "loss": 0.9961, + "step": 2280 + }, + { + "epoch": 0.4388516458740418, + "grad_norm": 0.32445297057755873, + "learning_rate": 0.00012439907488538934, + "loss": 1.0426, + "step": 2281 + }, + { + "epoch": 0.43904404028257926, + "grad_norm": 0.26772674511517314, + "learning_rate": 0.0001243386328161718, + "loss": 1.1199, + "step": 2282 + }, + { + "epoch": 0.4392364346911168, + "grad_norm": 0.2925346654302083, + "learning_rate": 0.00012427818129410976, + "loss": 1.1124, + "step": 2283 + }, + { + "epoch": 0.4394288290996543, + "grad_norm": 0.33678423980102185, + "learning_rate": 0.00012421772034268186, + "loss": 1.0548, + "step": 2284 + }, + { + "epoch": 0.43962122350819177, + "grad_norm": 0.27495217329774596, + "learning_rate": 0.00012415724998537041, + "loss": 0.9635, + "step": 2285 + }, + { + "epoch": 0.4398136179167293, + "grad_norm": 0.33566196919935115, + "learning_rate": 0.00012409677024566144, + "loss": 1.0872, + "step": 2286 + }, + { + "epoch": 0.4400060123252668, + "grad_norm": 0.3159059612233199, + "learning_rate": 0.00012403628114704459, + "loss": 1.0789, + "step": 2287 + }, + { + "epoch": 0.4401984067338043, + "grad_norm": 0.37142460638117897, + "learning_rate": 0.0001239757827130131, + "loss": 1.0503, + "step": 2288 + }, + { + "epoch": 0.4403908011423418, + "grad_norm": 0.283863797533487, + "learning_rate": 0.00012391527496706387, + "loss": 1.0236, + "step": 2289 + }, + { + "epoch": 0.4405831955508793, + "grad_norm": 0.28568554351328374, + "learning_rate": 0.00012385475793269744, + "loss": 1.0133, + "step": 2290 + }, + { + "epoch": 0.4407755899594168, + "grad_norm": 0.24887157270948593, + "learning_rate": 0.0001237942316334179, + "loss": 0.9984, + "step": 2291 + }, + { + "epoch": 0.4409679843679543, + "grad_norm": 0.3216530668952049, + "learning_rate": 0.00012373369609273296, + "loss": 1.1074, + "step": 2292 + }, + { + "epoch": 0.44116037877649183, + "grad_norm": 0.2760162632127163, + "learning_rate": 0.00012367315133415396, + "loss": 1.1407, + "step": 2293 + }, + { + "epoch": 0.4413527731850293, + "grad_norm": 0.2853925600872062, + "learning_rate": 0.00012361259738119575, + "loss": 1.0916, + "step": 2294 + }, + { + "epoch": 0.4415451675935668, + "grad_norm": 0.26758157504914154, + "learning_rate": 0.00012355203425737683, + "loss": 1.0632, + "step": 2295 + }, + { + "epoch": 0.4417375620021043, + "grad_norm": 0.2867192509295806, + "learning_rate": 0.00012349146198621918, + "loss": 1.0587, + "step": 2296 + }, + { + "epoch": 0.4419299564106418, + "grad_norm": 0.2985482590447692, + "learning_rate": 0.0001234308805912484, + "loss": 1.0867, + "step": 2297 + }, + { + "epoch": 0.44212235081917933, + "grad_norm": 0.30150695817957207, + 
"learning_rate": 0.00012337029009599357, + "loss": 1.0863, + "step": 2298 + }, + { + "epoch": 0.4423147452277168, + "grad_norm": 0.25723595041437636, + "learning_rate": 0.00012330969052398735, + "loss": 1.1078, + "step": 2299 + }, + { + "epoch": 0.4425071396362543, + "grad_norm": 0.32335571975394967, + "learning_rate": 0.00012324908189876595, + "loss": 1.0748, + "step": 2300 + }, + { + "epoch": 0.44269953404479184, + "grad_norm": 0.2736508636651387, + "learning_rate": 0.00012318846424386908, + "loss": 1.0912, + "step": 2301 + }, + { + "epoch": 0.4428919284533293, + "grad_norm": 0.4178067887573691, + "learning_rate": 0.0001231278375828398, + "loss": 1.0361, + "step": 2302 + }, + { + "epoch": 0.44308432286186683, + "grad_norm": 0.32715779755370955, + "learning_rate": 0.00012306720193922498, + "loss": 1.1229, + "step": 2303 + }, + { + "epoch": 0.44327671727040435, + "grad_norm": 0.2592005503252114, + "learning_rate": 0.00012300655733657473, + "loss": 1.069, + "step": 2304 + }, + { + "epoch": 0.4434691116789418, + "grad_norm": 0.29054898565665616, + "learning_rate": 0.0001229459037984427, + "loss": 0.9621, + "step": 2305 + }, + { + "epoch": 0.44366150608747934, + "grad_norm": 0.30149562517088924, + "learning_rate": 0.000122885241348386, + "loss": 1.0445, + "step": 2306 + }, + { + "epoch": 0.44385390049601686, + "grad_norm": 0.2891102344273007, + "learning_rate": 0.0001228245700099653, + "loss": 1.0208, + "step": 2307 + }, + { + "epoch": 0.44404629490455433, + "grad_norm": 0.35548198313909596, + "learning_rate": 0.00012276388980674466, + "loss": 1.0507, + "step": 2308 + }, + { + "epoch": 0.44423868931309185, + "grad_norm": 0.31786970443848317, + "learning_rate": 0.0001227032007622915, + "loss": 1.1075, + "step": 2309 + }, + { + "epoch": 0.4444310837216293, + "grad_norm": 0.2716463124923323, + "learning_rate": 0.00012264250290017673, + "loss": 1.0947, + "step": 2310 + }, + { + "epoch": 0.44462347813016684, + "grad_norm": 0.310421586742775, + "learning_rate": 0.00012258179624397478, + "loss": 1.0247, + "step": 2311 + }, + { + "epoch": 0.44481587253870436, + "grad_norm": 0.3407677014945833, + "learning_rate": 0.00012252108081726336, + "loss": 1.1464, + "step": 2312 + }, + { + "epoch": 0.4450082669472418, + "grad_norm": 0.34577595553051954, + "learning_rate": 0.0001224603566436237, + "loss": 1.13, + "step": 2313 + }, + { + "epoch": 0.44520066135577935, + "grad_norm": 0.30951874977039784, + "learning_rate": 0.00012239962374664028, + "loss": 1.1493, + "step": 2314 + }, + { + "epoch": 0.44539305576431687, + "grad_norm": 0.3144828648010736, + "learning_rate": 0.00012233888214990113, + "loss": 1.0235, + "step": 2315 + }, + { + "epoch": 0.44558545017285434, + "grad_norm": 0.3111912066543634, + "learning_rate": 0.00012227813187699757, + "loss": 1.0199, + "step": 2316 + }, + { + "epoch": 0.44577784458139186, + "grad_norm": 0.41224042941558886, + "learning_rate": 0.00012221737295152428, + "loss": 1.0213, + "step": 2317 + }, + { + "epoch": 0.4459702389899294, + "grad_norm": 0.29260845775090655, + "learning_rate": 0.00012215660539707936, + "loss": 1.0794, + "step": 2318 + }, + { + "epoch": 0.44616263339846685, + "grad_norm": 0.24297002760316577, + "learning_rate": 0.00012209582923726424, + "loss": 1.1566, + "step": 2319 + }, + { + "epoch": 0.44635502780700437, + "grad_norm": 0.30903627654247995, + "learning_rate": 0.00012203504449568362, + "loss": 1.1673, + "step": 2320 + }, + { + "epoch": 0.44654742221554183, + "grad_norm": 0.28261616514108767, + "learning_rate": 0.00012197425119594563, + "loss": 1.1039, + 
"step": 2321 + }, + { + "epoch": 0.44673981662407936, + "grad_norm": 0.2736733990665549, + "learning_rate": 0.00012191344936166169, + "loss": 1.2263, + "step": 2322 + }, + { + "epoch": 0.4469322110326169, + "grad_norm": 0.33792510219654087, + "learning_rate": 0.00012185263901644653, + "loss": 1.0791, + "step": 2323 + }, + { + "epoch": 0.44712460544115434, + "grad_norm": 0.3178971140769497, + "learning_rate": 0.00012179182018391819, + "loss": 0.9916, + "step": 2324 + }, + { + "epoch": 0.44731699984969187, + "grad_norm": 0.3007494206127297, + "learning_rate": 0.00012173099288769798, + "loss": 1.0695, + "step": 2325 + }, + { + "epoch": 0.4475093942582294, + "grad_norm": 0.26162239528255915, + "learning_rate": 0.00012167015715141057, + "loss": 1.1611, + "step": 2326 + }, + { + "epoch": 0.44770178866676685, + "grad_norm": 0.28619737465668155, + "learning_rate": 0.00012160931299868378, + "loss": 1.1205, + "step": 2327 + }, + { + "epoch": 0.4478941830753044, + "grad_norm": 0.29636098922637666, + "learning_rate": 0.00012154846045314884, + "loss": 1.055, + "step": 2328 + }, + { + "epoch": 0.4480865774838419, + "grad_norm": 0.2884154527928569, + "learning_rate": 0.0001214875995384402, + "loss": 0.9509, + "step": 2329 + }, + { + "epoch": 0.44827897189237936, + "grad_norm": 0.33756811442876733, + "learning_rate": 0.00012142673027819549, + "loss": 1.0143, + "step": 2330 + }, + { + "epoch": 0.4484713663009169, + "grad_norm": 0.37983895454116856, + "learning_rate": 0.00012136585269605558, + "loss": 1.0938, + "step": 2331 + }, + { + "epoch": 0.4486637607094544, + "grad_norm": 0.2792182801510256, + "learning_rate": 0.00012130496681566475, + "loss": 1.056, + "step": 2332 + }, + { + "epoch": 0.4488561551179919, + "grad_norm": 0.2754923642654477, + "learning_rate": 0.00012124407266067028, + "loss": 0.998, + "step": 2333 + }, + { + "epoch": 0.4490485495265294, + "grad_norm": 0.28017988863404325, + "learning_rate": 0.00012118317025472279, + "loss": 1.0686, + "step": 2334 + }, + { + "epoch": 0.44924094393506686, + "grad_norm": 0.32288983285963274, + "learning_rate": 0.00012112225962147605, + "loss": 1.1215, + "step": 2335 + }, + { + "epoch": 0.4494333383436044, + "grad_norm": 0.26517265302977716, + "learning_rate": 0.00012106134078458709, + "loss": 1.1099, + "step": 2336 + }, + { + "epoch": 0.4496257327521419, + "grad_norm": 0.2984857294534992, + "learning_rate": 0.00012100041376771605, + "loss": 1.1385, + "step": 2337 + }, + { + "epoch": 0.44981812716067937, + "grad_norm": 0.26019049655232596, + "learning_rate": 0.0001209394785945263, + "loss": 1.0233, + "step": 2338 + }, + { + "epoch": 0.4500105215692169, + "grad_norm": 0.3154873324289275, + "learning_rate": 0.00012087853528868431, + "loss": 1.0183, + "step": 2339 + }, + { + "epoch": 0.4502029159777544, + "grad_norm": 0.28033044241149513, + "learning_rate": 0.00012081758387385982, + "loss": 1.1335, + "step": 2340 + }, + { + "epoch": 0.4503953103862919, + "grad_norm": 0.26562508763124576, + "learning_rate": 0.00012075662437372566, + "loss": 1.0227, + "step": 2341 + }, + { + "epoch": 0.4505877047948294, + "grad_norm": 0.2853151766333661, + "learning_rate": 0.00012069565681195776, + "loss": 1.097, + "step": 2342 + }, + { + "epoch": 0.4507800992033669, + "grad_norm": 0.2656617061596788, + "learning_rate": 0.0001206346812122352, + "loss": 1.0976, + "step": 2343 + }, + { + "epoch": 0.4509724936119044, + "grad_norm": 0.31942213283619686, + "learning_rate": 0.00012057369759824024, + "loss": 1.0778, + "step": 2344 + }, + { + "epoch": 0.4511648880204419, + "grad_norm": 
0.28674403721840247, + "learning_rate": 0.00012051270599365824, + "loss": 1.0341, + "step": 2345 + }, + { + "epoch": 0.45135728242897943, + "grad_norm": 0.27215019991413036, + "learning_rate": 0.00012045170642217755, + "loss": 1.0346, + "step": 2346 + }, + { + "epoch": 0.4515496768375169, + "grad_norm": 0.3690805921299747, + "learning_rate": 0.00012039069890748978, + "loss": 1.1343, + "step": 2347 + }, + { + "epoch": 0.4517420712460544, + "grad_norm": 0.26925325933876115, + "learning_rate": 0.00012032968347328953, + "loss": 1.0873, + "step": 2348 + }, + { + "epoch": 0.4519344656545919, + "grad_norm": 0.32482317575765907, + "learning_rate": 0.00012026866014327444, + "loss": 1.1025, + "step": 2349 + }, + { + "epoch": 0.4521268600631294, + "grad_norm": 0.287112199346636, + "learning_rate": 0.00012020762894114535, + "loss": 1.0765, + "step": 2350 + }, + { + "epoch": 0.45231925447166693, + "grad_norm": 0.28727869303811654, + "learning_rate": 0.00012014658989060599, + "loss": 1.1223, + "step": 2351 + }, + { + "epoch": 0.4525116488802044, + "grad_norm": 0.35428310771798416, + "learning_rate": 0.00012008554301536328, + "loss": 1.1729, + "step": 2352 + }, + { + "epoch": 0.4527040432887419, + "grad_norm": 0.34666994090451514, + "learning_rate": 0.00012002448833912711, + "loss": 1.063, + "step": 2353 + }, + { + "epoch": 0.45289643769727944, + "grad_norm": 0.2694700834559087, + "learning_rate": 0.00011996342588561043, + "loss": 1.07, + "step": 2354 + }, + { + "epoch": 0.4530888321058169, + "grad_norm": 0.2547057156353204, + "learning_rate": 0.00011990235567852917, + "loss": 1.0682, + "step": 2355 + }, + { + "epoch": 0.45328122651435443, + "grad_norm": 0.31560348872341953, + "learning_rate": 0.00011984127774160225, + "loss": 1.1713, + "step": 2356 + }, + { + "epoch": 0.45347362092289195, + "grad_norm": 0.33723859691101654, + "learning_rate": 0.00011978019209855174, + "loss": 1.1122, + "step": 2357 + }, + { + "epoch": 0.4536660153314294, + "grad_norm": 0.3465027052553532, + "learning_rate": 0.00011971909877310253, + "loss": 1.1081, + "step": 2358 + }, + { + "epoch": 0.45385840973996694, + "grad_norm": 0.34054655641772036, + "learning_rate": 0.00011965799778898256, + "loss": 1.062, + "step": 2359 + }, + { + "epoch": 0.45405080414850446, + "grad_norm": 0.36220274477650694, + "learning_rate": 0.00011959688916992277, + "loss": 0.9796, + "step": 2360 + }, + { + "epoch": 0.4542431985570419, + "grad_norm": 0.3261600758421371, + "learning_rate": 0.00011953577293965708, + "loss": 1.1029, + "step": 2361 + }, + { + "epoch": 0.45443559296557945, + "grad_norm": 0.3322167143751733, + "learning_rate": 0.00011947464912192227, + "loss": 1.0256, + "step": 2362 + }, + { + "epoch": 0.4546279873741169, + "grad_norm": 0.2933363106680554, + "learning_rate": 0.00011941351774045815, + "loss": 1.0768, + "step": 2363 + }, + { + "epoch": 0.45482038178265444, + "grad_norm": 0.35459185899438167, + "learning_rate": 0.00011935237881900743, + "loss": 1.0544, + "step": 2364 + }, + { + "epoch": 0.45501277619119196, + "grad_norm": 0.2766898676397248, + "learning_rate": 0.00011929123238131579, + "loss": 1.0767, + "step": 2365 + }, + { + "epoch": 0.4552051705997294, + "grad_norm": 0.3094980044974744, + "learning_rate": 0.00011923007845113176, + "loss": 1.085, + "step": 2366 + }, + { + "epoch": 0.45539756500826695, + "grad_norm": 0.40095158661505026, + "learning_rate": 0.00011916891705220687, + "loss": 1.1046, + "step": 2367 + }, + { + "epoch": 0.45558995941680447, + "grad_norm": 0.318977039667831, + "learning_rate": 0.00011910774820829549, 
+ "loss": 1.0942, + "step": 2368 + }, + { + "epoch": 0.45578235382534193, + "grad_norm": 0.3072838892086311, + "learning_rate": 0.00011904657194315485, + "loss": 1.0379, + "step": 2369 + }, + { + "epoch": 0.45597474823387946, + "grad_norm": 0.36936268066809186, + "learning_rate": 0.00011898538828054517, + "loss": 1.1097, + "step": 2370 + }, + { + "epoch": 0.456167142642417, + "grad_norm": 0.33227338222367825, + "learning_rate": 0.00011892419724422946, + "loss": 1.0905, + "step": 2371 + }, + { + "epoch": 0.45635953705095444, + "grad_norm": 0.3276191470664485, + "learning_rate": 0.00011886299885797356, + "loss": 1.1359, + "step": 2372 + }, + { + "epoch": 0.45655193145949197, + "grad_norm": 0.36487282188100045, + "learning_rate": 0.00011880179314554629, + "loss": 1.0813, + "step": 2373 + }, + { + "epoch": 0.45674432586802943, + "grad_norm": 0.2645058478472509, + "learning_rate": 0.00011874058013071923, + "loss": 1.138, + "step": 2374 + }, + { + "epoch": 0.45693672027656695, + "grad_norm": 0.34007458867204715, + "learning_rate": 0.00011867935983726677, + "loss": 1.0535, + "step": 2375 + }, + { + "epoch": 0.4571291146851045, + "grad_norm": 0.2850488740324468, + "learning_rate": 0.0001186181322889662, + "loss": 1.1171, + "step": 2376 + }, + { + "epoch": 0.45732150909364194, + "grad_norm": 0.2432625894589558, + "learning_rate": 0.00011855689750959757, + "loss": 1.0781, + "step": 2377 + }, + { + "epoch": 0.45751390350217946, + "grad_norm": 0.29386832480620884, + "learning_rate": 0.0001184956555229438, + "loss": 1.15, + "step": 2378 + }, + { + "epoch": 0.457706297910717, + "grad_norm": 0.26862782200848306, + "learning_rate": 0.00011843440635279055, + "loss": 1.1044, + "step": 2379 + }, + { + "epoch": 0.45789869231925445, + "grad_norm": 0.42155578944834304, + "learning_rate": 0.00011837315002292628, + "loss": 1.0668, + "step": 2380 + }, + { + "epoch": 0.458091086727792, + "grad_norm": 0.3021973754570017, + "learning_rate": 0.00011831188655714225, + "loss": 1.0575, + "step": 2381 + }, + { + "epoch": 0.4582834811363295, + "grad_norm": 0.2629087916374506, + "learning_rate": 0.0001182506159792325, + "loss": 1.1012, + "step": 2382 + }, + { + "epoch": 0.45847587554486696, + "grad_norm": 0.31147553186013294, + "learning_rate": 0.00011818933831299382, + "loss": 1.0495, + "step": 2383 + }, + { + "epoch": 0.4586682699534045, + "grad_norm": 0.28110777583345414, + "learning_rate": 0.00011812805358222571, + "loss": 1.0646, + "step": 2384 + }, + { + "epoch": 0.458860664361942, + "grad_norm": 0.3158535523354326, + "learning_rate": 0.00011806676181073048, + "loss": 1.1176, + "step": 2385 + }, + { + "epoch": 0.45905305877047947, + "grad_norm": 0.30322657882936893, + "learning_rate": 0.00011800546302231316, + "loss": 1.0483, + "step": 2386 + }, + { + "epoch": 0.459245453179017, + "grad_norm": 0.31842168511961255, + "learning_rate": 0.00011794415724078147, + "loss": 1.1516, + "step": 2387 + }, + { + "epoch": 0.45943784758755446, + "grad_norm": 0.31712597940783244, + "learning_rate": 0.00011788284448994586, + "loss": 1.0873, + "step": 2388 + }, + { + "epoch": 0.459630241996092, + "grad_norm": 0.30256333423176934, + "learning_rate": 0.00011782152479361957, + "loss": 1.0781, + "step": 2389 + }, + { + "epoch": 0.4598226364046295, + "grad_norm": 0.29955739856902935, + "learning_rate": 0.00011776019817561834, + "loss": 0.9694, + "step": 2390 + }, + { + "epoch": 0.46001503081316697, + "grad_norm": 0.281558904867827, + "learning_rate": 0.00011769886465976086, + "loss": 1.0423, + "step": 2391 + }, + { + "epoch": 
0.4602074252217045, + "grad_norm": 0.3431549084874546, + "learning_rate": 0.00011763752426986823, + "loss": 1.0413, + "step": 2392 + }, + { + "epoch": 0.460399819630242, + "grad_norm": 0.2779997009445957, + "learning_rate": 0.00011757617702976442, + "loss": 1.1059, + "step": 2393 + }, + { + "epoch": 0.4605922140387795, + "grad_norm": 0.2818652570094477, + "learning_rate": 0.000117514822963276, + "loss": 1.1298, + "step": 2394 + }, + { + "epoch": 0.460784608447317, + "grad_norm": 0.26916626681457934, + "learning_rate": 0.00011745346209423216, + "loss": 1.0699, + "step": 2395 + }, + { + "epoch": 0.4609770028558545, + "grad_norm": 0.2742812769718791, + "learning_rate": 0.00011739209444646477, + "loss": 1.0416, + "step": 2396 + }, + { + "epoch": 0.461169397264392, + "grad_norm": 0.33114910849519996, + "learning_rate": 0.00011733072004380827, + "loss": 1.1922, + "step": 2397 + }, + { + "epoch": 0.4613617916729295, + "grad_norm": 0.3338207337392457, + "learning_rate": 0.00011726933891009983, + "loss": 1.123, + "step": 2398 + }, + { + "epoch": 0.46155418608146703, + "grad_norm": 0.3715629482370197, + "learning_rate": 0.00011720795106917917, + "loss": 1.104, + "step": 2399 + }, + { + "epoch": 0.4617465804900045, + "grad_norm": 0.26564206366393767, + "learning_rate": 0.00011714655654488859, + "loss": 1.1455, + "step": 2400 + }, + { + "epoch": 0.461938974898542, + "grad_norm": 0.25886338899516165, + "learning_rate": 0.00011708515536107299, + "loss": 1.0442, + "step": 2401 + }, + { + "epoch": 0.4621313693070795, + "grad_norm": 0.33739270473479455, + "learning_rate": 0.00011702374754157997, + "loss": 0.9982, + "step": 2402 + }, + { + "epoch": 0.462323763715617, + "grad_norm": 0.3516541452146191, + "learning_rate": 0.00011696233311025957, + "loss": 1.1059, + "step": 2403 + }, + { + "epoch": 0.46251615812415453, + "grad_norm": 0.28091670825325904, + "learning_rate": 0.00011690091209096441, + "loss": 1.0874, + "step": 2404 + }, + { + "epoch": 0.462708552532692, + "grad_norm": 0.44757795831758546, + "learning_rate": 0.00011683948450754976, + "loss": 1.034, + "step": 2405 + }, + { + "epoch": 0.4629009469412295, + "grad_norm": 0.28681423657596056, + "learning_rate": 0.00011677805038387337, + "loss": 1.0373, + "step": 2406 + }, + { + "epoch": 0.46309334134976704, + "grad_norm": 0.2703859948163023, + "learning_rate": 0.00011671660974379555, + "loss": 1.0819, + "step": 2407 + }, + { + "epoch": 0.4632857357583045, + "grad_norm": 0.3163916228926271, + "learning_rate": 0.00011665516261117912, + "loss": 1.1011, + "step": 2408 + }, + { + "epoch": 0.46347813016684203, + "grad_norm": 0.27350316299605615, + "learning_rate": 0.00011659370900988947, + "loss": 0.9953, + "step": 2409 + }, + { + "epoch": 0.46367052457537955, + "grad_norm": 0.29232531072109774, + "learning_rate": 0.00011653224896379439, + "loss": 1.0311, + "step": 2410 + }, + { + "epoch": 0.463862918983917, + "grad_norm": 0.36498789740764503, + "learning_rate": 0.0001164707824967644, + "loss": 0.9932, + "step": 2411 + }, + { + "epoch": 0.46405531339245454, + "grad_norm": 0.3140372558749361, + "learning_rate": 0.00011640930963267225, + "loss": 1.1021, + "step": 2412 + }, + { + "epoch": 0.46424770780099206, + "grad_norm": 0.2826486998073901, + "learning_rate": 0.00011634783039539328, + "loss": 1.0499, + "step": 2413 + }, + { + "epoch": 0.4644401022095295, + "grad_norm": 0.28672878644276156, + "learning_rate": 0.0001162863448088054, + "loss": 1.0147, + "step": 2414 + }, + { + "epoch": 0.46463249661806705, + "grad_norm": 0.3462903380271429, + "learning_rate": 
0.00011622485289678886, + "loss": 1.1023, + "step": 2415 + }, + { + "epoch": 0.4648248910266045, + "grad_norm": 0.3308961791445071, + "learning_rate": 0.00011616335468322641, + "loss": 1.0974, + "step": 2416 + }, + { + "epoch": 0.46501728543514204, + "grad_norm": 0.32146135483703064, + "learning_rate": 0.00011610185019200323, + "loss": 0.9963, + "step": 2417 + }, + { + "epoch": 0.46520967984367956, + "grad_norm": 0.30784864548896584, + "learning_rate": 0.00011604033944700701, + "loss": 1.0494, + "step": 2418 + }, + { + "epoch": 0.465402074252217, + "grad_norm": 0.3481653940663827, + "learning_rate": 0.00011597882247212776, + "loss": 0.9466, + "step": 2419 + }, + { + "epoch": 0.46559446866075455, + "grad_norm": 0.2670947755386483, + "learning_rate": 0.000115917299291258, + "loss": 1.0268, + "step": 2420 + }, + { + "epoch": 0.46578686306929207, + "grad_norm": 0.3241922940225946, + "learning_rate": 0.00011585576992829261, + "loss": 1.1111, + "step": 2421 + }, + { + "epoch": 0.46597925747782953, + "grad_norm": 0.37070086289730036, + "learning_rate": 0.00011579423440712886, + "loss": 1.0777, + "step": 2422 + }, + { + "epoch": 0.46617165188636706, + "grad_norm": 0.29925540305956333, + "learning_rate": 0.00011573269275166652, + "loss": 1.1428, + "step": 2423 + }, + { + "epoch": 0.4663640462949046, + "grad_norm": 0.32103609892224627, + "learning_rate": 0.00011567114498580758, + "loss": 1.0564, + "step": 2424 + }, + { + "epoch": 0.46655644070344204, + "grad_norm": 0.31649134667432066, + "learning_rate": 0.00011560959113345649, + "loss": 1.1181, + "step": 2425 + }, + { + "epoch": 0.46674883511197957, + "grad_norm": 0.3495530455616566, + "learning_rate": 0.00011554803121852005, + "loss": 1.0189, + "step": 2426 + }, + { + "epoch": 0.4669412295205171, + "grad_norm": 0.2918508303469182, + "learning_rate": 0.00011548646526490749, + "loss": 1.0675, + "step": 2427 + }, + { + "epoch": 0.46713362392905455, + "grad_norm": 0.2988520983363615, + "learning_rate": 0.00011542489329653024, + "loss": 1.0899, + "step": 2428 + }, + { + "epoch": 0.4673260183375921, + "grad_norm": 0.2576049541547143, + "learning_rate": 0.00011536331533730221, + "loss": 1.0882, + "step": 2429 + }, + { + "epoch": 0.46751841274612954, + "grad_norm": 0.26004376842000726, + "learning_rate": 0.00011530173141113947, + "loss": 1.0869, + "step": 2430 + }, + { + "epoch": 0.46771080715466706, + "grad_norm": 0.27800349692250365, + "learning_rate": 0.00011524014154196063, + "loss": 1.0189, + "step": 2431 + }, + { + "epoch": 0.4679032015632046, + "grad_norm": 0.36082387811606265, + "learning_rate": 0.00011517854575368642, + "loss": 1.0089, + "step": 2432 + }, + { + "epoch": 0.46809559597174205, + "grad_norm": 0.2948309937878554, + "learning_rate": 0.00011511694407023994, + "loss": 1.0338, + "step": 2433 + }, + { + "epoch": 0.4682879903802796, + "grad_norm": 0.3286568445270373, + "learning_rate": 0.00011505533651554654, + "loss": 1.012, + "step": 2434 + }, + { + "epoch": 0.4684803847888171, + "grad_norm": 0.3167569352769985, + "learning_rate": 0.00011499372311353398, + "loss": 1.0574, + "step": 2435 + }, + { + "epoch": 0.46867277919735456, + "grad_norm": 0.2639196979482772, + "learning_rate": 0.0001149321038881321, + "loss": 1.0875, + "step": 2436 + }, + { + "epoch": 0.4688651736058921, + "grad_norm": 0.2661156202677314, + "learning_rate": 0.00011487047886327313, + "loss": 1.1757, + "step": 2437 + }, + { + "epoch": 0.4690575680144296, + "grad_norm": 0.3009152148444373, + "learning_rate": 0.00011480884806289152, + "loss": 1.0668, + "step": 2438 + }, + 
{ + "epoch": 0.46924996242296707, + "grad_norm": 0.37544920914394553, + "learning_rate": 0.00011474721151092396, + "loss": 1.0562, + "step": 2439 + }, + { + "epoch": 0.4694423568315046, + "grad_norm": 0.3013747633863066, + "learning_rate": 0.00011468556923130942, + "loss": 1.1155, + "step": 2440 + }, + { + "epoch": 0.46963475124004206, + "grad_norm": 0.2916423561005298, + "learning_rate": 0.000114623921247989, + "loss": 1.0454, + "step": 2441 + }, + { + "epoch": 0.4698271456485796, + "grad_norm": 0.30209189554148286, + "learning_rate": 0.00011456226758490602, + "loss": 1.1009, + "step": 2442 + }, + { + "epoch": 0.4700195400571171, + "grad_norm": 0.4444882427931337, + "learning_rate": 0.00011450060826600617, + "loss": 1.0391, + "step": 2443 + }, + { + "epoch": 0.47021193446565457, + "grad_norm": 0.2856392935289551, + "learning_rate": 0.00011443894331523717, + "loss": 1.069, + "step": 2444 + }, + { + "epoch": 0.4704043288741921, + "grad_norm": 0.2929797319254082, + "learning_rate": 0.00011437727275654892, + "loss": 1.1212, + "step": 2445 + }, + { + "epoch": 0.4705967232827296, + "grad_norm": 0.3388439031139151, + "learning_rate": 0.00011431559661389362, + "loss": 1.0363, + "step": 2446 + }, + { + "epoch": 0.4707891176912671, + "grad_norm": 0.42997177745217463, + "learning_rate": 0.00011425391491122556, + "loss": 1.0631, + "step": 2447 + }, + { + "epoch": 0.4709815120998046, + "grad_norm": 0.26047216024754893, + "learning_rate": 0.00011419222767250119, + "loss": 1.1293, + "step": 2448 + }, + { + "epoch": 0.4711739065083421, + "grad_norm": 0.3067675308083437, + "learning_rate": 0.00011413053492167916, + "loss": 0.9995, + "step": 2449 + }, + { + "epoch": 0.4713663009168796, + "grad_norm": 0.34392169665096517, + "learning_rate": 0.00011406883668272014, + "loss": 1.1126, + "step": 2450 + }, + { + "epoch": 0.4715586953254171, + "grad_norm": 0.28771277526109607, + "learning_rate": 0.0001140071329795871, + "loss": 1.0986, + "step": 2451 + }, + { + "epoch": 0.47175108973395463, + "grad_norm": 0.2966084911052895, + "learning_rate": 0.000113945423836245, + "loss": 1.1005, + "step": 2452 + }, + { + "epoch": 0.4719434841424921, + "grad_norm": 0.32011544620504834, + "learning_rate": 0.00011388370927666101, + "loss": 0.9713, + "step": 2453 + }, + { + "epoch": 0.4721358785510296, + "grad_norm": 0.2906261196653228, + "learning_rate": 0.00011382198932480428, + "loss": 1.0719, + "step": 2454 + }, + { + "epoch": 0.4723282729595671, + "grad_norm": 0.30714586501668545, + "learning_rate": 0.00011376026400464617, + "loss": 1.0281, + "step": 2455 + }, + { + "epoch": 0.4725206673681046, + "grad_norm": 0.3078207126511925, + "learning_rate": 0.00011369853334016009, + "loss": 1.096, + "step": 2456 + }, + { + "epoch": 0.47271306177664213, + "grad_norm": 0.32017475890493713, + "learning_rate": 0.0001136367973553215, + "loss": 1.0625, + "step": 2457 + }, + { + "epoch": 0.4729054561851796, + "grad_norm": 0.32482432949187245, + "learning_rate": 0.00011357505607410797, + "loss": 1.1041, + "step": 2458 + }, + { + "epoch": 0.4730978505937171, + "grad_norm": 0.26549248121833174, + "learning_rate": 0.00011351330952049908, + "loss": 1.1516, + "step": 2459 + }, + { + "epoch": 0.47329024500225464, + "grad_norm": 0.2624649604038464, + "learning_rate": 0.00011345155771847646, + "loss": 1.0213, + "step": 2460 + }, + { + "epoch": 0.4734826394107921, + "grad_norm": 0.30952729581419347, + "learning_rate": 0.00011338980069202387, + "loss": 1.0981, + "step": 2461 + }, + { + "epoch": 0.4736750338193296, + "grad_norm": 0.37943549674014154, + 
"learning_rate": 0.00011332803846512697, + "loss": 1.0728, + "step": 2462 + }, + { + "epoch": 0.47386742822786715, + "grad_norm": 0.28536607623304083, + "learning_rate": 0.00011326627106177347, + "loss": 1.0961, + "step": 2463 + }, + { + "epoch": 0.4740598226364046, + "grad_norm": 0.3067005267882926, + "learning_rate": 0.0001132044985059532, + "loss": 1.0373, + "step": 2464 + }, + { + "epoch": 0.47425221704494214, + "grad_norm": 0.3204502995861548, + "learning_rate": 0.00011314272082165784, + "loss": 0.9995, + "step": 2465 + }, + { + "epoch": 0.47444461145347966, + "grad_norm": 0.3291924589533158, + "learning_rate": 0.00011308093803288118, + "loss": 1.0895, + "step": 2466 + }, + { + "epoch": 0.4746370058620171, + "grad_norm": 0.3417270692438031, + "learning_rate": 0.00011301915016361891, + "loss": 1.1515, + "step": 2467 + }, + { + "epoch": 0.47482940027055465, + "grad_norm": 0.2985479459112958, + "learning_rate": 0.0001129573572378687, + "loss": 1.1156, + "step": 2468 + }, + { + "epoch": 0.4750217946790921, + "grad_norm": 0.2672237204246719, + "learning_rate": 0.00011289555927963031, + "loss": 1.0562, + "step": 2469 + }, + { + "epoch": 0.47521418908762963, + "grad_norm": 0.2782865846539132, + "learning_rate": 0.00011283375631290527, + "loss": 1.0884, + "step": 2470 + }, + { + "epoch": 0.47540658349616716, + "grad_norm": 0.29891313505133355, + "learning_rate": 0.00011277194836169714, + "loss": 1.1497, + "step": 2471 + }, + { + "epoch": 0.4755989779047046, + "grad_norm": 0.2939161275307998, + "learning_rate": 0.00011271013545001143, + "loss": 1.1317, + "step": 2472 + }, + { + "epoch": 0.47579137231324214, + "grad_norm": 0.2935173681906, + "learning_rate": 0.00011264831760185562, + "loss": 1.1136, + "step": 2473 + }, + { + "epoch": 0.47598376672177967, + "grad_norm": 0.30430278052563287, + "learning_rate": 0.00011258649484123896, + "loss": 1.0256, + "step": 2474 + }, + { + "epoch": 0.47617616113031713, + "grad_norm": 0.327969229597276, + "learning_rate": 0.00011252466719217273, + "loss": 1.0752, + "step": 2475 + }, + { + "epoch": 0.47636855553885465, + "grad_norm": 0.3070924360453279, + "learning_rate": 0.00011246283467867012, + "loss": 1.0928, + "step": 2476 + }, + { + "epoch": 0.4765609499473922, + "grad_norm": 0.26050029622275817, + "learning_rate": 0.00011240099732474611, + "loss": 1.0053, + "step": 2477 + }, + { + "epoch": 0.47675334435592964, + "grad_norm": 0.31549357223842334, + "learning_rate": 0.00011233915515441764, + "loss": 1.0787, + "step": 2478 + }, + { + "epoch": 0.47694573876446716, + "grad_norm": 0.31213070358897843, + "learning_rate": 0.00011227730819170348, + "loss": 1.1011, + "step": 2479 + }, + { + "epoch": 0.4771381331730047, + "grad_norm": 0.30214759097239374, + "learning_rate": 0.0001122154564606243, + "loss": 1.0055, + "step": 2480 + }, + { + "epoch": 0.47733052758154215, + "grad_norm": 0.3315365730666589, + "learning_rate": 0.0001121535999852026, + "loss": 1.0846, + "step": 2481 + }, + { + "epoch": 0.4775229219900797, + "grad_norm": 0.3027198839201215, + "learning_rate": 0.0001120917387894627, + "loss": 1.1211, + "step": 2482 + }, + { + "epoch": 0.47771531639861714, + "grad_norm": 0.29139373354457415, + "learning_rate": 0.00011202987289743079, + "loss": 0.9735, + "step": 2483 + }, + { + "epoch": 0.47790771080715466, + "grad_norm": 0.28622281575804387, + "learning_rate": 0.00011196800233313487, + "loss": 1.0428, + "step": 2484 + }, + { + "epoch": 0.4781001052156922, + "grad_norm": 0.3083370931275903, + "learning_rate": 0.00011190612712060476, + "loss": 1.0225, + 
"step": 2485 + }, + { + "epoch": 0.47829249962422965, + "grad_norm": 0.3453898740103891, + "learning_rate": 0.00011184424728387204, + "loss": 1.1167, + "step": 2486 + }, + { + "epoch": 0.47848489403276717, + "grad_norm": 0.2809754425406349, + "learning_rate": 0.00011178236284697017, + "loss": 1.0966, + "step": 2487 + }, + { + "epoch": 0.4786772884413047, + "grad_norm": 0.33603653127564614, + "learning_rate": 0.00011172047383393434, + "loss": 0.9848, + "step": 2488 + }, + { + "epoch": 0.47886968284984216, + "grad_norm": 0.32530594572132543, + "learning_rate": 0.00011165858026880152, + "loss": 1.1285, + "step": 2489 + }, + { + "epoch": 0.4790620772583797, + "grad_norm": 0.28477375710938924, + "learning_rate": 0.00011159668217561048, + "loss": 0.9873, + "step": 2490 + }, + { + "epoch": 0.4792544716669172, + "grad_norm": 0.26850697287763875, + "learning_rate": 0.0001115347795784017, + "loss": 1.1048, + "step": 2491 + }, + { + "epoch": 0.47944686607545467, + "grad_norm": 0.32263744208401285, + "learning_rate": 0.00011147287250121745, + "loss": 1.0445, + "step": 2492 + }, + { + "epoch": 0.4796392604839922, + "grad_norm": 0.3527327693555526, + "learning_rate": 0.00011141096096810173, + "loss": 1.085, + "step": 2493 + }, + { + "epoch": 0.4798316548925297, + "grad_norm": 0.304616016772181, + "learning_rate": 0.00011134904500310028, + "loss": 1.0607, + "step": 2494 + }, + { + "epoch": 0.4800240493010672, + "grad_norm": 0.30320494796454556, + "learning_rate": 0.00011128712463026048, + "loss": 1.1149, + "step": 2495 + }, + { + "epoch": 0.4802164437096047, + "grad_norm": 0.2887639606992245, + "learning_rate": 0.00011122519987363155, + "loss": 1.0105, + "step": 2496 + }, + { + "epoch": 0.48040883811814217, + "grad_norm": 0.34694566403040783, + "learning_rate": 0.00011116327075726435, + "loss": 1.1541, + "step": 2497 + }, + { + "epoch": 0.4806012325266797, + "grad_norm": 0.249702910513426, + "learning_rate": 0.00011110133730521142, + "loss": 1.0795, + "step": 2498 + }, + { + "epoch": 0.4807936269352172, + "grad_norm": 0.2852476586916126, + "learning_rate": 0.000111039399541527, + "loss": 1.0455, + "step": 2499 + }, + { + "epoch": 0.4809860213437547, + "grad_norm": 0.3477661351396813, + "learning_rate": 0.000110977457490267, + "loss": 1.0762, + "step": 2500 + }, + { + "epoch": 0.4811784157522922, + "grad_norm": 0.29912510631231753, + "learning_rate": 0.000110915511175489, + "loss": 1.0767, + "step": 2501 + }, + { + "epoch": 0.4813708101608297, + "grad_norm": 0.31702866915248035, + "learning_rate": 0.00011085356062125225, + "loss": 1.0021, + "step": 2502 + }, + { + "epoch": 0.4815632045693672, + "grad_norm": 0.28837950745195445, + "learning_rate": 0.00011079160585161759, + "loss": 1.1517, + "step": 2503 + }, + { + "epoch": 0.4817555989779047, + "grad_norm": 0.271426384992999, + "learning_rate": 0.00011072964689064759, + "loss": 0.999, + "step": 2504 + }, + { + "epoch": 0.48194799338644223, + "grad_norm": 0.32204520349360083, + "learning_rate": 0.0001106676837624064, + "loss": 1.0527, + "step": 2505 + }, + { + "epoch": 0.4821403877949797, + "grad_norm": 0.31410915148149016, + "learning_rate": 0.00011060571649095972, + "loss": 1.031, + "step": 2506 + }, + { + "epoch": 0.4823327822035172, + "grad_norm": 0.27931797715272966, + "learning_rate": 0.000110543745100375, + "loss": 1.1246, + "step": 2507 + }, + { + "epoch": 0.4825251766120547, + "grad_norm": 0.2752501976050457, + "learning_rate": 0.00011048176961472114, + "loss": 1.1451, + "step": 2508 + }, + { + "epoch": 0.4827175710205922, + "grad_norm": 
0.2747532456619337, + "learning_rate": 0.00011041979005806875, + "loss": 1.0734, + "step": 2509 + }, + { + "epoch": 0.48290996542912973, + "grad_norm": 0.2342734006645817, + "learning_rate": 0.00011035780645449001, + "loss": 1.0454, + "step": 2510 + }, + { + "epoch": 0.4831023598376672, + "grad_norm": 0.32742955385460815, + "learning_rate": 0.00011029581882805857, + "loss": 0.9748, + "step": 2511 + }, + { + "epoch": 0.4832947542462047, + "grad_norm": 0.37946687327215983, + "learning_rate": 0.00011023382720284972, + "loss": 1.0628, + "step": 2512 + }, + { + "epoch": 0.48348714865474224, + "grad_norm": 0.2732650447123761, + "learning_rate": 0.00011017183160294033, + "loss": 1.0259, + "step": 2513 + }, + { + "epoch": 0.4836795430632797, + "grad_norm": 0.2729585455966021, + "learning_rate": 0.00011010983205240877, + "loss": 1.0414, + "step": 2514 + }, + { + "epoch": 0.4838719374718172, + "grad_norm": 0.32655314274408553, + "learning_rate": 0.00011004782857533489, + "loss": 1.1244, + "step": 2515 + }, + { + "epoch": 0.48406433188035475, + "grad_norm": 0.23700820491114358, + "learning_rate": 0.00010998582119580019, + "loss": 1.1172, + "step": 2516 + }, + { + "epoch": 0.4842567262888922, + "grad_norm": 0.3928723737777664, + "learning_rate": 0.00010992380993788762, + "loss": 1.0799, + "step": 2517 + }, + { + "epoch": 0.48444912069742974, + "grad_norm": 0.2912192721303968, + "learning_rate": 0.00010986179482568161, + "loss": 1.1404, + "step": 2518 + }, + { + "epoch": 0.48464151510596726, + "grad_norm": 0.2978549220616921, + "learning_rate": 0.00010979977588326815, + "loss": 1.1675, + "step": 2519 + }, + { + "epoch": 0.4848339095145047, + "grad_norm": 0.2926954785297245, + "learning_rate": 0.00010973775313473464, + "loss": 1.1438, + "step": 2520 + }, + { + "epoch": 0.48502630392304225, + "grad_norm": 0.36000964866389756, + "learning_rate": 0.00010967572660417, + "loss": 1.038, + "step": 2521 + }, + { + "epoch": 0.4852186983315797, + "grad_norm": 0.37910792556434847, + "learning_rate": 0.00010961369631566467, + "loss": 1.0837, + "step": 2522 + }, + { + "epoch": 0.48541109274011723, + "grad_norm": 0.32581771747503674, + "learning_rate": 0.00010955166229331048, + "loss": 1.0686, + "step": 2523 + }, + { + "epoch": 0.48560348714865476, + "grad_norm": 0.40931583909959296, + "learning_rate": 0.00010948962456120068, + "loss": 1.1053, + "step": 2524 + }, + { + "epoch": 0.4857958815571922, + "grad_norm": 0.2777818201392416, + "learning_rate": 0.00010942758314343006, + "loss": 1.1047, + "step": 2525 + }, + { + "epoch": 0.48598827596572974, + "grad_norm": 0.2723539189762568, + "learning_rate": 0.00010936553806409482, + "loss": 0.9886, + "step": 2526 + }, + { + "epoch": 0.48618067037426727, + "grad_norm": 0.29465289512987186, + "learning_rate": 0.00010930348934729248, + "loss": 1.1171, + "step": 2527 + }, + { + "epoch": 0.48637306478280473, + "grad_norm": 0.34597539212264616, + "learning_rate": 0.00010924143701712211, + "loss": 1.0029, + "step": 2528 + }, + { + "epoch": 0.48656545919134225, + "grad_norm": 0.2786773823360309, + "learning_rate": 0.00010917938109768404, + "loss": 1.0654, + "step": 2529 + }, + { + "epoch": 0.4867578535998798, + "grad_norm": 0.35869410693668063, + "learning_rate": 0.00010911732161308015, + "loss": 1.0327, + "step": 2530 + }, + { + "epoch": 0.48695024800841724, + "grad_norm": 0.30607208498100946, + "learning_rate": 0.00010905525858741364, + "loss": 1.173, + "step": 2531 + }, + { + "epoch": 0.48714264241695476, + "grad_norm": 0.2887048883746236, + "learning_rate": 0.000108993192044789, 
+ "loss": 1.1132, + "step": 2532 + }, + { + "epoch": 0.4873350368254923, + "grad_norm": 0.2722475202006041, + "learning_rate": 0.00010893112200931219, + "loss": 1.0333, + "step": 2533 + }, + { + "epoch": 0.48752743123402975, + "grad_norm": 0.3061443115824018, + "learning_rate": 0.00010886904850509052, + "loss": 1.0755, + "step": 2534 + }, + { + "epoch": 0.4877198256425673, + "grad_norm": 0.28704696881583397, + "learning_rate": 0.00010880697155623263, + "loss": 1.0567, + "step": 2535 + }, + { + "epoch": 0.48791222005110474, + "grad_norm": 0.26753181810345267, + "learning_rate": 0.00010874489118684845, + "loss": 1.1283, + "step": 2536 + }, + { + "epoch": 0.48810461445964226, + "grad_norm": 0.3972482521246636, + "learning_rate": 0.00010868280742104928, + "loss": 1.1804, + "step": 2537 + }, + { + "epoch": 0.4882970088681798, + "grad_norm": 0.4312688126899029, + "learning_rate": 0.00010862072028294776, + "loss": 1.1485, + "step": 2538 + }, + { + "epoch": 0.48848940327671725, + "grad_norm": 0.2605607369109513, + "learning_rate": 0.00010855862979665787, + "loss": 1.0787, + "step": 2539 + }, + { + "epoch": 0.48868179768525477, + "grad_norm": 0.33775397735827983, + "learning_rate": 0.00010849653598629476, + "loss": 1.0298, + "step": 2540 + }, + { + "epoch": 0.4888741920937923, + "grad_norm": 0.3875969639199608, + "learning_rate": 0.00010843443887597495, + "loss": 1.0571, + "step": 2541 + }, + { + "epoch": 0.48906658650232976, + "grad_norm": 0.2666988960590085, + "learning_rate": 0.00010837233848981632, + "loss": 1.097, + "step": 2542 + }, + { + "epoch": 0.4892589809108673, + "grad_norm": 0.31221334810809465, + "learning_rate": 0.00010831023485193788, + "loss": 1.0807, + "step": 2543 + }, + { + "epoch": 0.4894513753194048, + "grad_norm": 0.29251910199626263, + "learning_rate": 0.00010824812798645996, + "loss": 1.1063, + "step": 2544 + }, + { + "epoch": 0.48964376972794227, + "grad_norm": 0.3491128473642241, + "learning_rate": 0.00010818601791750417, + "loss": 1.0837, + "step": 2545 + }, + { + "epoch": 0.4898361641364798, + "grad_norm": 0.24704517407416768, + "learning_rate": 0.00010812390466919338, + "loss": 1.0647, + "step": 2546 + }, + { + "epoch": 0.4900285585450173, + "grad_norm": 0.2613052283228881, + "learning_rate": 0.00010806178826565161, + "loss": 1.1837, + "step": 2547 + }, + { + "epoch": 0.4902209529535548, + "grad_norm": 0.3441270718443512, + "learning_rate": 0.00010799966873100417, + "loss": 1.1432, + "step": 2548 + }, + { + "epoch": 0.4904133473620923, + "grad_norm": 0.2968938160248746, + "learning_rate": 0.00010793754608937758, + "loss": 1.173, + "step": 2549 + }, + { + "epoch": 0.49060574177062977, + "grad_norm": 0.2709027638329477, + "learning_rate": 0.00010787542036489955, + "loss": 1.1116, + "step": 2550 + }, + { + "epoch": 0.4907981361791673, + "grad_norm": 0.2350569375522259, + "learning_rate": 0.00010781329158169902, + "loss": 1.0758, + "step": 2551 + }, + { + "epoch": 0.4909905305877048, + "grad_norm": 0.30759414944729424, + "learning_rate": 0.00010775115976390606, + "loss": 0.9265, + "step": 2552 + }, + { + "epoch": 0.4911829249962423, + "grad_norm": 0.3420019187451639, + "learning_rate": 0.00010768902493565196, + "loss": 1.0837, + "step": 2553 + }, + { + "epoch": 0.4913753194047798, + "grad_norm": 0.26775812639526614, + "learning_rate": 0.00010762688712106917, + "loss": 1.0271, + "step": 2554 + }, + { + "epoch": 0.4915677138133173, + "grad_norm": 0.28723952286715476, + "learning_rate": 0.00010756474634429132, + "loss": 1.0798, + "step": 2555 + }, + { + "epoch": 
0.4917601082218548, + "grad_norm": 0.3115782159029881, + "learning_rate": 0.00010750260262945314, + "loss": 1.131, + "step": 2556 + }, + { + "epoch": 0.4919525026303923, + "grad_norm": 0.2638660440405208, + "learning_rate": 0.00010744045600069055, + "loss": 1.0695, + "step": 2557 + }, + { + "epoch": 0.49214489703892983, + "grad_norm": 0.31320999280179135, + "learning_rate": 0.00010737830648214062, + "loss": 1.0713, + "step": 2558 + }, + { + "epoch": 0.4923372914474673, + "grad_norm": 0.2625801367677137, + "learning_rate": 0.00010731615409794143, + "loss": 1.0681, + "step": 2559 + }, + { + "epoch": 0.4925296858560048, + "grad_norm": 0.3148140684694239, + "learning_rate": 0.00010725399887223233, + "loss": 1.0057, + "step": 2560 + }, + { + "epoch": 0.49272208026454234, + "grad_norm": 0.3986659373568962, + "learning_rate": 0.00010719184082915363, + "loss": 1.1615, + "step": 2561 + }, + { + "epoch": 0.4929144746730798, + "grad_norm": 0.3212605066343875, + "learning_rate": 0.00010712967999284682, + "loss": 1.0646, + "step": 2562 + }, + { + "epoch": 0.4931068690816173, + "grad_norm": 0.26949647752321304, + "learning_rate": 0.00010706751638745447, + "loss": 1.0891, + "step": 2563 + }, + { + "epoch": 0.4932992634901548, + "grad_norm": 0.3147194011096115, + "learning_rate": 0.00010700535003712022, + "loss": 0.9794, + "step": 2564 + }, + { + "epoch": 0.4934916578986923, + "grad_norm": 0.27266970059366, + "learning_rate": 0.0001069431809659887, + "loss": 1.1, + "step": 2565 + }, + { + "epoch": 0.49368405230722984, + "grad_norm": 0.3216890395388259, + "learning_rate": 0.0001068810091982057, + "loss": 1.1474, + "step": 2566 + }, + { + "epoch": 0.4938764467157673, + "grad_norm": 0.26261033443954596, + "learning_rate": 0.00010681883475791803, + "loss": 1.0907, + "step": 2567 + }, + { + "epoch": 0.4940688411243048, + "grad_norm": 0.32713879777200705, + "learning_rate": 0.0001067566576692735, + "loss": 1.1572, + "step": 2568 + }, + { + "epoch": 0.49426123553284235, + "grad_norm": 0.2720177629628606, + "learning_rate": 0.00010669447795642103, + "loss": 1.0116, + "step": 2569 + }, + { + "epoch": 0.4944536299413798, + "grad_norm": 0.31697399187316516, + "learning_rate": 0.00010663229564351041, + "loss": 1.1468, + "step": 2570 + }, + { + "epoch": 0.49464602434991733, + "grad_norm": 0.31191288445545967, + "learning_rate": 0.00010657011075469259, + "loss": 1.0153, + "step": 2571 + }, + { + "epoch": 0.49483841875845486, + "grad_norm": 0.2697020764144713, + "learning_rate": 0.0001065079233141195, + "loss": 1.0992, + "step": 2572 + }, + { + "epoch": 0.4950308131669923, + "grad_norm": 0.2833597359763036, + "learning_rate": 0.00010644573334594395, + "loss": 0.9997, + "step": 2573 + }, + { + "epoch": 0.49522320757552984, + "grad_norm": 0.2832566604790744, + "learning_rate": 0.00010638354087431985, + "loss": 1.0163, + "step": 2574 + }, + { + "epoch": 0.4954156019840673, + "grad_norm": 0.33171593960929263, + "learning_rate": 0.00010632134592340204, + "loss": 1.0766, + "step": 2575 + }, + { + "epoch": 0.49560799639260483, + "grad_norm": 0.2822850139561916, + "learning_rate": 0.0001062591485173463, + "loss": 1.1371, + "step": 2576 + }, + { + "epoch": 0.49580039080114235, + "grad_norm": 0.40692033349733203, + "learning_rate": 0.00010619694868030942, + "loss": 1.0977, + "step": 2577 + }, + { + "epoch": 0.4959927852096798, + "grad_norm": 0.29906416732614016, + "learning_rate": 0.00010613474643644908, + "loss": 1.0658, + "step": 2578 + }, + { + "epoch": 0.49618517961821734, + "grad_norm": 0.29619045834352786, + 
"learning_rate": 0.0001060725418099239, + "loss": 0.9958, + "step": 2579 + }, + { + "epoch": 0.49637757402675486, + "grad_norm": 0.33353541430091754, + "learning_rate": 0.00010601033482489346, + "loss": 1.0225, + "step": 2580 + }, + { + "epoch": 0.49656996843529233, + "grad_norm": 0.2923531560869263, + "learning_rate": 0.00010594812550551825, + "loss": 1.0753, + "step": 2581 + }, + { + "epoch": 0.49676236284382985, + "grad_norm": 0.2730992878560735, + "learning_rate": 0.00010588591387595961, + "loss": 1.0468, + "step": 2582 + }, + { + "epoch": 0.4969547572523674, + "grad_norm": 0.32951452182989416, + "learning_rate": 0.00010582369996037984, + "loss": 1.075, + "step": 2583 + }, + { + "epoch": 0.49714715166090484, + "grad_norm": 0.33759576811525555, + "learning_rate": 0.00010576148378294213, + "loss": 1.1513, + "step": 2584 + }, + { + "epoch": 0.49733954606944236, + "grad_norm": 0.3197998033215524, + "learning_rate": 0.0001056992653678105, + "loss": 1.0203, + "step": 2585 + }, + { + "epoch": 0.4975319404779799, + "grad_norm": 0.2869669207047471, + "learning_rate": 0.00010563704473914986, + "loss": 1.0632, + "step": 2586 + }, + { + "epoch": 0.49772433488651735, + "grad_norm": 0.27943692117962127, + "learning_rate": 0.00010557482192112602, + "loss": 1.0381, + "step": 2587 + }, + { + "epoch": 0.49791672929505487, + "grad_norm": 0.30820481904437813, + "learning_rate": 0.00010551259693790556, + "loss": 0.9657, + "step": 2588 + }, + { + "epoch": 0.49810912370359234, + "grad_norm": 0.2980366953914205, + "learning_rate": 0.00010545036981365602, + "loss": 1.1031, + "step": 2589 + }, + { + "epoch": 0.49830151811212986, + "grad_norm": 0.28084455929546465, + "learning_rate": 0.0001053881405725456, + "loss": 1.0672, + "step": 2590 + }, + { + "epoch": 0.4984939125206674, + "grad_norm": 0.28771732213575407, + "learning_rate": 0.00010532590923874349, + "loss": 0.9641, + "step": 2591 + }, + { + "epoch": 0.49868630692920485, + "grad_norm": 0.34389843173403906, + "learning_rate": 0.00010526367583641958, + "loss": 1.0802, + "step": 2592 + }, + { + "epoch": 0.49887870133774237, + "grad_norm": 0.3732839017386677, + "learning_rate": 0.00010520144038974466, + "loss": 0.9591, + "step": 2593 + }, + { + "epoch": 0.4990710957462799, + "grad_norm": 0.34674764173762207, + "learning_rate": 0.00010513920292289021, + "loss": 1.0662, + "step": 2594 + }, + { + "epoch": 0.49926349015481736, + "grad_norm": 0.29948221452749685, + "learning_rate": 0.00010507696346002858, + "loss": 1.0667, + "step": 2595 + }, + { + "epoch": 0.4994558845633549, + "grad_norm": 0.3475302187613384, + "learning_rate": 0.00010501472202533284, + "loss": 1.0526, + "step": 2596 + }, + { + "epoch": 0.4996482789718924, + "grad_norm": 0.2997366532667675, + "learning_rate": 0.00010495247864297684, + "loss": 1.0594, + "step": 2597 + }, + { + "epoch": 0.49984067338042987, + "grad_norm": 0.30994459615061065, + "learning_rate": 0.00010489023333713521, + "loss": 0.9625, + "step": 2598 + }, + { + "epoch": 0.5000330677889674, + "grad_norm": 0.3686286763345276, + "learning_rate": 0.00010482798613198329, + "loss": 0.998, + "step": 2599 + }, + { + "epoch": 0.5002254621975049, + "grad_norm": 0.3163621335454191, + "learning_rate": 0.00010476573705169719, + "loss": 1.0923, + "step": 2600 + }, + { + "epoch": 0.5004178566060424, + "grad_norm": 0.30144952669238584, + "learning_rate": 0.00010470348612045375, + "loss": 1.0036, + "step": 2601 + }, + { + "epoch": 0.5006102510145799, + "grad_norm": 0.37770094153244205, + "learning_rate": 0.00010464123336243048, + "loss": 1.0699, + 
"step": 2602 + }, + { + "epoch": 0.5008026454231174, + "grad_norm": 0.35502403376775843, + "learning_rate": 0.00010457897880180565, + "loss": 1.0344, + "step": 2603 + }, + { + "epoch": 0.5009950398316549, + "grad_norm": 0.25372534454374396, + "learning_rate": 0.00010451672246275825, + "loss": 0.9606, + "step": 2604 + }, + { + "epoch": 0.5011874342401924, + "grad_norm": 0.2835736745333487, + "learning_rate": 0.00010445446436946789, + "loss": 1.0621, + "step": 2605 + }, + { + "epoch": 0.5013798286487299, + "grad_norm": 0.31098446920641104, + "learning_rate": 0.00010439220454611487, + "loss": 1.0722, + "step": 2606 + }, + { + "epoch": 0.5015722230572675, + "grad_norm": 0.31501677875249645, + "learning_rate": 0.00010432994301688021, + "loss": 1.0762, + "step": 2607 + }, + { + "epoch": 0.5017646174658049, + "grad_norm": 0.2656806264910574, + "learning_rate": 0.00010426767980594558, + "loss": 1.092, + "step": 2608 + }, + { + "epoch": 0.5019570118743424, + "grad_norm": 0.29719386992571994, + "learning_rate": 0.00010420541493749332, + "loss": 0.975, + "step": 2609 + }, + { + "epoch": 0.5021494062828799, + "grad_norm": 0.2930741864291432, + "learning_rate": 0.00010414314843570634, + "loss": 1.0567, + "step": 2610 + }, + { + "epoch": 0.5023418006914174, + "grad_norm": 0.27144962176639403, + "learning_rate": 0.00010408088032476822, + "loss": 1.0395, + "step": 2611 + }, + { + "epoch": 0.5025341950999549, + "grad_norm": 0.30055243602535087, + "learning_rate": 0.00010401861062886325, + "loss": 1.1904, + "step": 2612 + }, + { + "epoch": 0.5027265895084924, + "grad_norm": 0.3549779485460265, + "learning_rate": 0.00010395633937217621, + "loss": 1.0692, + "step": 2613 + }, + { + "epoch": 0.5029189839170299, + "grad_norm": 0.26843258273395965, + "learning_rate": 0.00010389406657889254, + "loss": 0.9925, + "step": 2614 + }, + { + "epoch": 0.5031113783255674, + "grad_norm": 0.2574939342570903, + "learning_rate": 0.00010383179227319826, + "loss": 1.0854, + "step": 2615 + }, + { + "epoch": 0.5033037727341049, + "grad_norm": 0.27246358490382644, + "learning_rate": 0.00010376951647928005, + "loss": 1.1691, + "step": 2616 + }, + { + "epoch": 0.5034961671426424, + "grad_norm": 0.274051757221271, + "learning_rate": 0.00010370723922132506, + "loss": 1.1617, + "step": 2617 + }, + { + "epoch": 0.5036885615511799, + "grad_norm": 0.3553250776188068, + "learning_rate": 0.00010364496052352108, + "loss": 0.9463, + "step": 2618 + }, + { + "epoch": 0.5038809559597174, + "grad_norm": 0.2898745997092644, + "learning_rate": 0.00010358268041005643, + "loss": 1.0145, + "step": 2619 + }, + { + "epoch": 0.504073350368255, + "grad_norm": 0.24409249658862053, + "learning_rate": 0.00010352039890511998, + "loss": 1.0222, + "step": 2620 + }, + { + "epoch": 0.5042657447767924, + "grad_norm": 0.2889115040400644, + "learning_rate": 0.00010345811603290118, + "loss": 1.1149, + "step": 2621 + }, + { + "epoch": 0.5044581391853299, + "grad_norm": 0.31833577913532923, + "learning_rate": 0.00010339583181758996, + "loss": 1.0173, + "step": 2622 + }, + { + "epoch": 0.5046505335938675, + "grad_norm": 0.26892391856935105, + "learning_rate": 0.00010333354628337679, + "loss": 0.9919, + "step": 2623 + }, + { + "epoch": 0.5048429280024049, + "grad_norm": 0.29372557612701433, + "learning_rate": 0.00010327125945445264, + "loss": 1.1192, + "step": 2624 + }, + { + "epoch": 0.5050353224109424, + "grad_norm": 0.30514287790701333, + "learning_rate": 0.00010320897135500905, + "loss": 1.0816, + "step": 2625 + }, + { + "epoch": 0.50522771681948, + "grad_norm": 
0.2663340382110766, + "learning_rate": 0.00010314668200923792, + "loss": 0.9706, + "step": 2626 + }, + { + "epoch": 0.5054201112280174, + "grad_norm": 0.27497538646337466, + "learning_rate": 0.00010308439144133177, + "loss": 0.9269, + "step": 2627 + }, + { + "epoch": 0.5056125056365549, + "grad_norm": 0.3747878051056086, + "learning_rate": 0.00010302209967548353, + "loss": 1.0395, + "step": 2628 + }, + { + "epoch": 0.5058049000450925, + "grad_norm": 0.32699886016865964, + "learning_rate": 0.00010295980673588659, + "loss": 1.0573, + "step": 2629 + }, + { + "epoch": 0.50599729445363, + "grad_norm": 0.33481225110411705, + "learning_rate": 0.00010289751264673484, + "loss": 1.0797, + "step": 2630 + }, + { + "epoch": 0.5061896888621674, + "grad_norm": 0.3733809136960564, + "learning_rate": 0.00010283521743222255, + "loss": 1.0912, + "step": 2631 + }, + { + "epoch": 0.506382083270705, + "grad_norm": 0.29952830016660215, + "learning_rate": 0.00010277292111654446, + "loss": 1.0953, + "step": 2632 + }, + { + "epoch": 0.5065744776792425, + "grad_norm": 0.28969819219624027, + "learning_rate": 0.00010271062372389581, + "loss": 1.0718, + "step": 2633 + }, + { + "epoch": 0.5067668720877799, + "grad_norm": 0.2855058383205973, + "learning_rate": 0.00010264832527847212, + "loss": 1.106, + "step": 2634 + }, + { + "epoch": 0.5069592664963174, + "grad_norm": 0.2866604186099265, + "learning_rate": 0.00010258602580446941, + "loss": 1.0022, + "step": 2635 + }, + { + "epoch": 0.507151660904855, + "grad_norm": 0.3028260283123913, + "learning_rate": 0.00010252372532608405, + "loss": 1.1272, + "step": 2636 + }, + { + "epoch": 0.5073440553133924, + "grad_norm": 0.35341763494903256, + "learning_rate": 0.0001024614238675129, + "loss": 1.0565, + "step": 2637 + }, + { + "epoch": 0.5075364497219299, + "grad_norm": 0.2813090653405737, + "learning_rate": 0.00010239912145295303, + "loss": 1.077, + "step": 2638 + }, + { + "epoch": 0.5077288441304675, + "grad_norm": 0.28657049227187126, + "learning_rate": 0.00010233681810660207, + "loss": 1.0751, + "step": 2639 + }, + { + "epoch": 0.507921238539005, + "grad_norm": 0.42781320547043034, + "learning_rate": 0.00010227451385265787, + "loss": 0.9878, + "step": 2640 + }, + { + "epoch": 0.5081136329475424, + "grad_norm": 0.39459958921270927, + "learning_rate": 0.00010221220871531869, + "loss": 0.9328, + "step": 2641 + }, + { + "epoch": 0.50830602735608, + "grad_norm": 0.4018612321690641, + "learning_rate": 0.00010214990271878318, + "loss": 1.0537, + "step": 2642 + }, + { + "epoch": 0.5084984217646175, + "grad_norm": 0.29053851124061514, + "learning_rate": 0.00010208759588725016, + "loss": 1.0487, + "step": 2643 + }, + { + "epoch": 0.5086908161731549, + "grad_norm": 0.29738007242546566, + "learning_rate": 0.000102025288244919, + "loss": 1.0101, + "step": 2644 + }, + { + "epoch": 0.5088832105816925, + "grad_norm": 0.3250214277297258, + "learning_rate": 0.00010196297981598922, + "loss": 1.1369, + "step": 2645 + }, + { + "epoch": 0.50907560499023, + "grad_norm": 0.34190619789289134, + "learning_rate": 0.00010190067062466069, + "loss": 1.0531, + "step": 2646 + }, + { + "epoch": 0.5092679993987674, + "grad_norm": 0.2947098929850498, + "learning_rate": 0.00010183836069513359, + "loss": 1.1089, + "step": 2647 + }, + { + "epoch": 0.509460393807305, + "grad_norm": 0.3536138621507215, + "learning_rate": 0.00010177605005160838, + "loss": 1.1267, + "step": 2648 + }, + { + "epoch": 0.5096527882158425, + "grad_norm": 0.26788902121616626, + "learning_rate": 0.00010171373871828578, + "loss": 1.1427, + 
"step": 2649 + }, + { + "epoch": 0.50984518262438, + "grad_norm": 0.30130205048109276, + "learning_rate": 0.00010165142671936685, + "loss": 1.0723, + "step": 2650 + }, + { + "epoch": 0.5100375770329175, + "grad_norm": 0.2770160095983166, + "learning_rate": 0.00010158911407905278, + "loss": 1.1712, + "step": 2651 + }, + { + "epoch": 0.510229971441455, + "grad_norm": 0.26671936004694313, + "learning_rate": 0.00010152680082154513, + "loss": 1.0766, + "step": 2652 + }, + { + "epoch": 0.5104223658499925, + "grad_norm": 0.40072532513761644, + "learning_rate": 0.0001014644869710456, + "loss": 0.9855, + "step": 2653 + }, + { + "epoch": 0.51061476025853, + "grad_norm": 0.26219903596687183, + "learning_rate": 0.00010140217255175625, + "loss": 1.0791, + "step": 2654 + }, + { + "epoch": 0.5108071546670675, + "grad_norm": 0.3228343803495144, + "learning_rate": 0.00010133985758787921, + "loss": 0.99, + "step": 2655 + }, + { + "epoch": 0.510999549075605, + "grad_norm": 0.3244082266353016, + "learning_rate": 0.00010127754210361693, + "loss": 1.1227, + "step": 2656 + }, + { + "epoch": 0.5111919434841425, + "grad_norm": 0.36065187450740854, + "learning_rate": 0.00010121522612317203, + "loss": 1.0787, + "step": 2657 + }, + { + "epoch": 0.51138433789268, + "grad_norm": 0.34035338975275603, + "learning_rate": 0.00010115290967074728, + "loss": 1.1208, + "step": 2658 + }, + { + "epoch": 0.5115767323012175, + "grad_norm": 0.30493698593909646, + "learning_rate": 0.00010109059277054574, + "loss": 1.1078, + "step": 2659 + }, + { + "epoch": 0.511769126709755, + "grad_norm": 0.2840535902614037, + "learning_rate": 0.00010102827544677049, + "loss": 1.0772, + "step": 2660 + }, + { + "epoch": 0.5119615211182925, + "grad_norm": 0.2988125811168846, + "learning_rate": 0.00010096595772362491, + "loss": 1.1629, + "step": 2661 + }, + { + "epoch": 0.51215391552683, + "grad_norm": 0.32361070861201896, + "learning_rate": 0.0001009036396253125, + "loss": 1.0723, + "step": 2662 + }, + { + "epoch": 0.5123463099353674, + "grad_norm": 0.27752404145549714, + "learning_rate": 0.00010084132117603689, + "loss": 1.0606, + "step": 2663 + }, + { + "epoch": 0.512538704343905, + "grad_norm": 0.25688663722710564, + "learning_rate": 0.00010077900240000179, + "loss": 1.0527, + "step": 2664 + }, + { + "epoch": 0.5127310987524425, + "grad_norm": 0.28328637659502315, + "learning_rate": 0.00010071668332141115, + "loss": 1.0329, + "step": 2665 + }, + { + "epoch": 0.51292349316098, + "grad_norm": 0.3342898186566314, + "learning_rate": 0.00010065436396446899, + "loss": 1.0472, + "step": 2666 + }, + { + "epoch": 0.5131158875695175, + "grad_norm": 0.33043726905356363, + "learning_rate": 0.00010059204435337937, + "loss": 1.0561, + "step": 2667 + }, + { + "epoch": 0.513308281978055, + "grad_norm": 0.34945810910931496, + "learning_rate": 0.00010052972451234657, + "loss": 1.0958, + "step": 2668 + }, + { + "epoch": 0.5135006763865925, + "grad_norm": 0.3279197009321726, + "learning_rate": 0.00010046740446557486, + "loss": 1.0391, + "step": 2669 + }, + { + "epoch": 0.51369307079513, + "grad_norm": 0.3317184582762969, + "learning_rate": 0.00010040508423726865, + "loss": 1.0203, + "step": 2670 + }, + { + "epoch": 0.5138854652036675, + "grad_norm": 0.24702965120855871, + "learning_rate": 0.00010034276385163237, + "loss": 1.0539, + "step": 2671 + }, + { + "epoch": 0.514077859612205, + "grad_norm": 0.29554405798530037, + "learning_rate": 0.00010028044333287057, + "loss": 1.0724, + "step": 2672 + }, + { + "epoch": 0.5142702540207426, + "grad_norm": 0.3113869697297527, 
+ "learning_rate": 0.0001002181227051878, + "loss": 0.9427, + "step": 2673 + }, + { + "epoch": 0.51446264842928, + "grad_norm": 0.2982163121180696, + "learning_rate": 0.00010015580199278873, + "loss": 0.9998, + "step": 2674 + }, + { + "epoch": 0.5146550428378175, + "grad_norm": 0.4630578795642548, + "learning_rate": 0.00010009348121987794, + "loss": 1.0666, + "step": 2675 + }, + { + "epoch": 0.5148474372463551, + "grad_norm": 0.3424864477705635, + "learning_rate": 0.0001000311604106601, + "loss": 1.0087, + "step": 2676 + }, + { + "epoch": 0.5150398316548925, + "grad_norm": 0.2749273603356742, + "learning_rate": 9.996883958933992e-05, + "loss": 1.0186, + "step": 2677 + }, + { + "epoch": 0.51523222606343, + "grad_norm": 0.2751288045530157, + "learning_rate": 9.990651878012211e-05, + "loss": 1.1156, + "step": 2678 + }, + { + "epoch": 0.5154246204719676, + "grad_norm": 0.3852014664002532, + "learning_rate": 9.984419800721132e-05, + "loss": 1.0128, + "step": 2679 + }, + { + "epoch": 0.515617014880505, + "grad_norm": 0.3370617303217846, + "learning_rate": 9.97818772948122e-05, + "loss": 0.9892, + "step": 2680 + }, + { + "epoch": 0.5158094092890425, + "grad_norm": 0.36322859436231897, + "learning_rate": 9.971955666712944e-05, + "loss": 1.0977, + "step": 2681 + }, + { + "epoch": 0.5160018036975801, + "grad_norm": 0.2849679576817036, + "learning_rate": 9.965723614836764e-05, + "loss": 0.9967, + "step": 2682 + }, + { + "epoch": 0.5161941981061176, + "grad_norm": 0.32749669764721134, + "learning_rate": 9.959491576273139e-05, + "loss": 1.0315, + "step": 2683 + }, + { + "epoch": 0.516386592514655, + "grad_norm": 0.2980660635842453, + "learning_rate": 9.953259553442518e-05, + "loss": 1.0892, + "step": 2684 + }, + { + "epoch": 0.5165789869231926, + "grad_norm": 0.3130228369711336, + "learning_rate": 9.947027548765348e-05, + "loss": 1.1218, + "step": 2685 + }, + { + "epoch": 0.5167713813317301, + "grad_norm": 0.31813700200054223, + "learning_rate": 9.940795564662063e-05, + "loss": 1.1224, + "step": 2686 + }, + { + "epoch": 0.5169637757402675, + "grad_norm": 0.29391047872832265, + "learning_rate": 9.934563603553102e-05, + "loss": 1.0267, + "step": 2687 + }, + { + "epoch": 0.5171561701488051, + "grad_norm": 0.3142164764833463, + "learning_rate": 9.928331667858886e-05, + "loss": 1.0478, + "step": 2688 + }, + { + "epoch": 0.5173485645573426, + "grad_norm": 0.3191948303423121, + "learning_rate": 9.922099759999822e-05, + "loss": 1.0669, + "step": 2689 + }, + { + "epoch": 0.51754095896588, + "grad_norm": 0.3188475774024312, + "learning_rate": 9.915867882396315e-05, + "loss": 1.0429, + "step": 2690 + }, + { + "epoch": 0.5177333533744175, + "grad_norm": 0.2909051017286928, + "learning_rate": 9.909636037468753e-05, + "loss": 1.0452, + "step": 2691 + }, + { + "epoch": 0.5179257477829551, + "grad_norm": 0.30835000947059094, + "learning_rate": 9.903404227637508e-05, + "loss": 0.972, + "step": 2692 + }, + { + "epoch": 0.5181181421914925, + "grad_norm": 0.41145153340626306, + "learning_rate": 9.897172455322953e-05, + "loss": 1.0764, + "step": 2693 + }, + { + "epoch": 0.51831053660003, + "grad_norm": 0.39122752702358476, + "learning_rate": 9.890940722945428e-05, + "loss": 1.0617, + "step": 2694 + }, + { + "epoch": 0.5185029310085676, + "grad_norm": 0.39610199342283875, + "learning_rate": 9.884709032925273e-05, + "loss": 1.0784, + "step": 2695 + }, + { + "epoch": 0.5186953254171051, + "grad_norm": 0.3397739407149826, + "learning_rate": 9.878477387682803e-05, + "loss": 0.9895, + "step": 2696 + }, + { + "epoch": 
0.5188877198256425, + "grad_norm": 0.3009485489557237, + "learning_rate": 9.872245789638308e-05, + "loss": 1.0658, + "step": 2697 + }, + { + "epoch": 0.5190801142341801, + "grad_norm": 0.2802731105031589, + "learning_rate": 9.866014241212079e-05, + "loss": 1.0106, + "step": 2698 + }, + { + "epoch": 0.5192725086427176, + "grad_norm": 0.2824960458523637, + "learning_rate": 9.859782744824376e-05, + "loss": 1.0747, + "step": 2699 + }, + { + "epoch": 0.519464903051255, + "grad_norm": 0.2927205430549778, + "learning_rate": 9.85355130289544e-05, + "loss": 1.0738, + "step": 2700 + }, + { + "epoch": 0.5196572974597926, + "grad_norm": 0.32586985138104474, + "learning_rate": 9.84731991784549e-05, + "loss": 1.0894, + "step": 2701 + }, + { + "epoch": 0.5198496918683301, + "grad_norm": 0.2859626837060444, + "learning_rate": 9.841088592094725e-05, + "loss": 1.0044, + "step": 2702 + }, + { + "epoch": 0.5200420862768675, + "grad_norm": 0.3431843579012332, + "learning_rate": 9.834857328063316e-05, + "loss": 1.0482, + "step": 2703 + }, + { + "epoch": 0.5202344806854051, + "grad_norm": 0.32562736513819074, + "learning_rate": 9.828626128171421e-05, + "loss": 1.0656, + "step": 2704 + }, + { + "epoch": 0.5204268750939426, + "grad_norm": 0.2618859474564516, + "learning_rate": 9.822394994839165e-05, + "loss": 1.087, + "step": 2705 + }, + { + "epoch": 0.52061926950248, + "grad_norm": 0.27248647838384255, + "learning_rate": 9.816163930486642e-05, + "loss": 1.0671, + "step": 2706 + }, + { + "epoch": 0.5208116639110176, + "grad_norm": 0.3484565938222194, + "learning_rate": 9.809932937533934e-05, + "loss": 1.1142, + "step": 2707 + }, + { + "epoch": 0.5210040583195551, + "grad_norm": 0.2768075250068996, + "learning_rate": 9.803702018401083e-05, + "loss": 1.0704, + "step": 2708 + }, + { + "epoch": 0.5211964527280926, + "grad_norm": 0.3144587533028435, + "learning_rate": 9.7974711755081e-05, + "loss": 1.0815, + "step": 2709 + }, + { + "epoch": 0.5213888471366301, + "grad_norm": 0.27773867743775527, + "learning_rate": 9.791240411274982e-05, + "loss": 1.0781, + "step": 2710 + }, + { + "epoch": 0.5215812415451676, + "grad_norm": 0.3125349683222927, + "learning_rate": 9.785009728121686e-05, + "loss": 1.0468, + "step": 2711 + }, + { + "epoch": 0.5217736359537051, + "grad_norm": 0.3185643889770487, + "learning_rate": 9.778779128468132e-05, + "loss": 1.0691, + "step": 2712 + }, + { + "epoch": 0.5219660303622427, + "grad_norm": 0.2479368912483558, + "learning_rate": 9.772548614734217e-05, + "loss": 1.0711, + "step": 2713 + }, + { + "epoch": 0.5221584247707801, + "grad_norm": 0.36081129278836305, + "learning_rate": 9.766318189339797e-05, + "loss": 1.0686, + "step": 2714 + }, + { + "epoch": 0.5223508191793176, + "grad_norm": 0.28520848572758933, + "learning_rate": 9.760087854704698e-05, + "loss": 1.0164, + "step": 2715 + }, + { + "epoch": 0.522543213587855, + "grad_norm": 0.33016854865971146, + "learning_rate": 9.753857613248714e-05, + "loss": 1.1244, + "step": 2716 + }, + { + "epoch": 0.5227356079963926, + "grad_norm": 0.3292921902789938, + "learning_rate": 9.747627467391596e-05, + "loss": 1.0906, + "step": 2717 + }, + { + "epoch": 0.5229280024049301, + "grad_norm": 0.29538709272466723, + "learning_rate": 9.741397419553063e-05, + "loss": 1.05, + "step": 2718 + }, + { + "epoch": 0.5231203968134676, + "grad_norm": 0.3110439058725263, + "learning_rate": 9.735167472152791e-05, + "loss": 0.9908, + "step": 2719 + }, + { + "epoch": 0.5233127912220051, + "grad_norm": 0.3158559888079872, + "learning_rate": 9.728937627610424e-05, + "loss": 
1.0779, + "step": 2720 + }, + { + "epoch": 0.5235051856305426, + "grad_norm": 0.286200450977235, + "learning_rate": 9.722707888345552e-05, + "loss": 1.0948, + "step": 2721 + }, + { + "epoch": 0.5236975800390801, + "grad_norm": 0.28452082130062206, + "learning_rate": 9.716478256777747e-05, + "loss": 1.0167, + "step": 2722 + }, + { + "epoch": 0.5238899744476176, + "grad_norm": 0.2979739181344111, + "learning_rate": 9.710248735326519e-05, + "loss": 1.0223, + "step": 2723 + }, + { + "epoch": 0.5240823688561551, + "grad_norm": 0.33157498620222703, + "learning_rate": 9.704019326411343e-05, + "loss": 1.0609, + "step": 2724 + }, + { + "epoch": 0.5242747632646926, + "grad_norm": 0.3655860162714879, + "learning_rate": 9.697790032451651e-05, + "loss": 1.0378, + "step": 2725 + }, + { + "epoch": 0.5244671576732302, + "grad_norm": 0.3881593466629942, + "learning_rate": 9.691560855866826e-05, + "loss": 1.1249, + "step": 2726 + }, + { + "epoch": 0.5246595520817676, + "grad_norm": 0.301132490633845, + "learning_rate": 9.685331799076209e-05, + "loss": 1.0453, + "step": 2727 + }, + { + "epoch": 0.5248519464903051, + "grad_norm": 0.26668304761279166, + "learning_rate": 9.679102864499099e-05, + "loss": 1.1148, + "step": 2728 + }, + { + "epoch": 0.5250443408988427, + "grad_norm": 0.3376845559654377, + "learning_rate": 9.672874054554738e-05, + "loss": 1.1388, + "step": 2729 + }, + { + "epoch": 0.5252367353073801, + "grad_norm": 0.33459083057765177, + "learning_rate": 9.666645371662324e-05, + "loss": 1.1106, + "step": 2730 + }, + { + "epoch": 0.5254291297159176, + "grad_norm": 0.26500515784677214, + "learning_rate": 9.660416818241007e-05, + "loss": 1.035, + "step": 2731 + }, + { + "epoch": 0.5256215241244552, + "grad_norm": 0.33717677608962726, + "learning_rate": 9.654188396709882e-05, + "loss": 1.1347, + "step": 2732 + }, + { + "epoch": 0.5258139185329926, + "grad_norm": 0.3369284977260401, + "learning_rate": 9.647960109488003e-05, + "loss": 1.1271, + "step": 2733 + }, + { + "epoch": 0.5260063129415301, + "grad_norm": 0.311167262364201, + "learning_rate": 9.641731958994359e-05, + "loss": 1.0501, + "step": 2734 + }, + { + "epoch": 0.5261987073500677, + "grad_norm": 0.27073114029613154, + "learning_rate": 9.635503947647894e-05, + "loss": 1.1564, + "step": 2735 + }, + { + "epoch": 0.5263911017586052, + "grad_norm": 0.3227954134919288, + "learning_rate": 9.629276077867497e-05, + "loss": 1.0419, + "step": 2736 + }, + { + "epoch": 0.5265834961671426, + "grad_norm": 0.29174345704097504, + "learning_rate": 9.623048352071998e-05, + "loss": 1.0503, + "step": 2737 + }, + { + "epoch": 0.5267758905756802, + "grad_norm": 0.30989115194163197, + "learning_rate": 9.616820772680173e-05, + "loss": 1.031, + "step": 2738 + }, + { + "epoch": 0.5269682849842177, + "grad_norm": 0.30412929310668163, + "learning_rate": 9.610593342110746e-05, + "loss": 1.0234, + "step": 2739 + }, + { + "epoch": 0.5271606793927551, + "grad_norm": 0.2926047636961451, + "learning_rate": 9.604366062782381e-05, + "loss": 1.0525, + "step": 2740 + }, + { + "epoch": 0.5273530738012927, + "grad_norm": 0.4168850642209728, + "learning_rate": 9.598138937113676e-05, + "loss": 0.9568, + "step": 2741 + }, + { + "epoch": 0.5275454682098302, + "grad_norm": 0.3078878204238304, + "learning_rate": 9.591911967523179e-05, + "loss": 1.0086, + "step": 2742 + }, + { + "epoch": 0.5277378626183676, + "grad_norm": 0.24207485728750425, + "learning_rate": 9.585685156429369e-05, + "loss": 1.0984, + "step": 2743 + }, + { + "epoch": 0.5279302570269051, + "grad_norm": 0.3322540073081415, 
+ "learning_rate": 9.579458506250669e-05, + "loss": 1.0875, + "step": 2744 + }, + { + "epoch": 0.5281226514354427, + "grad_norm": 0.26985254397003916, + "learning_rate": 9.573232019405441e-05, + "loss": 1.0612, + "step": 2745 + }, + { + "epoch": 0.5283150458439801, + "grad_norm": 0.27146483728943643, + "learning_rate": 9.567005698311981e-05, + "loss": 1.1319, + "step": 2746 + }, + { + "epoch": 0.5285074402525176, + "grad_norm": 0.31292999571775915, + "learning_rate": 9.560779545388517e-05, + "loss": 1.0504, + "step": 2747 + }, + { + "epoch": 0.5286998346610552, + "grad_norm": 0.382001753843954, + "learning_rate": 9.554553563053215e-05, + "loss": 1.044, + "step": 2748 + }, + { + "epoch": 0.5288922290695927, + "grad_norm": 0.315149746667852, + "learning_rate": 9.54832775372418e-05, + "loss": 1.0604, + "step": 2749 + }, + { + "epoch": 0.5290846234781301, + "grad_norm": 0.3229981866016716, + "learning_rate": 9.542102119819434e-05, + "loss": 1.1197, + "step": 2750 + }, + { + "epoch": 0.5292770178866677, + "grad_norm": 0.3187424846693923, + "learning_rate": 9.535876663756954e-05, + "loss": 1.078, + "step": 2751 + }, + { + "epoch": 0.5294694122952052, + "grad_norm": 0.3615588509829996, + "learning_rate": 9.529651387954627e-05, + "loss": 1.0651, + "step": 2752 + }, + { + "epoch": 0.5296618067037426, + "grad_norm": 0.31526782514947366, + "learning_rate": 9.523426294830284e-05, + "loss": 1.0682, + "step": 2753 + }, + { + "epoch": 0.5298542011122802, + "grad_norm": 0.28015305728954015, + "learning_rate": 9.517201386801675e-05, + "loss": 1.1625, + "step": 2754 + }, + { + "epoch": 0.5300465955208177, + "grad_norm": 0.27640149620410465, + "learning_rate": 9.510976666286484e-05, + "loss": 1.1234, + "step": 2755 + }, + { + "epoch": 0.5302389899293551, + "grad_norm": 0.2843590225786937, + "learning_rate": 9.504752135702317e-05, + "loss": 1.0188, + "step": 2756 + }, + { + "epoch": 0.5304313843378927, + "grad_norm": 0.3227943181420178, + "learning_rate": 9.498527797466718e-05, + "loss": 1.0053, + "step": 2757 + }, + { + "epoch": 0.5306237787464302, + "grad_norm": 0.3720956893322388, + "learning_rate": 9.492303653997146e-05, + "loss": 1.0461, + "step": 2758 + }, + { + "epoch": 0.5308161731549677, + "grad_norm": 0.34037755114619667, + "learning_rate": 9.48607970771098e-05, + "loss": 1.0244, + "step": 2759 + }, + { + "epoch": 0.5310085675635052, + "grad_norm": 0.3133560430418856, + "learning_rate": 9.479855961025536e-05, + "loss": 1.0927, + "step": 2760 + }, + { + "epoch": 0.5312009619720427, + "grad_norm": 0.36269324634286576, + "learning_rate": 9.473632416358044e-05, + "loss": 1.0558, + "step": 2761 + }, + { + "epoch": 0.5313933563805802, + "grad_norm": 0.2903963671912293, + "learning_rate": 9.467409076125652e-05, + "loss": 1.1751, + "step": 2762 + }, + { + "epoch": 0.5315857507891177, + "grad_norm": 0.3160080584023893, + "learning_rate": 9.461185942745443e-05, + "loss": 1.0913, + "step": 2763 + }, + { + "epoch": 0.5317781451976552, + "grad_norm": 0.33624050187679827, + "learning_rate": 9.454963018634402e-05, + "loss": 0.8959, + "step": 2764 + }, + { + "epoch": 0.5319705396061927, + "grad_norm": 0.30220024132171014, + "learning_rate": 9.448740306209446e-05, + "loss": 1.0349, + "step": 2765 + }, + { + "epoch": 0.5321629340147302, + "grad_norm": 0.27063084653526037, + "learning_rate": 9.442517807887403e-05, + "loss": 1.0956, + "step": 2766 + }, + { + "epoch": 0.5323553284232677, + "grad_norm": 0.30000972601986187, + "learning_rate": 9.436295526085015e-05, + "loss": 1.1024, + "step": 2767 + }, + { + "epoch": 
0.5325477228318052, + "grad_norm": 0.30672501398732777, + "learning_rate": 9.430073463218951e-05, + "loss": 0.9967, + "step": 2768 + }, + { + "epoch": 0.5327401172403426, + "grad_norm": 0.29709113816444055, + "learning_rate": 9.42385162170579e-05, + "loss": 0.9671, + "step": 2769 + }, + { + "epoch": 0.5329325116488802, + "grad_norm": 0.297654727456846, + "learning_rate": 9.417630003962018e-05, + "loss": 1.0928, + "step": 2770 + }, + { + "epoch": 0.5331249060574177, + "grad_norm": 0.3028868168277822, + "learning_rate": 9.411408612404042e-05, + "loss": 1.0585, + "step": 2771 + }, + { + "epoch": 0.5333173004659552, + "grad_norm": 0.3616804117522196, + "learning_rate": 9.405187449448179e-05, + "loss": 1.0256, + "step": 2772 + }, + { + "epoch": 0.5335096948744927, + "grad_norm": 0.3291472649954265, + "learning_rate": 9.398966517510653e-05, + "loss": 1.1663, + "step": 2773 + }, + { + "epoch": 0.5337020892830302, + "grad_norm": 0.29427033422444704, + "learning_rate": 9.392745819007611e-05, + "loss": 1.0456, + "step": 2774 + }, + { + "epoch": 0.5338944836915677, + "grad_norm": 0.29669086798730604, + "learning_rate": 9.386525356355094e-05, + "loss": 1.0459, + "step": 2775 + }, + { + "epoch": 0.5340868781001052, + "grad_norm": 0.31207769708090366, + "learning_rate": 9.38030513196906e-05, + "loss": 1.0466, + "step": 2776 + }, + { + "epoch": 0.5342792725086427, + "grad_norm": 0.37966080780743977, + "learning_rate": 9.37408514826537e-05, + "loss": 1.0014, + "step": 2777 + }, + { + "epoch": 0.5344716669171802, + "grad_norm": 0.26385790450294977, + "learning_rate": 9.3678654076598e-05, + "loss": 1.0082, + "step": 2778 + }, + { + "epoch": 0.5346640613257178, + "grad_norm": 0.3218037031651044, + "learning_rate": 9.361645912568016e-05, + "loss": 1.0125, + "step": 2779 + }, + { + "epoch": 0.5348564557342552, + "grad_norm": 0.30518090575277684, + "learning_rate": 9.355426665405606e-05, + "loss": 1.0402, + "step": 2780 + }, + { + "epoch": 0.5350488501427927, + "grad_norm": 0.3041275829512792, + "learning_rate": 9.349207668588052e-05, + "loss": 1.0473, + "step": 2781 + }, + { + "epoch": 0.5352412445513303, + "grad_norm": 0.2707767455349892, + "learning_rate": 9.342988924530742e-05, + "loss": 1.1194, + "step": 2782 + }, + { + "epoch": 0.5354336389598677, + "grad_norm": 0.2970468228020865, + "learning_rate": 9.336770435648964e-05, + "loss": 1.0255, + "step": 2783 + }, + { + "epoch": 0.5356260333684052, + "grad_norm": 0.31351423831171826, + "learning_rate": 9.330552204357904e-05, + "loss": 1.0481, + "step": 2784 + }, + { + "epoch": 0.5358184277769428, + "grad_norm": 0.3845474184550142, + "learning_rate": 9.324334233072648e-05, + "loss": 0.9651, + "step": 2785 + }, + { + "epoch": 0.5360108221854802, + "grad_norm": 0.281176962524045, + "learning_rate": 9.318116524208198e-05, + "loss": 1.0951, + "step": 2786 + }, + { + "epoch": 0.5362032165940177, + "grad_norm": 0.2914111318352684, + "learning_rate": 9.311899080179433e-05, + "loss": 1.0827, + "step": 2787 + }, + { + "epoch": 0.5363956110025553, + "grad_norm": 0.3564120679051981, + "learning_rate": 9.305681903401132e-05, + "loss": 1.0194, + "step": 2788 + }, + { + "epoch": 0.5365880054110927, + "grad_norm": 0.24276411649445256, + "learning_rate": 9.299464996287983e-05, + "loss": 1.0857, + "step": 2789 + }, + { + "epoch": 0.5367803998196302, + "grad_norm": 0.30739416618655285, + "learning_rate": 9.293248361254556e-05, + "loss": 0.9993, + "step": 2790 + }, + { + "epoch": 0.5369727942281678, + "grad_norm": 0.4960691729042597, + "learning_rate": 9.287032000715318e-05, + 
"loss": 1.0502, + "step": 2791 + }, + { + "epoch": 0.5371651886367053, + "grad_norm": 0.31124029201715964, + "learning_rate": 9.28081591708464e-05, + "loss": 1.0489, + "step": 2792 + }, + { + "epoch": 0.5373575830452427, + "grad_norm": 0.2740784549147511, + "learning_rate": 9.274600112776769e-05, + "loss": 1.0375, + "step": 2793 + }, + { + "epoch": 0.5375499774537803, + "grad_norm": 0.27141699117857976, + "learning_rate": 9.268384590205858e-05, + "loss": 1.0799, + "step": 2794 + }, + { + "epoch": 0.5377423718623178, + "grad_norm": 0.32863385363595055, + "learning_rate": 9.262169351785943e-05, + "loss": 1.042, + "step": 2795 + }, + { + "epoch": 0.5379347662708552, + "grad_norm": 0.3299091517342768, + "learning_rate": 9.255954399930947e-05, + "loss": 0.9722, + "step": 2796 + }, + { + "epoch": 0.5381271606793927, + "grad_norm": 0.32779796428934976, + "learning_rate": 9.249739737054686e-05, + "loss": 1.135, + "step": 2797 + }, + { + "epoch": 0.5383195550879303, + "grad_norm": 0.31454421970021534, + "learning_rate": 9.24352536557087e-05, + "loss": 1.057, + "step": 2798 + }, + { + "epoch": 0.5385119494964677, + "grad_norm": 0.26750739049993183, + "learning_rate": 9.237311287893086e-05, + "loss": 1.0418, + "step": 2799 + }, + { + "epoch": 0.5387043439050052, + "grad_norm": 0.2731401553352244, + "learning_rate": 9.231097506434807e-05, + "loss": 1.0911, + "step": 2800 + }, + { + "epoch": 0.5388967383135428, + "grad_norm": 0.29354989195287307, + "learning_rate": 9.224884023609397e-05, + "loss": 1.0442, + "step": 2801 + }, + { + "epoch": 0.5390891327220803, + "grad_norm": 0.35260721010315077, + "learning_rate": 9.218670841830098e-05, + "loss": 0.9677, + "step": 2802 + }, + { + "epoch": 0.5392815271306177, + "grad_norm": 0.2802246546142186, + "learning_rate": 9.212457963510044e-05, + "loss": 1.1106, + "step": 2803 + }, + { + "epoch": 0.5394739215391553, + "grad_norm": 0.3358029617604148, + "learning_rate": 9.206245391062244e-05, + "loss": 1.1165, + "step": 2804 + }, + { + "epoch": 0.5396663159476928, + "grad_norm": 0.3166541082824362, + "learning_rate": 9.200033126899584e-05, + "loss": 1.043, + "step": 2805 + }, + { + "epoch": 0.5398587103562302, + "grad_norm": 0.30795967955353276, + "learning_rate": 9.193821173434842e-05, + "loss": 1.0437, + "step": 2806 + }, + { + "epoch": 0.5400511047647678, + "grad_norm": 0.3327238498922548, + "learning_rate": 9.187609533080667e-05, + "loss": 1.0617, + "step": 2807 + }, + { + "epoch": 0.5402434991733053, + "grad_norm": 0.36117726524740024, + "learning_rate": 9.181398208249583e-05, + "loss": 1.0449, + "step": 2808 + }, + { + "epoch": 0.5404358935818427, + "grad_norm": 0.2822965590599794, + "learning_rate": 9.175187201354004e-05, + "loss": 1.0556, + "step": 2809 + }, + { + "epoch": 0.5406282879903803, + "grad_norm": 0.34551883558973434, + "learning_rate": 9.168976514806216e-05, + "loss": 1.0436, + "step": 2810 + }, + { + "epoch": 0.5408206823989178, + "grad_norm": 0.3276728211785904, + "learning_rate": 9.162766151018372e-05, + "loss": 1.0218, + "step": 2811 + }, + { + "epoch": 0.5410130768074553, + "grad_norm": 0.2809564264393135, + "learning_rate": 9.156556112402507e-05, + "loss": 1.1238, + "step": 2812 + }, + { + "epoch": 0.5412054712159928, + "grad_norm": 0.32923288760595176, + "learning_rate": 9.150346401370527e-05, + "loss": 1.0828, + "step": 2813 + }, + { + "epoch": 0.5413978656245303, + "grad_norm": 0.30982590545276434, + "learning_rate": 9.144137020334214e-05, + "loss": 1.1131, + "step": 2814 + }, + { + "epoch": 0.5415902600330678, + "grad_norm": 
0.2968755265338655, + "learning_rate": 9.137927971705222e-05, + "loss": 1.008, + "step": 2815 + }, + { + "epoch": 0.5417826544416053, + "grad_norm": 0.2661602971152169, + "learning_rate": 9.131719257895074e-05, + "loss": 1.1025, + "step": 2816 + }, + { + "epoch": 0.5419750488501428, + "grad_norm": 0.31899816939093295, + "learning_rate": 9.125510881315158e-05, + "loss": 1.0911, + "step": 2817 + }, + { + "epoch": 0.5421674432586803, + "grad_norm": 0.31194303994512873, + "learning_rate": 9.119302844376741e-05, + "loss": 1.0762, + "step": 2818 + }, + { + "epoch": 0.5423598376672178, + "grad_norm": 0.30479570213560897, + "learning_rate": 9.113095149490952e-05, + "loss": 1.0359, + "step": 2819 + }, + { + "epoch": 0.5425522320757553, + "grad_norm": 0.3249136726390395, + "learning_rate": 9.106887799068782e-05, + "loss": 1.0743, + "step": 2820 + }, + { + "epoch": 0.5427446264842928, + "grad_norm": 0.3636120345725698, + "learning_rate": 9.100680795521104e-05, + "loss": 1.0495, + "step": 2821 + }, + { + "epoch": 0.5429370208928304, + "grad_norm": 0.2848339273714078, + "learning_rate": 9.09447414125864e-05, + "loss": 1.0268, + "step": 2822 + }, + { + "epoch": 0.5431294153013678, + "grad_norm": 0.2857247053117168, + "learning_rate": 9.088267838691986e-05, + "loss": 1.0178, + "step": 2823 + }, + { + "epoch": 0.5433218097099053, + "grad_norm": 0.3487514775644445, + "learning_rate": 9.0820618902316e-05, + "loss": 1.0549, + "step": 2824 + }, + { + "epoch": 0.5435142041184428, + "grad_norm": 0.3300207026944498, + "learning_rate": 9.075856298287795e-05, + "loss": 0.943, + "step": 2825 + }, + { + "epoch": 0.5437065985269803, + "grad_norm": 0.311900787783297, + "learning_rate": 9.069651065270752e-05, + "loss": 1.0415, + "step": 2826 + }, + { + "epoch": 0.5438989929355178, + "grad_norm": 0.2603950382145914, + "learning_rate": 9.06344619359052e-05, + "loss": 1.0866, + "step": 2827 + }, + { + "epoch": 0.5440913873440553, + "grad_norm": 0.33786791580426334, + "learning_rate": 9.057241685656995e-05, + "loss": 1.0252, + "step": 2828 + }, + { + "epoch": 0.5442837817525928, + "grad_norm": 0.37359857562233495, + "learning_rate": 9.051037543879932e-05, + "loss": 1.003, + "step": 2829 + }, + { + "epoch": 0.5444761761611303, + "grad_norm": 0.2969417967748203, + "learning_rate": 9.044833770668956e-05, + "loss": 1.1049, + "step": 2830 + }, + { + "epoch": 0.5446685705696678, + "grad_norm": 0.31430576504033775, + "learning_rate": 9.038630368433535e-05, + "loss": 1.0444, + "step": 2831 + }, + { + "epoch": 0.5448609649782054, + "grad_norm": 0.31783943377008056, + "learning_rate": 9.032427339583e-05, + "loss": 1.1372, + "step": 2832 + }, + { + "epoch": 0.5450533593867428, + "grad_norm": 0.2957587310922381, + "learning_rate": 9.026224686526538e-05, + "loss": 1.0809, + "step": 2833 + }, + { + "epoch": 0.5452457537952803, + "grad_norm": 0.27822396836129787, + "learning_rate": 9.020022411673187e-05, + "loss": 1.031, + "step": 2834 + }, + { + "epoch": 0.5454381482038179, + "grad_norm": 0.300938603064847, + "learning_rate": 9.01382051743184e-05, + "loss": 1.1484, + "step": 2835 + }, + { + "epoch": 0.5456305426123553, + "grad_norm": 0.31966311928483815, + "learning_rate": 9.007619006211241e-05, + "loss": 1.079, + "step": 2836 + }, + { + "epoch": 0.5458229370208928, + "grad_norm": 0.2749902856625382, + "learning_rate": 9.00141788041998e-05, + "loss": 1.0762, + "step": 2837 + }, + { + "epoch": 0.5460153314294304, + "grad_norm": 0.3276725006229355, + "learning_rate": 8.99521714246651e-05, + "loss": 1.012, + "step": 2838 + }, + { + 
"epoch": 0.5462077258379678, + "grad_norm": 0.35701775296019184, + "learning_rate": 8.989016794759127e-05, + "loss": 1.1177, + "step": 2839 + }, + { + "epoch": 0.5464001202465053, + "grad_norm": 0.34521768665052943, + "learning_rate": 8.982816839705969e-05, + "loss": 1.0087, + "step": 2840 + }, + { + "epoch": 0.5465925146550429, + "grad_norm": 0.31296761727600764, + "learning_rate": 8.976617279715031e-05, + "loss": 1.0251, + "step": 2841 + }, + { + "epoch": 0.5467849090635803, + "grad_norm": 0.2877284160127235, + "learning_rate": 8.970418117194146e-05, + "loss": 1.1764, + "step": 2842 + }, + { + "epoch": 0.5469773034721178, + "grad_norm": 0.3491685011984224, + "learning_rate": 8.964219354550999e-05, + "loss": 1.0731, + "step": 2843 + }, + { + "epoch": 0.5471696978806554, + "grad_norm": 0.33580397212952245, + "learning_rate": 8.958020994193124e-05, + "loss": 1.0638, + "step": 2844 + }, + { + "epoch": 0.5473620922891929, + "grad_norm": 0.3692386492957108, + "learning_rate": 8.951823038527888e-05, + "loss": 1.0258, + "step": 2845 + }, + { + "epoch": 0.5475544866977303, + "grad_norm": 0.2894896857350827, + "learning_rate": 8.945625489962503e-05, + "loss": 1.0558, + "step": 2846 + }, + { + "epoch": 0.5477468811062679, + "grad_norm": 0.2858192761845107, + "learning_rate": 8.93942835090403e-05, + "loss": 1.008, + "step": 2847 + }, + { + "epoch": 0.5479392755148054, + "grad_norm": 0.25297580804698744, + "learning_rate": 8.933231623759365e-05, + "loss": 1.0477, + "step": 2848 + }, + { + "epoch": 0.5481316699233428, + "grad_norm": 0.2966275998196543, + "learning_rate": 8.927035310935242e-05, + "loss": 1.0029, + "step": 2849 + }, + { + "epoch": 0.5483240643318803, + "grad_norm": 0.4199080177543365, + "learning_rate": 8.920839414838242e-05, + "loss": 0.9477, + "step": 2850 + }, + { + "epoch": 0.5485164587404179, + "grad_norm": 0.3665132119139109, + "learning_rate": 8.914643937874778e-05, + "loss": 1.0608, + "step": 2851 + }, + { + "epoch": 0.5487088531489553, + "grad_norm": 0.2966821211781953, + "learning_rate": 8.908448882451104e-05, + "loss": 0.9727, + "step": 2852 + }, + { + "epoch": 0.5489012475574928, + "grad_norm": 0.3102070609033994, + "learning_rate": 8.902254250973305e-05, + "loss": 1.0988, + "step": 2853 + }, + { + "epoch": 0.5490936419660304, + "grad_norm": 0.294385063859879, + "learning_rate": 8.896060045847304e-05, + "loss": 1.1261, + "step": 2854 + }, + { + "epoch": 0.5492860363745679, + "grad_norm": 0.30667231559898744, + "learning_rate": 8.889866269478859e-05, + "loss": 1.0785, + "step": 2855 + }, + { + "epoch": 0.5494784307831053, + "grad_norm": 0.3517437039855562, + "learning_rate": 8.883672924273566e-05, + "loss": 1.0161, + "step": 2856 + }, + { + "epoch": 0.5496708251916429, + "grad_norm": 0.37631058254100214, + "learning_rate": 8.877480012636847e-05, + "loss": 1.0885, + "step": 2857 + }, + { + "epoch": 0.5498632196001804, + "grad_norm": 0.33444593087880226, + "learning_rate": 8.871287536973953e-05, + "loss": 1.0089, + "step": 2858 + }, + { + "epoch": 0.5500556140087178, + "grad_norm": 0.29141826135560583, + "learning_rate": 8.865095499689977e-05, + "loss": 1.0822, + "step": 2859 + }, + { + "epoch": 0.5502480084172554, + "grad_norm": 0.37836689058252226, + "learning_rate": 8.858903903189831e-05, + "loss": 1.035, + "step": 2860 + }, + { + "epoch": 0.5504404028257929, + "grad_norm": 0.2808594228189031, + "learning_rate": 8.852712749878254e-05, + "loss": 1.0318, + "step": 2861 + }, + { + "epoch": 0.5506327972343303, + "grad_norm": 0.31959706265304494, + "learning_rate": 
8.846522042159832e-05, + "loss": 1.0232, + "step": 2862 + }, + { + "epoch": 0.5508251916428679, + "grad_norm": 0.2804531424986975, + "learning_rate": 8.840331782438953e-05, + "loss": 0.9697, + "step": 2863 + }, + { + "epoch": 0.5510175860514054, + "grad_norm": 0.2676175782294614, + "learning_rate": 8.834141973119849e-05, + "loss": 1.0522, + "step": 2864 + }, + { + "epoch": 0.5512099804599428, + "grad_norm": 0.2596985852011845, + "learning_rate": 8.82795261660657e-05, + "loss": 1.0758, + "step": 2865 + }, + { + "epoch": 0.5514023748684804, + "grad_norm": 0.41373696903188156, + "learning_rate": 8.821763715302987e-05, + "loss": 1.0527, + "step": 2866 + }, + { + "epoch": 0.5515947692770179, + "grad_norm": 0.320969070662106, + "learning_rate": 8.815575271612797e-05, + "loss": 1.0917, + "step": 2867 + }, + { + "epoch": 0.5517871636855554, + "grad_norm": 0.3547632523405083, + "learning_rate": 8.809387287939528e-05, + "loss": 1.0736, + "step": 2868 + }, + { + "epoch": 0.5519795580940929, + "grad_norm": 0.44054579481182676, + "learning_rate": 8.803199766686516e-05, + "loss": 1.1252, + "step": 2869 + }, + { + "epoch": 0.5521719525026304, + "grad_norm": 0.41091653661454736, + "learning_rate": 8.797012710256923e-05, + "loss": 1.0356, + "step": 2870 + }, + { + "epoch": 0.5523643469111679, + "grad_norm": 0.2967478708848712, + "learning_rate": 8.790826121053733e-05, + "loss": 1.1003, + "step": 2871 + }, + { + "epoch": 0.5525567413197054, + "grad_norm": 0.29904638106311077, + "learning_rate": 8.784640001479741e-05, + "loss": 1.0463, + "step": 2872 + }, + { + "epoch": 0.5527491357282429, + "grad_norm": 0.3125859749474423, + "learning_rate": 8.77845435393757e-05, + "loss": 1.0916, + "step": 2873 + }, + { + "epoch": 0.5529415301367804, + "grad_norm": 0.37931912983022686, + "learning_rate": 8.772269180829653e-05, + "loss": 1.1105, + "step": 2874 + }, + { + "epoch": 0.553133924545318, + "grad_norm": 0.34877374229130115, + "learning_rate": 8.766084484558237e-05, + "loss": 1.077, + "step": 2875 + }, + { + "epoch": 0.5533263189538554, + "grad_norm": 0.26242106157551015, + "learning_rate": 8.759900267525391e-05, + "loss": 1.1204, + "step": 2876 + }, + { + "epoch": 0.5535187133623929, + "grad_norm": 0.2804202584959949, + "learning_rate": 8.753716532132992e-05, + "loss": 1.0686, + "step": 2877 + }, + { + "epoch": 0.5537111077709304, + "grad_norm": 0.3944097406678049, + "learning_rate": 8.747533280782726e-05, + "loss": 1.0112, + "step": 2878 + }, + { + "epoch": 0.5539035021794679, + "grad_norm": 0.2898151481611907, + "learning_rate": 8.741350515876104e-05, + "loss": 1.0187, + "step": 2879 + }, + { + "epoch": 0.5540958965880054, + "grad_norm": 0.2912261695515056, + "learning_rate": 8.735168239814439e-05, + "loss": 0.9711, + "step": 2880 + }, + { + "epoch": 0.5542882909965429, + "grad_norm": 0.3013843622351798, + "learning_rate": 8.728986454998858e-05, + "loss": 0.9723, + "step": 2881 + }, + { + "epoch": 0.5544806854050804, + "grad_norm": 0.3256206939588518, + "learning_rate": 8.72280516383029e-05, + "loss": 0.9352, + "step": 2882 + }, + { + "epoch": 0.5546730798136179, + "grad_norm": 0.29571597280881323, + "learning_rate": 8.716624368709477e-05, + "loss": 1.0715, + "step": 2883 + }, + { + "epoch": 0.5548654742221554, + "grad_norm": 0.3016978852087846, + "learning_rate": 8.71044407203697e-05, + "loss": 1.0182, + "step": 2884 + }, + { + "epoch": 0.555057868630693, + "grad_norm": 0.2964638439991812, + "learning_rate": 8.704264276213129e-05, + "loss": 1.0537, + "step": 2885 + }, + { + "epoch": 0.5552502630392304, + 
"grad_norm": 0.2798235691934867, + "learning_rate": 8.698084983638111e-05, + "loss": 0.9875, + "step": 2886 + }, + { + "epoch": 0.5554426574477679, + "grad_norm": 0.3035192055389043, + "learning_rate": 8.691906196711884e-05, + "loss": 1.1402, + "step": 2887 + }, + { + "epoch": 0.5556350518563055, + "grad_norm": 0.330054918020496, + "learning_rate": 8.685727917834217e-05, + "loss": 1.0881, + "step": 2888 + }, + { + "epoch": 0.5558274462648429, + "grad_norm": 0.3268851139586036, + "learning_rate": 8.679550149404685e-05, + "loss": 1.0162, + "step": 2889 + }, + { + "epoch": 0.5560198406733804, + "grad_norm": 0.3106436762216314, + "learning_rate": 8.673372893822654e-05, + "loss": 1.0518, + "step": 2890 + }, + { + "epoch": 0.556212235081918, + "grad_norm": 0.28321711918575027, + "learning_rate": 8.667196153487307e-05, + "loss": 1.0468, + "step": 2891 + }, + { + "epoch": 0.5564046294904554, + "grad_norm": 0.3593566557919029, + "learning_rate": 8.661019930797614e-05, + "loss": 1.0154, + "step": 2892 + }, + { + "epoch": 0.5565970238989929, + "grad_norm": 0.25402597082912576, + "learning_rate": 8.654844228152355e-05, + "loss": 1.0784, + "step": 2893 + }, + { + "epoch": 0.5567894183075305, + "grad_norm": 0.39642091437982707, + "learning_rate": 8.648669047950097e-05, + "loss": 1.0931, + "step": 2894 + }, + { + "epoch": 0.556981812716068, + "grad_norm": 0.3282317294342435, + "learning_rate": 8.642494392589206e-05, + "loss": 1.0096, + "step": 2895 + }, + { + "epoch": 0.5571742071246054, + "grad_norm": 0.31148082446589526, + "learning_rate": 8.63632026446785e-05, + "loss": 1.0613, + "step": 2896 + }, + { + "epoch": 0.557366601533143, + "grad_norm": 0.35293748890427656, + "learning_rate": 8.630146665983992e-05, + "loss": 1.1124, + "step": 2897 + }, + { + "epoch": 0.5575589959416805, + "grad_norm": 0.2922389511492973, + "learning_rate": 8.623973599535385e-05, + "loss": 1.0646, + "step": 2898 + }, + { + "epoch": 0.5577513903502179, + "grad_norm": 0.25579634072324514, + "learning_rate": 8.617801067519574e-05, + "loss": 1.1038, + "step": 2899 + }, + { + "epoch": 0.5579437847587555, + "grad_norm": 0.34931617057055997, + "learning_rate": 8.611629072333904e-05, + "loss": 1.0979, + "step": 2900 + }, + { + "epoch": 0.558136179167293, + "grad_norm": 0.26884340317373434, + "learning_rate": 8.605457616375503e-05, + "loss": 1.0341, + "step": 2901 + }, + { + "epoch": 0.5583285735758304, + "grad_norm": 0.3298652036955405, + "learning_rate": 8.599286702041292e-05, + "loss": 0.9975, + "step": 2902 + }, + { + "epoch": 0.5585209679843679, + "grad_norm": 0.31408532841608927, + "learning_rate": 8.593116331727987e-05, + "loss": 1.0318, + "step": 2903 + }, + { + "epoch": 0.5587133623929055, + "grad_norm": 0.2922561135077166, + "learning_rate": 8.586946507832088e-05, + "loss": 1.0685, + "step": 2904 + }, + { + "epoch": 0.5589057568014429, + "grad_norm": 0.2765400696849115, + "learning_rate": 8.580777232749883e-05, + "loss": 1.0929, + "step": 2905 + }, + { + "epoch": 0.5590981512099804, + "grad_norm": 0.3356421625174065, + "learning_rate": 8.574608508877448e-05, + "loss": 1.0141, + "step": 2906 + }, + { + "epoch": 0.559290545618518, + "grad_norm": 0.49676670681708085, + "learning_rate": 8.568440338610638e-05, + "loss": 1.1265, + "step": 2907 + }, + { + "epoch": 0.5594829400270555, + "grad_norm": 0.31312977063260056, + "learning_rate": 8.562272724345108e-05, + "loss": 1.0335, + "step": 2908 + }, + { + "epoch": 0.5596753344355929, + "grad_norm": 0.38195926565004296, + "learning_rate": 8.556105668476286e-05, + "loss": 0.9858, + 
"step": 2909 + }, + { + "epoch": 0.5598677288441305, + "grad_norm": 0.3150881476193059, + "learning_rate": 8.549939173399385e-05, + "loss": 1.0554, + "step": 2910 + }, + { + "epoch": 0.560060123252668, + "grad_norm": 0.40396081695784297, + "learning_rate": 8.5437732415094e-05, + "loss": 1.0308, + "step": 2911 + }, + { + "epoch": 0.5602525176612054, + "grad_norm": 0.3041510627946518, + "learning_rate": 8.537607875201106e-05, + "loss": 1.1022, + "step": 2912 + }, + { + "epoch": 0.560444912069743, + "grad_norm": 0.29310168589718943, + "learning_rate": 8.531443076869059e-05, + "loss": 1.0168, + "step": 2913 + }, + { + "epoch": 0.5606373064782805, + "grad_norm": 0.29803076151224067, + "learning_rate": 8.525278848907604e-05, + "loss": 1.0347, + "step": 2914 + }, + { + "epoch": 0.5608297008868179, + "grad_norm": 0.3522448099447787, + "learning_rate": 8.519115193710849e-05, + "loss": 1.0471, + "step": 2915 + }, + { + "epoch": 0.5610220952953555, + "grad_norm": 0.35125451333791413, + "learning_rate": 8.512952113672689e-05, + "loss": 1.1041, + "step": 2916 + }, + { + "epoch": 0.561214489703893, + "grad_norm": 0.33007617837125985, + "learning_rate": 8.506789611186794e-05, + "loss": 1.0859, + "step": 2917 + }, + { + "epoch": 0.5614068841124304, + "grad_norm": 0.3282077574145357, + "learning_rate": 8.500627688646607e-05, + "loss": 1.0369, + "step": 2918 + }, + { + "epoch": 0.561599278520968, + "grad_norm": 0.3574013581671308, + "learning_rate": 8.494466348445345e-05, + "loss": 1.0112, + "step": 2919 + }, + { + "epoch": 0.5617916729295055, + "grad_norm": 0.32669518195491326, + "learning_rate": 8.48830559297601e-05, + "loss": 1.0256, + "step": 2920 + }, + { + "epoch": 0.561984067338043, + "grad_norm": 0.34915107762167114, + "learning_rate": 8.48214542463136e-05, + "loss": 1.0796, + "step": 2921 + }, + { + "epoch": 0.5621764617465805, + "grad_norm": 0.34200888221421405, + "learning_rate": 8.475985845803939e-05, + "loss": 1.1363, + "step": 2922 + }, + { + "epoch": 0.562368856155118, + "grad_norm": 0.3233520630722986, + "learning_rate": 8.469826858886054e-05, + "loss": 1.0679, + "step": 2923 + }, + { + "epoch": 0.5625612505636555, + "grad_norm": 0.3220418829222748, + "learning_rate": 8.463668466269784e-05, + "loss": 1.1137, + "step": 2924 + }, + { + "epoch": 0.562753644972193, + "grad_norm": 0.3493961359395474, + "learning_rate": 8.457510670346976e-05, + "loss": 1.0761, + "step": 2925 + }, + { + "epoch": 0.5629460393807305, + "grad_norm": 0.2522171366067136, + "learning_rate": 8.451353473509253e-05, + "loss": 1.114, + "step": 2926 + }, + { + "epoch": 0.563138433789268, + "grad_norm": 0.42238325404920435, + "learning_rate": 8.445196878147996e-05, + "loss": 1.0114, + "step": 2927 + }, + { + "epoch": 0.5633308281978056, + "grad_norm": 0.30275201960687115, + "learning_rate": 8.439040886654355e-05, + "loss": 1.1037, + "step": 2928 + }, + { + "epoch": 0.563523222606343, + "grad_norm": 0.33375987212409763, + "learning_rate": 8.432885501419247e-05, + "loss": 1.0279, + "step": 2929 + }, + { + "epoch": 0.5637156170148805, + "grad_norm": 0.3221588554133989, + "learning_rate": 8.426730724833354e-05, + "loss": 1.1009, + "step": 2930 + }, + { + "epoch": 0.563908011423418, + "grad_norm": 0.37092319935498647, + "learning_rate": 8.420576559287112e-05, + "loss": 1.1381, + "step": 2931 + }, + { + "epoch": 0.5641004058319555, + "grad_norm": 0.2897348933751357, + "learning_rate": 8.414423007170741e-05, + "loss": 1.0954, + "step": 2932 + }, + { + "epoch": 0.564292800240493, + "grad_norm": 0.3659774461234699, + "learning_rate": 
8.4082700708742e-05, + "loss": 1.0984, + "step": 2933 + }, + { + "epoch": 0.5644851946490305, + "grad_norm": 0.46693933336326643, + "learning_rate": 8.402117752787226e-05, + "loss": 1.0542, + "step": 2934 + }, + { + "epoch": 0.564677589057568, + "grad_norm": 0.32568646658369843, + "learning_rate": 8.395966055299303e-05, + "loss": 1.0483, + "step": 2935 + }, + { + "epoch": 0.5648699834661055, + "grad_norm": 0.3442078687631252, + "learning_rate": 8.389814980799678e-05, + "loss": 1.0723, + "step": 2936 + }, + { + "epoch": 0.565062377874643, + "grad_norm": 0.3472095691264837, + "learning_rate": 8.38366453167736e-05, + "loss": 0.9716, + "step": 2937 + }, + { + "epoch": 0.5652547722831806, + "grad_norm": 0.27964568766938036, + "learning_rate": 8.377514710321117e-05, + "loss": 1.1544, + "step": 2938 + }, + { + "epoch": 0.565447166691718, + "grad_norm": 0.3408981584310334, + "learning_rate": 8.371365519119462e-05, + "loss": 1.131, + "step": 2939 + }, + { + "epoch": 0.5656395611002555, + "grad_norm": 0.2911958386346275, + "learning_rate": 8.365216960460676e-05, + "loss": 1.0964, + "step": 2940 + }, + { + "epoch": 0.5658319555087931, + "grad_norm": 0.3073056917380063, + "learning_rate": 8.35906903673278e-05, + "loss": 1.0181, + "step": 2941 + }, + { + "epoch": 0.5660243499173305, + "grad_norm": 0.2922271104890582, + "learning_rate": 8.352921750323562e-05, + "loss": 1.0217, + "step": 2942 + }, + { + "epoch": 0.566216744325868, + "grad_norm": 0.3290922456340551, + "learning_rate": 8.346775103620559e-05, + "loss": 1.0823, + "step": 2943 + }, + { + "epoch": 0.5664091387344056, + "grad_norm": 0.2818427572569486, + "learning_rate": 8.340629099011057e-05, + "loss": 1.0648, + "step": 2944 + }, + { + "epoch": 0.566601533142943, + "grad_norm": 0.36915445556975685, + "learning_rate": 8.334483738882089e-05, + "loss": 1.0104, + "step": 2945 + }, + { + "epoch": 0.5667939275514805, + "grad_norm": 0.27973847369457555, + "learning_rate": 8.328339025620449e-05, + "loss": 1.1257, + "step": 2946 + }, + { + "epoch": 0.5669863219600181, + "grad_norm": 0.39155358428980874, + "learning_rate": 8.322194961612667e-05, + "loss": 1.1273, + "step": 2947 + }, + { + "epoch": 0.5671787163685555, + "grad_norm": 0.325140818309315, + "learning_rate": 8.316051549245025e-05, + "loss": 1.0835, + "step": 2948 + }, + { + "epoch": 0.567371110777093, + "grad_norm": 0.3459094343374345, + "learning_rate": 8.309908790903561e-05, + "loss": 1.0567, + "step": 2949 + }, + { + "epoch": 0.5675635051856306, + "grad_norm": 0.2705266112923663, + "learning_rate": 8.303766688974046e-05, + "loss": 1.0434, + "step": 2950 + }, + { + "epoch": 0.5677558995941681, + "grad_norm": 0.2727472811050495, + "learning_rate": 8.297625245842005e-05, + "loss": 1.2022, + "step": 2951 + }, + { + "epoch": 0.5679482940027055, + "grad_norm": 0.319155395199419, + "learning_rate": 8.291484463892702e-05, + "loss": 0.9956, + "step": 2952 + }, + { + "epoch": 0.5681406884112431, + "grad_norm": 0.28603855577669535, + "learning_rate": 8.285344345511146e-05, + "loss": 1.1022, + "step": 2953 + }, + { + "epoch": 0.5683330828197806, + "grad_norm": 0.37313080220443556, + "learning_rate": 8.279204893082084e-05, + "loss": 1.1066, + "step": 2954 + }, + { + "epoch": 0.568525477228318, + "grad_norm": 0.30771450982283066, + "learning_rate": 8.273066108990018e-05, + "loss": 1.0124, + "step": 2955 + }, + { + "epoch": 0.5687178716368555, + "grad_norm": 0.2856987042573155, + "learning_rate": 8.266927995619174e-05, + "loss": 1.0554, + "step": 2956 + }, + { + "epoch": 0.5689102660453931, + 
"grad_norm": 0.3043271138540745, + "learning_rate": 8.260790555353526e-05, + "loss": 0.9545, + "step": 2957 + }, + { + "epoch": 0.5691026604539305, + "grad_norm": 0.3432701605487397, + "learning_rate": 8.254653790576787e-05, + "loss": 1.1622, + "step": 2958 + }, + { + "epoch": 0.569295054862468, + "grad_norm": 0.28219474642821746, + "learning_rate": 8.248517703672403e-05, + "loss": 1.0287, + "step": 2959 + }, + { + "epoch": 0.5694874492710056, + "grad_norm": 0.2936952098314389, + "learning_rate": 8.242382297023559e-05, + "loss": 1.209, + "step": 2960 + }, + { + "epoch": 0.569679843679543, + "grad_norm": 0.3374468229543128, + "learning_rate": 8.23624757301318e-05, + "loss": 0.9702, + "step": 2961 + }, + { + "epoch": 0.5698722380880805, + "grad_norm": 0.27543060302655936, + "learning_rate": 8.230113534023918e-05, + "loss": 0.9777, + "step": 2962 + }, + { + "epoch": 0.5700646324966181, + "grad_norm": 0.33936999941642565, + "learning_rate": 8.223980182438167e-05, + "loss": 1.0399, + "step": 2963 + }, + { + "epoch": 0.5702570269051556, + "grad_norm": 0.29275017099012624, + "learning_rate": 8.217847520638048e-05, + "loss": 1.0333, + "step": 2964 + }, + { + "epoch": 0.570449421313693, + "grad_norm": 0.31328004913119656, + "learning_rate": 8.211715551005415e-05, + "loss": 0.9984, + "step": 2965 + }, + { + "epoch": 0.5706418157222306, + "grad_norm": 0.256653750000591, + "learning_rate": 8.205584275921854e-05, + "loss": 1.0715, + "step": 2966 + }, + { + "epoch": 0.5708342101307681, + "grad_norm": 0.329455369514639, + "learning_rate": 8.199453697768686e-05, + "loss": 1.0999, + "step": 2967 + }, + { + "epoch": 0.5710266045393055, + "grad_norm": 0.28984053603135707, + "learning_rate": 8.193323818926954e-05, + "loss": 1.0483, + "step": 2968 + }, + { + "epoch": 0.5712189989478431, + "grad_norm": 0.3095215667263857, + "learning_rate": 8.187194641777431e-05, + "loss": 1.0846, + "step": 2969 + }, + { + "epoch": 0.5714113933563806, + "grad_norm": 0.30774485147540814, + "learning_rate": 8.181066168700622e-05, + "loss": 1.0476, + "step": 2970 + }, + { + "epoch": 0.571603787764918, + "grad_norm": 0.3387347512175656, + "learning_rate": 8.174938402076754e-05, + "loss": 1.0258, + "step": 2971 + }, + { + "epoch": 0.5717961821734556, + "grad_norm": 0.35602066290546375, + "learning_rate": 8.168811344285776e-05, + "loss": 1.0413, + "step": 2972 + }, + { + "epoch": 0.5719885765819931, + "grad_norm": 0.3015380111079108, + "learning_rate": 8.162684997707374e-05, + "loss": 1.0803, + "step": 2973 + }, + { + "epoch": 0.5721809709905306, + "grad_norm": 0.2963561122363027, + "learning_rate": 8.156559364720947e-05, + "loss": 1.1005, + "step": 2974 + }, + { + "epoch": 0.5723733653990681, + "grad_norm": 0.329341006860998, + "learning_rate": 8.150434447705623e-05, + "loss": 1.0479, + "step": 2975 + }, + { + "epoch": 0.5725657598076056, + "grad_norm": 0.30062702471568525, + "learning_rate": 8.144310249040246e-05, + "loss": 1.0658, + "step": 2976 + }, + { + "epoch": 0.5727581542161431, + "grad_norm": 0.2585440426067988, + "learning_rate": 8.138186771103382e-05, + "loss": 1.0614, + "step": 2977 + }, + { + "epoch": 0.5729505486246806, + "grad_norm": 0.3532383685540636, + "learning_rate": 8.132064016273324e-05, + "loss": 1.0534, + "step": 2978 + }, + { + "epoch": 0.5731429430332181, + "grad_norm": 0.35197392508075986, + "learning_rate": 8.12594198692808e-05, + "loss": 1.1429, + "step": 2979 + }, + { + "epoch": 0.5733353374417556, + "grad_norm": 0.3552647550885328, + "learning_rate": 8.119820685445372e-05, + "loss": 1.0337, + "step": 
2980 + }, + { + "epoch": 0.5735277318502932, + "grad_norm": 0.2798084376744557, + "learning_rate": 8.113700114202648e-05, + "loss": 1.0314, + "step": 2981 + }, + { + "epoch": 0.5737201262588306, + "grad_norm": 0.3122306524448876, + "learning_rate": 8.107580275577058e-05, + "loss": 1.0146, + "step": 2982 + }, + { + "epoch": 0.5739125206673681, + "grad_norm": 0.31672008977998933, + "learning_rate": 8.101461171945483e-05, + "loss": 1.1221, + "step": 2983 + }, + { + "epoch": 0.5741049150759056, + "grad_norm": 0.3142657351882375, + "learning_rate": 8.095342805684515e-05, + "loss": 1.1336, + "step": 2984 + }, + { + "epoch": 0.5742973094844431, + "grad_norm": 0.289786399207669, + "learning_rate": 8.089225179170455e-05, + "loss": 1.1975, + "step": 2985 + }, + { + "epoch": 0.5744897038929806, + "grad_norm": 0.29182138906010224, + "learning_rate": 8.083108294779314e-05, + "loss": 1.1137, + "step": 2986 + }, + { + "epoch": 0.5746820983015181, + "grad_norm": 0.3214063840165335, + "learning_rate": 8.076992154886826e-05, + "loss": 1.1256, + "step": 2987 + }, + { + "epoch": 0.5748744927100556, + "grad_norm": 0.31043283097334473, + "learning_rate": 8.070876761868426e-05, + "loss": 1.0392, + "step": 2988 + }, + { + "epoch": 0.5750668871185931, + "grad_norm": 0.3678254248700128, + "learning_rate": 8.064762118099258e-05, + "loss": 0.9888, + "step": 2989 + }, + { + "epoch": 0.5752592815271306, + "grad_norm": 0.32502399308706836, + "learning_rate": 8.058648225954188e-05, + "loss": 1.0283, + "step": 2990 + }, + { + "epoch": 0.5754516759356681, + "grad_norm": 0.46103901803142605, + "learning_rate": 8.052535087807774e-05, + "loss": 1.0449, + "step": 2991 + }, + { + "epoch": 0.5756440703442056, + "grad_norm": 0.2848126321191939, + "learning_rate": 8.046422706034293e-05, + "loss": 1.1115, + "step": 2992 + }, + { + "epoch": 0.5758364647527431, + "grad_norm": 0.3880332796301805, + "learning_rate": 8.040311083007725e-05, + "loss": 1.0332, + "step": 2993 + }, + { + "epoch": 0.5760288591612807, + "grad_norm": 0.27972481698711205, + "learning_rate": 8.034200221101746e-05, + "loss": 1.0622, + "step": 2994 + }, + { + "epoch": 0.5762212535698181, + "grad_norm": 0.29036261568266625, + "learning_rate": 8.028090122689748e-05, + "loss": 1.067, + "step": 2995 + }, + { + "epoch": 0.5764136479783556, + "grad_norm": 0.31195394162882, + "learning_rate": 8.021980790144827e-05, + "loss": 1.007, + "step": 2996 + }, + { + "epoch": 0.5766060423868932, + "grad_norm": 0.29190211397801924, + "learning_rate": 8.015872225839776e-05, + "loss": 1.0594, + "step": 2997 + }, + { + "epoch": 0.5767984367954306, + "grad_norm": 0.3460122315939625, + "learning_rate": 8.009764432147087e-05, + "loss": 1.0253, + "step": 2998 + }, + { + "epoch": 0.5769908312039681, + "grad_norm": 0.3134818257765632, + "learning_rate": 8.00365741143896e-05, + "loss": 1.083, + "step": 2999 + }, + { + "epoch": 0.5771832256125057, + "grad_norm": 0.19852239844831535, + "learning_rate": 7.997551166087292e-05, + "loss": 1.0636, + "step": 3000 + }, + { + "epoch": 0.5773756200210431, + "grad_norm": 0.32157172158787256, + "learning_rate": 7.991445698463672e-05, + "loss": 1.0905, + "step": 3001 + }, + { + "epoch": 0.5775680144295806, + "grad_norm": 0.2595311745107518, + "learning_rate": 7.985341010939402e-05, + "loss": 1.0962, + "step": 3002 + }, + { + "epoch": 0.5777604088381182, + "grad_norm": 0.2592225998516401, + "learning_rate": 7.979237105885467e-05, + "loss": 1.0211, + "step": 3003 + }, + { + "epoch": 0.5779528032466557, + "grad_norm": 0.2841325486468255, + "learning_rate": 
7.973133985672558e-05, + "loss": 1.0899, + "step": 3004 + }, + { + "epoch": 0.5781451976551931, + "grad_norm": 0.3920423417893378, + "learning_rate": 7.967031652671051e-05, + "loss": 0.988, + "step": 3005 + }, + { + "epoch": 0.5783375920637307, + "grad_norm": 0.3315538864014733, + "learning_rate": 7.960930109251023e-05, + "loss": 1.1081, + "step": 3006 + }, + { + "epoch": 0.5785299864722682, + "grad_norm": 0.38049696106104786, + "learning_rate": 7.954829357782243e-05, + "loss": 1.0139, + "step": 3007 + }, + { + "epoch": 0.5787223808808056, + "grad_norm": 0.3031004736351881, + "learning_rate": 7.948729400634178e-05, + "loss": 0.9951, + "step": 3008 + }, + { + "epoch": 0.5789147752893432, + "grad_norm": 0.3076935334332335, + "learning_rate": 7.942630240175977e-05, + "loss": 1.0051, + "step": 3009 + }, + { + "epoch": 0.5791071696978807, + "grad_norm": 0.254168632252162, + "learning_rate": 7.936531878776483e-05, + "loss": 1.0902, + "step": 3010 + }, + { + "epoch": 0.5792995641064181, + "grad_norm": 0.35922989990736226, + "learning_rate": 7.930434318804229e-05, + "loss": 1.121, + "step": 3011 + }, + { + "epoch": 0.5794919585149556, + "grad_norm": 0.38525056963375937, + "learning_rate": 7.924337562627435e-05, + "loss": 0.9958, + "step": 3012 + }, + { + "epoch": 0.5796843529234932, + "grad_norm": 0.3169114236494042, + "learning_rate": 7.918241612614017e-05, + "loss": 1.0714, + "step": 3013 + }, + { + "epoch": 0.5798767473320307, + "grad_norm": 0.35633178952283684, + "learning_rate": 7.91214647113157e-05, + "loss": 1.1072, + "step": 3014 + }, + { + "epoch": 0.5800691417405681, + "grad_norm": 0.33817475160978044, + "learning_rate": 7.906052140547373e-05, + "loss": 1.0128, + "step": 3015 + }, + { + "epoch": 0.5802615361491057, + "grad_norm": 0.4629633589105797, + "learning_rate": 7.899958623228397e-05, + "loss": 0.9721, + "step": 3016 + }, + { + "epoch": 0.5804539305576432, + "grad_norm": 0.401536867567892, + "learning_rate": 7.893865921541295e-05, + "loss": 1.0179, + "step": 3017 + }, + { + "epoch": 0.5806463249661806, + "grad_norm": 0.34724558848245557, + "learning_rate": 7.887774037852395e-05, + "loss": 1.0602, + "step": 3018 + }, + { + "epoch": 0.5808387193747182, + "grad_norm": 0.3169874278396651, + "learning_rate": 7.881682974527723e-05, + "loss": 1.0927, + "step": 3019 + }, + { + "epoch": 0.5810311137832557, + "grad_norm": 0.2858684245012883, + "learning_rate": 7.875592733932973e-05, + "loss": 1.0323, + "step": 3020 + }, + { + "epoch": 0.5812235081917931, + "grad_norm": 0.2731989063280718, + "learning_rate": 7.869503318433528e-05, + "loss": 1.0445, + "step": 3021 + }, + { + "epoch": 0.5814159026003307, + "grad_norm": 0.3103015954746588, + "learning_rate": 7.863414730394443e-05, + "loss": 1.09, + "step": 3022 + }, + { + "epoch": 0.5816082970088682, + "grad_norm": 0.30203232913115585, + "learning_rate": 7.857326972180455e-05, + "loss": 1.1297, + "step": 3023 + }, + { + "epoch": 0.5818006914174056, + "grad_norm": 0.26525352154377424, + "learning_rate": 7.85124004615598e-05, + "loss": 1.1041, + "step": 3024 + }, + { + "epoch": 0.5819930858259432, + "grad_norm": 0.31640303526051006, + "learning_rate": 7.845153954685115e-05, + "loss": 1.0337, + "step": 3025 + }, + { + "epoch": 0.5821854802344807, + "grad_norm": 0.27913055950021226, + "learning_rate": 7.839068700131623e-05, + "loss": 1.1239, + "step": 3026 + }, + { + "epoch": 0.5823778746430182, + "grad_norm": 0.28708002362777646, + "learning_rate": 7.832984284858947e-05, + "loss": 1.1184, + "step": 3027 + }, + { + "epoch": 0.5825702690515557, + 
"grad_norm": 0.3422422390565635, + "learning_rate": 7.826900711230205e-05, + "loss": 1.0974, + "step": 3028 + }, + { + "epoch": 0.5827626634600932, + "grad_norm": 0.2948247056925184, + "learning_rate": 7.820817981608185e-05, + "loss": 1.128, + "step": 3029 + }, + { + "epoch": 0.5829550578686307, + "grad_norm": 0.2803711512964012, + "learning_rate": 7.814736098355347e-05, + "loss": 1.0595, + "step": 3030 + }, + { + "epoch": 0.5831474522771682, + "grad_norm": 0.31119626206646295, + "learning_rate": 7.808655063833832e-05, + "loss": 1.0101, + "step": 3031 + }, + { + "epoch": 0.5833398466857057, + "grad_norm": 0.3009823259329833, + "learning_rate": 7.802574880405438e-05, + "loss": 1.0398, + "step": 3032 + }, + { + "epoch": 0.5835322410942432, + "grad_norm": 0.32715195524593876, + "learning_rate": 7.79649555043164e-05, + "loss": 1.1906, + "step": 3033 + }, + { + "epoch": 0.5837246355027808, + "grad_norm": 0.33079442156366184, + "learning_rate": 7.790417076273581e-05, + "loss": 1.0983, + "step": 3034 + }, + { + "epoch": 0.5839170299113182, + "grad_norm": 0.26756437043326453, + "learning_rate": 7.784339460292064e-05, + "loss": 1.1034, + "step": 3035 + }, + { + "epoch": 0.5841094243198557, + "grad_norm": 0.3008002530743136, + "learning_rate": 7.778262704847569e-05, + "loss": 1.0354, + "step": 3036 + }, + { + "epoch": 0.5843018187283932, + "grad_norm": 0.30001681408558467, + "learning_rate": 7.772186812300244e-05, + "loss": 1.0414, + "step": 3037 + }, + { + "epoch": 0.5844942131369307, + "grad_norm": 0.27750164021739476, + "learning_rate": 7.766111785009889e-05, + "loss": 0.9629, + "step": 3038 + }, + { + "epoch": 0.5846866075454682, + "grad_norm": 0.3760468994500705, + "learning_rate": 7.760037625335973e-05, + "loss": 1.0661, + "step": 3039 + }, + { + "epoch": 0.5848790019540057, + "grad_norm": 0.3308287060489697, + "learning_rate": 7.753964335637634e-05, + "loss": 1.0127, + "step": 3040 + }, + { + "epoch": 0.5850713963625432, + "grad_norm": 0.2836083274299015, + "learning_rate": 7.747891918273666e-05, + "loss": 0.9892, + "step": 3041 + }, + { + "epoch": 0.5852637907710807, + "grad_norm": 0.2986784878300844, + "learning_rate": 7.741820375602523e-05, + "loss": 1.0234, + "step": 3042 + }, + { + "epoch": 0.5854561851796182, + "grad_norm": 0.3013287247190927, + "learning_rate": 7.735749709982328e-05, + "loss": 1.075, + "step": 3043 + }, + { + "epoch": 0.5856485795881557, + "grad_norm": 0.3070801058232072, + "learning_rate": 7.729679923770854e-05, + "loss": 1.0188, + "step": 3044 + }, + { + "epoch": 0.5858409739966932, + "grad_norm": 0.3052756652102331, + "learning_rate": 7.723611019325538e-05, + "loss": 0.9934, + "step": 3045 + }, + { + "epoch": 0.5860333684052307, + "grad_norm": 0.27134755611052147, + "learning_rate": 7.717542999003471e-05, + "loss": 1.0411, + "step": 3046 + }, + { + "epoch": 0.5862257628137683, + "grad_norm": 0.3270743296681542, + "learning_rate": 7.711475865161398e-05, + "loss": 1.0282, + "step": 3047 + }, + { + "epoch": 0.5864181572223057, + "grad_norm": 0.3186476049801471, + "learning_rate": 7.705409620155734e-05, + "loss": 1.0951, + "step": 3048 + }, + { + "epoch": 0.5866105516308432, + "grad_norm": 0.31250314585223327, + "learning_rate": 7.699344266342528e-05, + "loss": 1.0227, + "step": 3049 + }, + { + "epoch": 0.5868029460393808, + "grad_norm": 0.25580630747247385, + "learning_rate": 7.693279806077503e-05, + "loss": 1.0338, + "step": 3050 + }, + { + "epoch": 0.5869953404479182, + "grad_norm": 0.31017788484111014, + "learning_rate": 7.687216241716021e-05, + "loss": 1.0808, + 
"step": 3051 + }, + { + "epoch": 0.5871877348564557, + "grad_norm": 0.29349995299992276, + "learning_rate": 7.681153575613098e-05, + "loss": 1.0587, + "step": 3052 + }, + { + "epoch": 0.5873801292649933, + "grad_norm": 0.3139967126275711, + "learning_rate": 7.675091810123404e-05, + "loss": 0.9752, + "step": 3053 + }, + { + "epoch": 0.5875725236735307, + "grad_norm": 0.2680331480372597, + "learning_rate": 7.669030947601265e-05, + "loss": 1.0127, + "step": 3054 + }, + { + "epoch": 0.5877649180820682, + "grad_norm": 0.26250042079949676, + "learning_rate": 7.662970990400646e-05, + "loss": 1.0904, + "step": 3055 + }, + { + "epoch": 0.5879573124906058, + "grad_norm": 0.36141049467350617, + "learning_rate": 7.656911940875162e-05, + "loss": 1.0701, + "step": 3056 + }, + { + "epoch": 0.5881497068991433, + "grad_norm": 0.3029203904354708, + "learning_rate": 7.650853801378084e-05, + "loss": 1.0959, + "step": 3057 + }, + { + "epoch": 0.5883421013076807, + "grad_norm": 0.28161542771351783, + "learning_rate": 7.644796574262321e-05, + "loss": 1.0199, + "step": 3058 + }, + { + "epoch": 0.5885344957162183, + "grad_norm": 0.27539699107600873, + "learning_rate": 7.638740261880423e-05, + "loss": 1.0165, + "step": 3059 + }, + { + "epoch": 0.5887268901247558, + "grad_norm": 0.3074529165873919, + "learning_rate": 7.632684866584605e-05, + "loss": 1.0613, + "step": 3060 + }, + { + "epoch": 0.5889192845332932, + "grad_norm": 0.2468202849305989, + "learning_rate": 7.626630390726703e-05, + "loss": 1.0617, + "step": 3061 + }, + { + "epoch": 0.5891116789418308, + "grad_norm": 0.3238953587284659, + "learning_rate": 7.620576836658212e-05, + "loss": 1.0019, + "step": 3062 + }, + { + "epoch": 0.5893040733503683, + "grad_norm": 0.32862481835404034, + "learning_rate": 7.614524206730259e-05, + "loss": 1.1729, + "step": 3063 + }, + { + "epoch": 0.5894964677589057, + "grad_norm": 0.3223158404509194, + "learning_rate": 7.608472503293614e-05, + "loss": 1.0441, + "step": 3064 + }, + { + "epoch": 0.5896888621674432, + "grad_norm": 0.27374881913205357, + "learning_rate": 7.60242172869869e-05, + "loss": 1.0838, + "step": 3065 + }, + { + "epoch": 0.5898812565759808, + "grad_norm": 0.3405303338348711, + "learning_rate": 7.596371885295541e-05, + "loss": 1.0497, + "step": 3066 + }, + { + "epoch": 0.5900736509845182, + "grad_norm": 0.2720651033528268, + "learning_rate": 7.590322975433857e-05, + "loss": 1.0814, + "step": 3067 + }, + { + "epoch": 0.5902660453930557, + "grad_norm": 0.2740581800085928, + "learning_rate": 7.584275001462961e-05, + "loss": 1.0741, + "step": 3068 + }, + { + "epoch": 0.5904584398015933, + "grad_norm": 0.2919123866647432, + "learning_rate": 7.578227965731819e-05, + "loss": 1.0061, + "step": 3069 + }, + { + "epoch": 0.5906508342101308, + "grad_norm": 0.26393511203481274, + "learning_rate": 7.572181870589028e-05, + "loss": 1.0526, + "step": 3070 + }, + { + "epoch": 0.5908432286186682, + "grad_norm": 0.32518174277827583, + "learning_rate": 7.56613671838282e-05, + "loss": 1.1785, + "step": 3071 + }, + { + "epoch": 0.5910356230272058, + "grad_norm": 0.2630773409765149, + "learning_rate": 7.560092511461068e-05, + "loss": 1.0677, + "step": 3072 + }, + { + "epoch": 0.5912280174357433, + "grad_norm": 0.27534005804629985, + "learning_rate": 7.554049252171269e-05, + "loss": 1.1679, + "step": 3073 + }, + { + "epoch": 0.5914204118442807, + "grad_norm": 0.3148975311461333, + "learning_rate": 7.548006942860557e-05, + "loss": 1.0578, + "step": 3074 + }, + { + "epoch": 0.5916128062528183, + "grad_norm": 0.37808263234443806, + 
"learning_rate": 7.541965585875694e-05, + "loss": 0.9947, + "step": 3075 + }, + { + "epoch": 0.5918052006613558, + "grad_norm": 0.2886138057187265, + "learning_rate": 7.535925183563073e-05, + "loss": 1.0711, + "step": 3076 + }, + { + "epoch": 0.5919975950698932, + "grad_norm": 0.35547266160823177, + "learning_rate": 7.529885738268714e-05, + "loss": 1.0901, + "step": 3077 + }, + { + "epoch": 0.5921899894784308, + "grad_norm": 0.37904667679195964, + "learning_rate": 7.523847252338274e-05, + "loss": 1.1462, + "step": 3078 + }, + { + "epoch": 0.5923823838869683, + "grad_norm": 0.37811002078595307, + "learning_rate": 7.51780972811703e-05, + "loss": 1.0059, + "step": 3079 + }, + { + "epoch": 0.5925747782955058, + "grad_norm": 0.346049962005794, + "learning_rate": 7.511773167949884e-05, + "loss": 1.0912, + "step": 3080 + }, + { + "epoch": 0.5927671727040433, + "grad_norm": 0.3892857884151602, + "learning_rate": 7.505737574181369e-05, + "loss": 1.0876, + "step": 3081 + }, + { + "epoch": 0.5929595671125808, + "grad_norm": 0.3093045855396968, + "learning_rate": 7.499702949155633e-05, + "loss": 1.0942, + "step": 3082 + }, + { + "epoch": 0.5931519615211183, + "grad_norm": 0.33446027352832136, + "learning_rate": 7.493669295216467e-05, + "loss": 1.0442, + "step": 3083 + }, + { + "epoch": 0.5933443559296558, + "grad_norm": 0.3364260486611449, + "learning_rate": 7.487636614707266e-05, + "loss": 1.1104, + "step": 3084 + }, + { + "epoch": 0.5935367503381933, + "grad_norm": 0.3575690478645272, + "learning_rate": 7.48160490997105e-05, + "loss": 1.0642, + "step": 3085 + }, + { + "epoch": 0.5937291447467308, + "grad_norm": 0.28266921046206767, + "learning_rate": 7.475574183350471e-05, + "loss": 1.0196, + "step": 3086 + }, + { + "epoch": 0.5939215391552684, + "grad_norm": 0.35033837672162804, + "learning_rate": 7.469544437187789e-05, + "loss": 1.0062, + "step": 3087 + }, + { + "epoch": 0.5941139335638058, + "grad_norm": 0.298906654512516, + "learning_rate": 7.463515673824888e-05, + "loss": 1.0135, + "step": 3088 + }, + { + "epoch": 0.5943063279723433, + "grad_norm": 0.31273259577774803, + "learning_rate": 7.457487895603272e-05, + "loss": 1.0093, + "step": 3089 + }, + { + "epoch": 0.5944987223808808, + "grad_norm": 0.3084641365259164, + "learning_rate": 7.45146110486406e-05, + "loss": 1.0624, + "step": 3090 + }, + { + "epoch": 0.5946911167894183, + "grad_norm": 0.27133471846089224, + "learning_rate": 7.44543530394799e-05, + "loss": 1.0353, + "step": 3091 + }, + { + "epoch": 0.5948835111979558, + "grad_norm": 0.3536885418529265, + "learning_rate": 7.439410495195412e-05, + "loss": 1.1073, + "step": 3092 + }, + { + "epoch": 0.5950759056064933, + "grad_norm": 0.2689452442593363, + "learning_rate": 7.433386680946288e-05, + "loss": 1.077, + "step": 3093 + }, + { + "epoch": 0.5952683000150308, + "grad_norm": 0.32634513396455317, + "learning_rate": 7.427363863540202e-05, + "loss": 1.1045, + "step": 3094 + }, + { + "epoch": 0.5954606944235683, + "grad_norm": 0.2526804935657253, + "learning_rate": 7.421342045316351e-05, + "loss": 1.0189, + "step": 3095 + }, + { + "epoch": 0.5956530888321058, + "grad_norm": 0.32919939522606007, + "learning_rate": 7.415321228613535e-05, + "loss": 1.0347, + "step": 3096 + }, + { + "epoch": 0.5958454832406433, + "grad_norm": 0.3915455066319134, + "learning_rate": 7.409301415770167e-05, + "loss": 1.0579, + "step": 3097 + }, + { + "epoch": 0.5960378776491808, + "grad_norm": 0.27195460632896507, + "learning_rate": 7.40328260912428e-05, + "loss": 0.9482, + "step": 3098 + }, + { + "epoch": 
0.5962302720577183, + "grad_norm": 0.2936165252582911, + "learning_rate": 7.397264811013506e-05, + "loss": 1.0041, + "step": 3099 + }, + { + "epoch": 0.5964226664662559, + "grad_norm": 0.3811574263997021, + "learning_rate": 7.391248023775083e-05, + "loss": 1.209, + "step": 3100 + }, + { + "epoch": 0.5966150608747933, + "grad_norm": 0.28079577432916897, + "learning_rate": 7.385232249745872e-05, + "loss": 1.0937, + "step": 3101 + }, + { + "epoch": 0.5968074552833308, + "grad_norm": 0.30109437393785105, + "learning_rate": 7.379217491262325e-05, + "loss": 0.9977, + "step": 3102 + }, + { + "epoch": 0.5969998496918684, + "grad_norm": 0.32664253263075066, + "learning_rate": 7.373203750660505e-05, + "loss": 1.048, + "step": 3103 + }, + { + "epoch": 0.5971922441004058, + "grad_norm": 0.2778939205123568, + "learning_rate": 7.367191030276079e-05, + "loss": 0.9911, + "step": 3104 + }, + { + "epoch": 0.5973846385089433, + "grad_norm": 0.4213010065390305, + "learning_rate": 7.361179332444318e-05, + "loss": 1.0832, + "step": 3105 + }, + { + "epoch": 0.5975770329174809, + "grad_norm": 0.30629462463239404, + "learning_rate": 7.355168659500095e-05, + "loss": 1.0928, + "step": 3106 + }, + { + "epoch": 0.5977694273260183, + "grad_norm": 0.2759873905420565, + "learning_rate": 7.349159013777891e-05, + "loss": 1.0361, + "step": 3107 + }, + { + "epoch": 0.5979618217345558, + "grad_norm": 0.3345478739575179, + "learning_rate": 7.343150397611782e-05, + "loss": 1.0821, + "step": 3108 + }, + { + "epoch": 0.5981542161430934, + "grad_norm": 0.3085659634945026, + "learning_rate": 7.337142813335444e-05, + "loss": 1.0104, + "step": 3109 + }, + { + "epoch": 0.5983466105516309, + "grad_norm": 0.3366321058620241, + "learning_rate": 7.33113626328215e-05, + "loss": 1.0143, + "step": 3110 + }, + { + "epoch": 0.5985390049601683, + "grad_norm": 0.3297433585855268, + "learning_rate": 7.325130749784782e-05, + "loss": 0.9981, + "step": 3111 + }, + { + "epoch": 0.5987313993687059, + "grad_norm": 0.4111893929532169, + "learning_rate": 7.319126275175801e-05, + "loss": 0.9975, + "step": 3112 + }, + { + "epoch": 0.5989237937772434, + "grad_norm": 0.2689472354130831, + "learning_rate": 7.31312284178729e-05, + "loss": 1.0536, + "step": 3113 + }, + { + "epoch": 0.5991161881857808, + "grad_norm": 0.2905316066106125, + "learning_rate": 7.307120451950901e-05, + "loss": 1.0809, + "step": 3114 + }, + { + "epoch": 0.5993085825943184, + "grad_norm": 0.29788026421655234, + "learning_rate": 7.301119107997905e-05, + "loss": 1.0956, + "step": 3115 + }, + { + "epoch": 0.5995009770028559, + "grad_norm": 0.29102152972921824, + "learning_rate": 7.295118812259145e-05, + "loss": 1.1763, + "step": 3116 + }, + { + "epoch": 0.5996933714113933, + "grad_norm": 0.3107038025831909, + "learning_rate": 7.289119567065068e-05, + "loss": 1.0535, + "step": 3117 + }, + { + "epoch": 0.5998857658199308, + "grad_norm": 0.24707902033391899, + "learning_rate": 7.283121374745715e-05, + "loss": 1.0461, + "step": 3118 + }, + { + "epoch": 0.6000781602284684, + "grad_norm": 0.3486109517776961, + "learning_rate": 7.277124237630712e-05, + "loss": 1.0346, + "step": 3119 + }, + { + "epoch": 0.6002705546370058, + "grad_norm": 0.3034906473475664, + "learning_rate": 7.271128158049283e-05, + "loss": 1.1119, + "step": 3120 + }, + { + "epoch": 0.6004629490455433, + "grad_norm": 0.33242807184737416, + "learning_rate": 7.265133138330233e-05, + "loss": 1.0505, + "step": 3121 + }, + { + "epoch": 0.6006553434540809, + "grad_norm": 0.3396780950237764, + "learning_rate": 7.259139180801955e-05, + 
"loss": 0.9709, + "step": 3122 + }, + { + "epoch": 0.6008477378626184, + "grad_norm": 0.3992152627169244, + "learning_rate": 7.253146287792434e-05, + "loss": 0.9487, + "step": 3123 + }, + { + "epoch": 0.6010401322711558, + "grad_norm": 0.2662520459623596, + "learning_rate": 7.247154461629247e-05, + "loss": 1.1125, + "step": 3124 + }, + { + "epoch": 0.6012325266796934, + "grad_norm": 0.40625749861933913, + "learning_rate": 7.241163704639546e-05, + "loss": 0.9254, + "step": 3125 + }, + { + "epoch": 0.6014249210882309, + "grad_norm": 0.32228443916868366, + "learning_rate": 7.23517401915007e-05, + "loss": 1.1109, + "step": 3126 + }, + { + "epoch": 0.6016173154967683, + "grad_norm": 0.3376790795045926, + "learning_rate": 7.229185407487148e-05, + "loss": 1.0386, + "step": 3127 + }, + { + "epoch": 0.6018097099053059, + "grad_norm": 0.2844271214485223, + "learning_rate": 7.223197871976689e-05, + "loss": 1.0249, + "step": 3128 + }, + { + "epoch": 0.6020021043138434, + "grad_norm": 0.3107249107787389, + "learning_rate": 7.21721141494417e-05, + "loss": 1.0029, + "step": 3129 + }, + { + "epoch": 0.6021944987223808, + "grad_norm": 0.3715500084873061, + "learning_rate": 7.211226038714678e-05, + "loss": 1.075, + "step": 3130 + }, + { + "epoch": 0.6023868931309184, + "grad_norm": 0.293414959678292, + "learning_rate": 7.205241745612856e-05, + "loss": 1.1004, + "step": 3131 + }, + { + "epoch": 0.6025792875394559, + "grad_norm": 0.3369710821044232, + "learning_rate": 7.199258537962936e-05, + "loss": 1.0494, + "step": 3132 + }, + { + "epoch": 0.6027716819479934, + "grad_norm": 0.2796816523812784, + "learning_rate": 7.193276418088729e-05, + "loss": 1.1183, + "step": 3133 + }, + { + "epoch": 0.6029640763565309, + "grad_norm": 0.31454650834902614, + "learning_rate": 7.187295388313617e-05, + "loss": 1.0692, + "step": 3134 + }, + { + "epoch": 0.6031564707650684, + "grad_norm": 0.29282374430420566, + "learning_rate": 7.181315450960562e-05, + "loss": 1.0385, + "step": 3135 + }, + { + "epoch": 0.6033488651736059, + "grad_norm": 0.29889728769600493, + "learning_rate": 7.175336608352112e-05, + "loss": 1.0322, + "step": 3136 + }, + { + "epoch": 0.6035412595821434, + "grad_norm": 0.4074927991287567, + "learning_rate": 7.169358862810373e-05, + "loss": 1.132, + "step": 3137 + }, + { + "epoch": 0.6037336539906809, + "grad_norm": 0.2950774894541228, + "learning_rate": 7.163382216657034e-05, + "loss": 1.031, + "step": 3138 + }, + { + "epoch": 0.6039260483992184, + "grad_norm": 0.2611881653496529, + "learning_rate": 7.157406672213356e-05, + "loss": 1.032, + "step": 3139 + }, + { + "epoch": 0.604118442807756, + "grad_norm": 0.34845492103692877, + "learning_rate": 7.151432231800172e-05, + "loss": 0.9868, + "step": 3140 + }, + { + "epoch": 0.6043108372162934, + "grad_norm": 0.3038089797497765, + "learning_rate": 7.145458897737881e-05, + "loss": 1.0732, + "step": 3141 + }, + { + "epoch": 0.6045032316248309, + "grad_norm": 0.3137752390729656, + "learning_rate": 7.139486672346466e-05, + "loss": 1.108, + "step": 3142 + }, + { + "epoch": 0.6046956260333685, + "grad_norm": 0.3694910468844507, + "learning_rate": 7.133515557945463e-05, + "loss": 1.0062, + "step": 3143 + }, + { + "epoch": 0.6048880204419059, + "grad_norm": 0.3294255713228448, + "learning_rate": 7.12754555685399e-05, + "loss": 1.04, + "step": 3144 + }, + { + "epoch": 0.6050804148504434, + "grad_norm": 0.33653669660879837, + "learning_rate": 7.121576671390722e-05, + "loss": 1.0484, + "step": 3145 + }, + { + "epoch": 0.6052728092589809, + "grad_norm": 0.3020546419585607, + 
"learning_rate": 7.115608903873905e-05, + "loss": 1.1138, + "step": 3146 + }, + { + "epoch": 0.6054652036675184, + "grad_norm": 0.3310334704837057, + "learning_rate": 7.109642256621353e-05, + "loss": 0.9903, + "step": 3147 + }, + { + "epoch": 0.6056575980760559, + "grad_norm": 0.299325820097342, + "learning_rate": 7.103676731950443e-05, + "loss": 1.1136, + "step": 3148 + }, + { + "epoch": 0.6058499924845934, + "grad_norm": 0.3068790577592849, + "learning_rate": 7.097712332178117e-05, + "loss": 1.16, + "step": 3149 + }, + { + "epoch": 0.606042386893131, + "grad_norm": 0.36793760486500554, + "learning_rate": 7.09174905962088e-05, + "loss": 1.0094, + "step": 3150 + }, + { + "epoch": 0.6062347813016684, + "grad_norm": 0.35934732007828213, + "learning_rate": 7.085786916594794e-05, + "loss": 1.1305, + "step": 3151 + }, + { + "epoch": 0.6064271757102059, + "grad_norm": 0.2848795920834444, + "learning_rate": 7.07982590541549e-05, + "loss": 1.0565, + "step": 3152 + }, + { + "epoch": 0.6066195701187435, + "grad_norm": 0.352791679113492, + "learning_rate": 7.073866028398153e-05, + "loss": 1.0512, + "step": 3153 + }, + { + "epoch": 0.6068119645272809, + "grad_norm": 0.3810594999166601, + "learning_rate": 7.067907287857535e-05, + "loss": 0.9948, + "step": 3154 + }, + { + "epoch": 0.6070043589358184, + "grad_norm": 0.320146452926806, + "learning_rate": 7.061949686107938e-05, + "loss": 1.1565, + "step": 3155 + }, + { + "epoch": 0.607196753344356, + "grad_norm": 0.2911436133839381, + "learning_rate": 7.055993225463231e-05, + "loss": 1.1068, + "step": 3156 + }, + { + "epoch": 0.6073891477528934, + "grad_norm": 0.3633095823610109, + "learning_rate": 7.050037908236831e-05, + "loss": 1.0505, + "step": 3157 + }, + { + "epoch": 0.6075815421614309, + "grad_norm": 0.33426229956446274, + "learning_rate": 7.04408373674171e-05, + "loss": 1.0152, + "step": 3158 + }, + { + "epoch": 0.6077739365699685, + "grad_norm": 0.32742563091903526, + "learning_rate": 7.03813071329041e-05, + "loss": 1.147, + "step": 3159 + }, + { + "epoch": 0.6079663309785059, + "grad_norm": 0.2790811793523364, + "learning_rate": 7.032178840195008e-05, + "loss": 1.0405, + "step": 3160 + }, + { + "epoch": 0.6081587253870434, + "grad_norm": 0.2952337042047172, + "learning_rate": 7.026228119767148e-05, + "loss": 1.0971, + "step": 3161 + }, + { + "epoch": 0.608351119795581, + "grad_norm": 0.27164504353733254, + "learning_rate": 7.020278554318023e-05, + "loss": 1.087, + "step": 3162 + }, + { + "epoch": 0.6085435142041185, + "grad_norm": 0.3558037065668216, + "learning_rate": 7.014330146158366e-05, + "loss": 1.0625, + "step": 3163 + }, + { + "epoch": 0.6087359086126559, + "grad_norm": 0.31779233447839716, + "learning_rate": 7.008382897598477e-05, + "loss": 1.0981, + "step": 3164 + }, + { + "epoch": 0.6089283030211935, + "grad_norm": 0.2657203520374658, + "learning_rate": 7.0024368109482e-05, + "loss": 0.9672, + "step": 3165 + }, + { + "epoch": 0.609120697429731, + "grad_norm": 0.2971543985820181, + "learning_rate": 6.996491888516927e-05, + "loss": 0.9752, + "step": 3166 + }, + { + "epoch": 0.6093130918382684, + "grad_norm": 0.3199890294894419, + "learning_rate": 6.990548132613592e-05, + "loss": 1.0581, + "step": 3167 + }, + { + "epoch": 0.609505486246806, + "grad_norm": 0.30185881267616743, + "learning_rate": 6.984605545546686e-05, + "loss": 1.1385, + "step": 3168 + }, + { + "epoch": 0.6096978806553435, + "grad_norm": 0.31439506448656324, + "learning_rate": 6.97866412962424e-05, + "loss": 1.0981, + "step": 3169 + }, + { + "epoch": 0.6098902750638809, + 
"grad_norm": 0.2975749646567161, + "learning_rate": 6.972723887153828e-05, + "loss": 1.1096, + "step": 3170 + }, + { + "epoch": 0.6100826694724184, + "grad_norm": 0.32543734781439976, + "learning_rate": 6.966784820442577e-05, + "loss": 1.0293, + "step": 3171 + }, + { + "epoch": 0.610275063880956, + "grad_norm": 0.301005268570769, + "learning_rate": 6.960846931797152e-05, + "loss": 1.0759, + "step": 3172 + }, + { + "epoch": 0.6104674582894934, + "grad_norm": 0.40349220413301146, + "learning_rate": 6.954910223523756e-05, + "loss": 1.1402, + "step": 3173 + }, + { + "epoch": 0.6106598526980309, + "grad_norm": 0.28264332051518454, + "learning_rate": 6.948974697928143e-05, + "loss": 1.1521, + "step": 3174 + }, + { + "epoch": 0.6108522471065685, + "grad_norm": 0.35597297323871074, + "learning_rate": 6.943040357315598e-05, + "loss": 0.9359, + "step": 3175 + }, + { + "epoch": 0.611044641515106, + "grad_norm": 0.29310468134660095, + "learning_rate": 6.937107203990952e-05, + "loss": 1.0784, + "step": 3176 + }, + { + "epoch": 0.6112370359236434, + "grad_norm": 0.31643840882070373, + "learning_rate": 6.931175240258577e-05, + "loss": 1.1196, + "step": 3177 + }, + { + "epoch": 0.611429430332181, + "grad_norm": 0.348644595596235, + "learning_rate": 6.925244468422376e-05, + "loss": 1.0125, + "step": 3178 + }, + { + "epoch": 0.6116218247407185, + "grad_norm": 0.35654473063218517, + "learning_rate": 6.919314890785793e-05, + "loss": 1.1178, + "step": 3179 + }, + { + "epoch": 0.6118142191492559, + "grad_norm": 0.3131207296767454, + "learning_rate": 6.913386509651807e-05, + "loss": 1.0278, + "step": 3180 + }, + { + "epoch": 0.6120066135577935, + "grad_norm": 0.2890381392340612, + "learning_rate": 6.907459327322934e-05, + "loss": 1.1188, + "step": 3181 + }, + { + "epoch": 0.612199007966331, + "grad_norm": 0.3468639764201469, + "learning_rate": 6.90153334610122e-05, + "loss": 1.0762, + "step": 3182 + }, + { + "epoch": 0.6123914023748684, + "grad_norm": 0.3118330279435034, + "learning_rate": 6.895608568288254e-05, + "loss": 1.077, + "step": 3183 + }, + { + "epoch": 0.612583796783406, + "grad_norm": 0.35913788631242155, + "learning_rate": 6.889684996185148e-05, + "loss": 1.0556, + "step": 3184 + }, + { + "epoch": 0.6127761911919435, + "grad_norm": 0.3073499316941991, + "learning_rate": 6.88376263209255e-05, + "loss": 1.0525, + "step": 3185 + }, + { + "epoch": 0.612968585600481, + "grad_norm": 0.27311256791698535, + "learning_rate": 6.877841478310638e-05, + "loss": 1.0137, + "step": 3186 + }, + { + "epoch": 0.6131609800090185, + "grad_norm": 0.36096549011145274, + "learning_rate": 6.871921537139116e-05, + "loss": 1.0076, + "step": 3187 + }, + { + "epoch": 0.613353374417556, + "grad_norm": 0.30737596598278466, + "learning_rate": 6.866002810877225e-05, + "loss": 1.0725, + "step": 3188 + }, + { + "epoch": 0.6135457688260935, + "grad_norm": 0.38236401188781555, + "learning_rate": 6.860085301823729e-05, + "loss": 1.1162, + "step": 3189 + }, + { + "epoch": 0.613738163234631, + "grad_norm": 0.32156577713072626, + "learning_rate": 6.854169012276923e-05, + "loss": 1.0488, + "step": 3190 + }, + { + "epoch": 0.6139305576431685, + "grad_norm": 0.30994306625391804, + "learning_rate": 6.848253944534622e-05, + "loss": 1.054, + "step": 3191 + }, + { + "epoch": 0.614122952051706, + "grad_norm": 0.349438353805856, + "learning_rate": 6.84234010089417e-05, + "loss": 1.1381, + "step": 3192 + }, + { + "epoch": 0.6143153464602435, + "grad_norm": 0.3443864850029389, + "learning_rate": 6.836427483652436e-05, + "loss": 1.0583, + "step": 3193 
+ }, + { + "epoch": 0.614507740868781, + "grad_norm": 0.30440869719274616, + "learning_rate": 6.830516095105816e-05, + "loss": 0.9697, + "step": 3194 + }, + { + "epoch": 0.6147001352773185, + "grad_norm": 0.3196110425371395, + "learning_rate": 6.824605937550223e-05, + "loss": 0.9898, + "step": 3195 + }, + { + "epoch": 0.6148925296858561, + "grad_norm": 0.2729958040406237, + "learning_rate": 6.818697013281092e-05, + "loss": 1.0265, + "step": 3196 + }, + { + "epoch": 0.6150849240943935, + "grad_norm": 0.35942339061496686, + "learning_rate": 6.812789324593386e-05, + "loss": 1.0153, + "step": 3197 + }, + { + "epoch": 0.615277318502931, + "grad_norm": 0.487622474816348, + "learning_rate": 6.80688287378158e-05, + "loss": 1.0876, + "step": 3198 + }, + { + "epoch": 0.6154697129114685, + "grad_norm": 0.3069970110857509, + "learning_rate": 6.800977663139666e-05, + "loss": 1.0567, + "step": 3199 + }, + { + "epoch": 0.615662107320006, + "grad_norm": 0.32958271293881347, + "learning_rate": 6.79507369496117e-05, + "loss": 1.0528, + "step": 3200 + }, + { + "epoch": 0.6158545017285435, + "grad_norm": 0.25059074857258157, + "learning_rate": 6.789170971539118e-05, + "loss": 1.1515, + "step": 3201 + }, + { + "epoch": 0.616046896137081, + "grad_norm": 0.29879244255037085, + "learning_rate": 6.783269495166065e-05, + "loss": 1.1002, + "step": 3202 + }, + { + "epoch": 0.6162392905456185, + "grad_norm": 0.2688747575922325, + "learning_rate": 6.777369268134076e-05, + "loss": 1.0205, + "step": 3203 + }, + { + "epoch": 0.616431684954156, + "grad_norm": 0.38096085837503346, + "learning_rate": 6.771470292734723e-05, + "loss": 0.9968, + "step": 3204 + }, + { + "epoch": 0.6166240793626935, + "grad_norm": 0.332673446138928, + "learning_rate": 6.765572571259106e-05, + "loss": 1.0151, + "step": 3205 + }, + { + "epoch": 0.616816473771231, + "grad_norm": 0.32996935799721544, + "learning_rate": 6.759676105997834e-05, + "loss": 1.0917, + "step": 3206 + }, + { + "epoch": 0.6170088681797685, + "grad_norm": 0.26740183024326003, + "learning_rate": 6.753780899241027e-05, + "loss": 1.0526, + "step": 3207 + }, + { + "epoch": 0.617201262588306, + "grad_norm": 0.3648189581681417, + "learning_rate": 6.74788695327831e-05, + "loss": 1.0182, + "step": 3208 + }, + { + "epoch": 0.6173936569968436, + "grad_norm": 0.3163587900480176, + "learning_rate": 6.741994270398826e-05, + "loss": 1.0459, + "step": 3209 + }, + { + "epoch": 0.617586051405381, + "grad_norm": 0.327200191085277, + "learning_rate": 6.736102852891227e-05, + "loss": 1.0633, + "step": 3210 + }, + { + "epoch": 0.6177784458139185, + "grad_norm": 0.336514780512139, + "learning_rate": 6.730212703043665e-05, + "loss": 1.1718, + "step": 3211 + }, + { + "epoch": 0.6179708402224561, + "grad_norm": 0.30205308924607555, + "learning_rate": 6.724323823143818e-05, + "loss": 1.0267, + "step": 3212 + }, + { + "epoch": 0.6181632346309935, + "grad_norm": 0.2748087610486462, + "learning_rate": 6.718436215478848e-05, + "loss": 1.0177, + "step": 3213 + }, + { + "epoch": 0.618355629039531, + "grad_norm": 0.3258845059063351, + "learning_rate": 6.712549882335441e-05, + "loss": 1.0164, + "step": 3214 + }, + { + "epoch": 0.6185480234480686, + "grad_norm": 0.3405240358466416, + "learning_rate": 6.70666482599978e-05, + "loss": 1.0605, + "step": 3215 + }, + { + "epoch": 0.618740417856606, + "grad_norm": 0.3316771301275599, + "learning_rate": 6.700781048757547e-05, + "loss": 1.0885, + "step": 3216 + }, + { + "epoch": 0.6189328122651435, + "grad_norm": 0.3479830177651135, + "learning_rate": 
6.69489855289394e-05, + "loss": 1.0174, + "step": 3217 + }, + { + "epoch": 0.6191252066736811, + "grad_norm": 0.33697682656409855, + "learning_rate": 6.689017340693648e-05, + "loss": 1.0733, + "step": 3218 + }, + { + "epoch": 0.6193176010822186, + "grad_norm": 0.3202779683593554, + "learning_rate": 6.683137414440872e-05, + "loss": 1.0256, + "step": 3219 + }, + { + "epoch": 0.619509995490756, + "grad_norm": 0.3330827951961716, + "learning_rate": 6.677258776419305e-05, + "loss": 0.9833, + "step": 3220 + }, + { + "epoch": 0.6197023898992936, + "grad_norm": 0.3830597198297923, + "learning_rate": 6.671381428912138e-05, + "loss": 1.0219, + "step": 3221 + }, + { + "epoch": 0.6198947843078311, + "grad_norm": 0.3510247204133472, + "learning_rate": 6.66550537420207e-05, + "loss": 1.0097, + "step": 3222 + }, + { + "epoch": 0.6200871787163685, + "grad_norm": 0.3423656046146432, + "learning_rate": 6.659630614571288e-05, + "loss": 0.9948, + "step": 3223 + }, + { + "epoch": 0.620279573124906, + "grad_norm": 0.2948880138448875, + "learning_rate": 6.653757152301487e-05, + "loss": 1.069, + "step": 3224 + }, + { + "epoch": 0.6204719675334436, + "grad_norm": 0.3815063867172276, + "learning_rate": 6.647884989673849e-05, + "loss": 1.0371, + "step": 3225 + }, + { + "epoch": 0.620664361941981, + "grad_norm": 0.3078550601774885, + "learning_rate": 6.642014128969055e-05, + "loss": 1.0692, + "step": 3226 + }, + { + "epoch": 0.6208567563505185, + "grad_norm": 0.34606514694043655, + "learning_rate": 6.63614457246728e-05, + "loss": 1.0321, + "step": 3227 + }, + { + "epoch": 0.6210491507590561, + "grad_norm": 0.41116911806315476, + "learning_rate": 6.630276322448188e-05, + "loss": 1.0745, + "step": 3228 + }, + { + "epoch": 0.6212415451675936, + "grad_norm": 0.24472103250606803, + "learning_rate": 6.624409381190945e-05, + "loss": 1.0985, + "step": 3229 + }, + { + "epoch": 0.621433939576131, + "grad_norm": 0.31352932435117903, + "learning_rate": 6.618543750974202e-05, + "loss": 1.0179, + "step": 3230 + }, + { + "epoch": 0.6216263339846686, + "grad_norm": 0.3151752206717778, + "learning_rate": 6.612679434076102e-05, + "loss": 1.0628, + "step": 3231 + }, + { + "epoch": 0.6218187283932061, + "grad_norm": 0.3065423050285731, + "learning_rate": 6.606816432774278e-05, + "loss": 1.0622, + "step": 3232 + }, + { + "epoch": 0.6220111228017435, + "grad_norm": 0.29479034068650284, + "learning_rate": 6.600954749345851e-05, + "loss": 1.107, + "step": 3233 + }, + { + "epoch": 0.6222035172102811, + "grad_norm": 0.4010322666023779, + "learning_rate": 6.595094386067429e-05, + "loss": 1.0238, + "step": 3234 + }, + { + "epoch": 0.6223959116188186, + "grad_norm": 0.4423963767043969, + "learning_rate": 6.589235345215117e-05, + "loss": 1.0631, + "step": 3235 + }, + { + "epoch": 0.622588306027356, + "grad_norm": 0.3586024441649116, + "learning_rate": 6.583377629064494e-05, + "loss": 1.0822, + "step": 3236 + }, + { + "epoch": 0.6227807004358936, + "grad_norm": 0.28908757771648613, + "learning_rate": 6.57752123989063e-05, + "loss": 1.0478, + "step": 3237 + }, + { + "epoch": 0.6229730948444311, + "grad_norm": 0.364994836191207, + "learning_rate": 6.571666179968079e-05, + "loss": 1.0865, + "step": 3238 + }, + { + "epoch": 0.6231654892529686, + "grad_norm": 0.2709653216374744, + "learning_rate": 6.56581245157088e-05, + "loss": 1.0485, + "step": 3239 + }, + { + "epoch": 0.6233578836615061, + "grad_norm": 0.3209669544971997, + "learning_rate": 6.55996005697255e-05, + "loss": 1.0074, + "step": 3240 + }, + { + "epoch": 0.6235502780700436, + "grad_norm": 
0.35443750198506224, + "learning_rate": 6.554108998446095e-05, + "loss": 1.0011, + "step": 3241 + }, + { + "epoch": 0.6237426724785811, + "grad_norm": 0.3361987303876914, + "learning_rate": 6.548259278264e-05, + "loss": 1.0608, + "step": 3242 + }, + { + "epoch": 0.6239350668871186, + "grad_norm": 0.3776409290138578, + "learning_rate": 6.542410898698226e-05, + "loss": 1.1058, + "step": 3243 + }, + { + "epoch": 0.6241274612956561, + "grad_norm": 0.3164591304376817, + "learning_rate": 6.536563862020218e-05, + "loss": 1.1111, + "step": 3244 + }, + { + "epoch": 0.6243198557041936, + "grad_norm": 0.33195484277223375, + "learning_rate": 6.530718170500895e-05, + "loss": 1.0232, + "step": 3245 + }, + { + "epoch": 0.6245122501127311, + "grad_norm": 0.32228404606207534, + "learning_rate": 6.524873826410658e-05, + "loss": 1.0066, + "step": 3246 + }, + { + "epoch": 0.6247046445212686, + "grad_norm": 0.3515019637636345, + "learning_rate": 6.519030832019383e-05, + "loss": 1.086, + "step": 3247 + }, + { + "epoch": 0.6248970389298061, + "grad_norm": 0.28337674561169207, + "learning_rate": 6.513189189596423e-05, + "loss": 1.0629, + "step": 3248 + }, + { + "epoch": 0.6250894333383437, + "grad_norm": 0.2759755152994146, + "learning_rate": 6.507348901410604e-05, + "loss": 0.9868, + "step": 3249 + }, + { + "epoch": 0.6252818277468811, + "grad_norm": 0.30357710842318314, + "learning_rate": 6.501509969730224e-05, + "loss": 1.0086, + "step": 3250 + }, + { + "epoch": 0.6254742221554186, + "grad_norm": 0.29845308238407653, + "learning_rate": 6.49567239682306e-05, + "loss": 1.1055, + "step": 3251 + }, + { + "epoch": 0.6256666165639561, + "grad_norm": 0.2740601001913743, + "learning_rate": 6.489836184956352e-05, + "loss": 1.076, + "step": 3252 + }, + { + "epoch": 0.6258590109724936, + "grad_norm": 0.28177946664996745, + "learning_rate": 6.484001336396828e-05, + "loss": 1.1389, + "step": 3253 + }, + { + "epoch": 0.6260514053810311, + "grad_norm": 0.3243522137813481, + "learning_rate": 6.478167853410668e-05, + "loss": 1.0381, + "step": 3254 + }, + { + "epoch": 0.6262437997895686, + "grad_norm": 0.24804116839338353, + "learning_rate": 6.472335738263534e-05, + "loss": 1.0058, + "step": 3255 + }, + { + "epoch": 0.6264361941981061, + "grad_norm": 0.2771303461685542, + "learning_rate": 6.466504993220548e-05, + "loss": 1.0447, + "step": 3256 + }, + { + "epoch": 0.6266285886066436, + "grad_norm": 0.36070024374836523, + "learning_rate": 6.460675620546305e-05, + "loss": 1.0533, + "step": 3257 + }, + { + "epoch": 0.6268209830151811, + "grad_norm": 0.3293182096919963, + "learning_rate": 6.454847622504867e-05, + "loss": 1.0899, + "step": 3258 + }, + { + "epoch": 0.6270133774237187, + "grad_norm": 0.31036914850569086, + "learning_rate": 6.449021001359763e-05, + "loss": 1.155, + "step": 3259 + }, + { + "epoch": 0.6272057718322561, + "grad_norm": 0.3653809423664422, + "learning_rate": 6.443195759373984e-05, + "loss": 1.1249, + "step": 3260 + }, + { + "epoch": 0.6273981662407936, + "grad_norm": 0.2988202569348402, + "learning_rate": 6.43737189880999e-05, + "loss": 1.109, + "step": 3261 + }, + { + "epoch": 0.6275905606493312, + "grad_norm": 0.2737358679990404, + "learning_rate": 6.431549421929694e-05, + "loss": 1.0509, + "step": 3262 + }, + { + "epoch": 0.6277829550578686, + "grad_norm": 0.3102302212792746, + "learning_rate": 6.42572833099448e-05, + "loss": 0.9636, + "step": 3263 + }, + { + "epoch": 0.6279753494664061, + "grad_norm": 0.29140370713712194, + "learning_rate": 6.419908628265203e-05, + "loss": 1.0477, + "step": 3264 + }, + { 
+ "epoch": 0.6281677438749437, + "grad_norm": 0.29649158880804677, + "learning_rate": 6.41409031600216e-05, + "loss": 1.1414, + "step": 3265 + }, + { + "epoch": 0.6283601382834811, + "grad_norm": 0.3382830964293109, + "learning_rate": 6.408273396465117e-05, + "loss": 1.0583, + "step": 3266 + }, + { + "epoch": 0.6285525326920186, + "grad_norm": 0.3261742714060955, + "learning_rate": 6.4024578719133e-05, + "loss": 1.0536, + "step": 3267 + }, + { + "epoch": 0.6287449271005562, + "grad_norm": 0.3211658155217184, + "learning_rate": 6.396643744605391e-05, + "loss": 1.0728, + "step": 3268 + }, + { + "epoch": 0.6289373215090936, + "grad_norm": 0.2976463044815811, + "learning_rate": 6.390831016799526e-05, + "loss": 1.0521, + "step": 3269 + }, + { + "epoch": 0.6291297159176311, + "grad_norm": 0.33262577793029213, + "learning_rate": 6.38501969075331e-05, + "loss": 1.0809, + "step": 3270 + }, + { + "epoch": 0.6293221103261687, + "grad_norm": 0.277220432009812, + "learning_rate": 6.379209768723791e-05, + "loss": 1.0484, + "step": 3271 + }, + { + "epoch": 0.6295145047347062, + "grad_norm": 0.33964435513218144, + "learning_rate": 6.373401252967475e-05, + "loss": 1.0739, + "step": 3272 + }, + { + "epoch": 0.6297068991432436, + "grad_norm": 0.28450375692843183, + "learning_rate": 6.367594145740324e-05, + "loss": 1.0327, + "step": 3273 + }, + { + "epoch": 0.6298992935517812, + "grad_norm": 0.32661271925115803, + "learning_rate": 6.361788449297747e-05, + "loss": 1.0674, + "step": 3274 + }, + { + "epoch": 0.6300916879603187, + "grad_norm": 0.315459592093191, + "learning_rate": 6.355984165894613e-05, + "loss": 1.085, + "step": 3275 + }, + { + "epoch": 0.6302840823688561, + "grad_norm": 0.253299074954734, + "learning_rate": 6.350181297785241e-05, + "loss": 1.0749, + "step": 3276 + }, + { + "epoch": 0.6304764767773936, + "grad_norm": 0.3352430348465554, + "learning_rate": 6.344379847223398e-05, + "loss": 0.9969, + "step": 3277 + }, + { + "epoch": 0.6306688711859312, + "grad_norm": 0.44857188716230184, + "learning_rate": 6.338579816462298e-05, + "loss": 1.0771, + "step": 3278 + }, + { + "epoch": 0.6308612655944686, + "grad_norm": 0.31878567263193996, + "learning_rate": 6.332781207754605e-05, + "loss": 1.0634, + "step": 3279 + }, + { + "epoch": 0.6310536600030061, + "grad_norm": 0.4220603210907803, + "learning_rate": 6.326984023352435e-05, + "loss": 1.0782, + "step": 3280 + }, + { + "epoch": 0.6312460544115437, + "grad_norm": 0.3500112673391026, + "learning_rate": 6.321188265507342e-05, + "loss": 1.0784, + "step": 3281 + }, + { + "epoch": 0.6314384488200812, + "grad_norm": 0.3317206323233369, + "learning_rate": 6.31539393647034e-05, + "loss": 1.0612, + "step": 3282 + }, + { + "epoch": 0.6316308432286186, + "grad_norm": 0.2834538104859952, + "learning_rate": 6.309601038491874e-05, + "loss": 1.0232, + "step": 3283 + }, + { + "epoch": 0.6318232376371562, + "grad_norm": 0.2876697113533709, + "learning_rate": 6.303809573821842e-05, + "loss": 1.1626, + "step": 3284 + }, + { + "epoch": 0.6320156320456937, + "grad_norm": 0.32032901784811857, + "learning_rate": 6.298019544709579e-05, + "loss": 1.0028, + "step": 3285 + }, + { + "epoch": 0.6322080264542311, + "grad_norm": 0.31393383093016775, + "learning_rate": 6.292230953403866e-05, + "loss": 1.0758, + "step": 3286 + }, + { + "epoch": 0.6324004208627687, + "grad_norm": 0.2866606285030481, + "learning_rate": 6.286443802152926e-05, + "loss": 1.0882, + "step": 3287 + }, + { + "epoch": 0.6325928152713062, + "grad_norm": 0.3566030827974186, + "learning_rate": 
6.280658093204422e-05, + "loss": 1.126, + "step": 3288 + }, + { + "epoch": 0.6327852096798436, + "grad_norm": 0.2882110299183065, + "learning_rate": 6.274873828805458e-05, + "loss": 0.9535, + "step": 3289 + }, + { + "epoch": 0.6329776040883812, + "grad_norm": 0.28770803337268513, + "learning_rate": 6.269091011202575e-05, + "loss": 1.036, + "step": 3290 + }, + { + "epoch": 0.6331699984969187, + "grad_norm": 0.3597252464793629, + "learning_rate": 6.26330964264175e-05, + "loss": 1.0505, + "step": 3291 + }, + { + "epoch": 0.6333623929054562, + "grad_norm": 0.31478618153593463, + "learning_rate": 6.257529725368405e-05, + "loss": 1.0855, + "step": 3292 + }, + { + "epoch": 0.6335547873139937, + "grad_norm": 0.3279002575403472, + "learning_rate": 6.251751261627385e-05, + "loss": 1.0276, + "step": 3293 + }, + { + "epoch": 0.6337471817225312, + "grad_norm": 0.27649089506198865, + "learning_rate": 6.245974253662987e-05, + "loss": 1.0958, + "step": 3294 + }, + { + "epoch": 0.6339395761310687, + "grad_norm": 0.3285416682656933, + "learning_rate": 6.240198703718931e-05, + "loss": 0.9991, + "step": 3295 + }, + { + "epoch": 0.6341319705396062, + "grad_norm": 0.2884009081311535, + "learning_rate": 6.234424614038375e-05, + "loss": 1.0619, + "step": 3296 + }, + { + "epoch": 0.6343243649481437, + "grad_norm": 0.3255285238518406, + "learning_rate": 6.22865198686391e-05, + "loss": 1.0076, + "step": 3297 + }, + { + "epoch": 0.6345167593566812, + "grad_norm": 0.31144663446899296, + "learning_rate": 6.22288082443755e-05, + "loss": 1.0091, + "step": 3298 + }, + { + "epoch": 0.6347091537652187, + "grad_norm": 0.3434456634130591, + "learning_rate": 6.217111129000759e-05, + "loss": 1.0673, + "step": 3299 + }, + { + "epoch": 0.6349015481737562, + "grad_norm": 0.2909305213438964, + "learning_rate": 6.211342902794413e-05, + "loss": 0.9468, + "step": 3300 + }, + { + "epoch": 0.6350939425822937, + "grad_norm": 0.2805083613239678, + "learning_rate": 6.205576148058828e-05, + "loss": 1.0027, + "step": 3301 + }, + { + "epoch": 0.6352863369908313, + "grad_norm": 0.28429525215888946, + "learning_rate": 6.199810867033746e-05, + "loss": 0.9934, + "step": 3302 + }, + { + "epoch": 0.6354787313993687, + "grad_norm": 0.3054596757460676, + "learning_rate": 6.19404706195833e-05, + "loss": 0.9942, + "step": 3303 + }, + { + "epoch": 0.6356711258079062, + "grad_norm": 0.48592238735603327, + "learning_rate": 6.188284735071177e-05, + "loss": 1.1198, + "step": 3304 + }, + { + "epoch": 0.6358635202164437, + "grad_norm": 0.31043074682586275, + "learning_rate": 6.182523888610316e-05, + "loss": 1.1537, + "step": 3305 + }, + { + "epoch": 0.6360559146249812, + "grad_norm": 0.3056883480277573, + "learning_rate": 6.176764524813187e-05, + "loss": 1.0194, + "step": 3306 + }, + { + "epoch": 0.6362483090335187, + "grad_norm": 0.3853795908283225, + "learning_rate": 6.171006645916661e-05, + "loss": 1.011, + "step": 3307 + }, + { + "epoch": 0.6364407034420562, + "grad_norm": 0.33361669580306874, + "learning_rate": 6.165250254157031e-05, + "loss": 0.9554, + "step": 3308 + }, + { + "epoch": 0.6366330978505937, + "grad_norm": 0.43820426918359007, + "learning_rate": 6.159495351770017e-05, + "loss": 1.0356, + "step": 3309 + }, + { + "epoch": 0.6368254922591312, + "grad_norm": 0.38145749560677095, + "learning_rate": 6.15374194099075e-05, + "loss": 0.989, + "step": 3310 + }, + { + "epoch": 0.6370178866676687, + "grad_norm": 0.2591346268038298, + "learning_rate": 6.147990024053796e-05, + "loss": 1.0879, + "step": 3311 + }, + { + "epoch": 0.6372102810762063, + 
"grad_norm": 0.3173659968597233, + "learning_rate": 6.142239603193127e-05, + "loss": 1.0605, + "step": 3312 + }, + { + "epoch": 0.6374026754847437, + "grad_norm": 0.3097825813450799, + "learning_rate": 6.136490680642146e-05, + "loss": 1.0771, + "step": 3313 + }, + { + "epoch": 0.6375950698932812, + "grad_norm": 0.31390018539583237, + "learning_rate": 6.130743258633667e-05, + "loss": 1.0579, + "step": 3314 + }, + { + "epoch": 0.6377874643018188, + "grad_norm": 0.33347954798568236, + "learning_rate": 6.124997339399916e-05, + "loss": 1.0346, + "step": 3315 + }, + { + "epoch": 0.6379798587103562, + "grad_norm": 0.27345445949181163, + "learning_rate": 6.11925292517255e-05, + "loss": 1.1647, + "step": 3316 + }, + { + "epoch": 0.6381722531188937, + "grad_norm": 0.2737259722260716, + "learning_rate": 6.113510018182627e-05, + "loss": 1.0115, + "step": 3317 + }, + { + "epoch": 0.6383646475274313, + "grad_norm": 0.45835645437873973, + "learning_rate": 6.107768620660632e-05, + "loss": 1.1887, + "step": 3318 + }, + { + "epoch": 0.6385570419359687, + "grad_norm": 0.38738529902933116, + "learning_rate": 6.102028734836456e-05, + "loss": 1.0255, + "step": 3319 + }, + { + "epoch": 0.6387494363445062, + "grad_norm": 0.2910298616448152, + "learning_rate": 6.0962903629394e-05, + "loss": 1.0668, + "step": 3320 + }, + { + "epoch": 0.6389418307530438, + "grad_norm": 0.341935564357545, + "learning_rate": 6.090553507198187e-05, + "loss": 1.0782, + "step": 3321 + }, + { + "epoch": 0.6391342251615812, + "grad_norm": 0.3237868686841295, + "learning_rate": 6.084818169840938e-05, + "loss": 1.037, + "step": 3322 + }, + { + "epoch": 0.6393266195701187, + "grad_norm": 0.2971280606010649, + "learning_rate": 6.079084353095201e-05, + "loss": 1.0298, + "step": 3323 + }, + { + "epoch": 0.6395190139786563, + "grad_norm": 0.38124794607764034, + "learning_rate": 6.07335205918792e-05, + "loss": 1.0519, + "step": 3324 + }, + { + "epoch": 0.6397114083871938, + "grad_norm": 0.2874092020563654, + "learning_rate": 6.0676212903454543e-05, + "loss": 1.0473, + "step": 3325 + }, + { + "epoch": 0.6399038027957312, + "grad_norm": 0.33483377221512484, + "learning_rate": 6.061892048793567e-05, + "loss": 1.1512, + "step": 3326 + }, + { + "epoch": 0.6400961972042688, + "grad_norm": 0.3253224483316621, + "learning_rate": 6.056164336757426e-05, + "loss": 1.0252, + "step": 3327 + }, + { + "epoch": 0.6402885916128063, + "grad_norm": 0.37329254897434155, + "learning_rate": 6.050438156461613e-05, + "loss": 1.0117, + "step": 3328 + }, + { + "epoch": 0.6404809860213437, + "grad_norm": 0.3605613571270172, + "learning_rate": 6.0447135101301077e-05, + "loss": 0.9967, + "step": 3329 + }, + { + "epoch": 0.6406733804298813, + "grad_norm": 0.2952491863544519, + "learning_rate": 6.038990399986302e-05, + "loss": 1.1094, + "step": 3330 + }, + { + "epoch": 0.6408657748384188, + "grad_norm": 0.3031487556295676, + "learning_rate": 6.0332688282529804e-05, + "loss": 0.9857, + "step": 3331 + }, + { + "epoch": 0.6410581692469562, + "grad_norm": 0.3723881085078127, + "learning_rate": 6.027548797152336e-05, + "loss": 1.0544, + "step": 3332 + }, + { + "epoch": 0.6412505636554937, + "grad_norm": 0.3062297014261907, + "learning_rate": 6.0218303089059626e-05, + "loss": 1.0451, + "step": 3333 + }, + { + "epoch": 0.6414429580640313, + "grad_norm": 0.3113549757993061, + "learning_rate": 6.016113365734861e-05, + "loss": 1.0768, + "step": 3334 + }, + { + "epoch": 0.6416353524725688, + "grad_norm": 0.25129588602287267, + "learning_rate": 6.010397969859421e-05, + "loss": 1.1132, + 
"step": 3335 + }, + { + "epoch": 0.6418277468811062, + "grad_norm": 0.3210328629829216, + "learning_rate": 6.004684123499436e-05, + "loss": 0.9612, + "step": 3336 + }, + { + "epoch": 0.6420201412896438, + "grad_norm": 0.3427142782343593, + "learning_rate": 5.998971828874102e-05, + "loss": 1.0178, + "step": 3337 + }, + { + "epoch": 0.6422125356981813, + "grad_norm": 0.33394889645808373, + "learning_rate": 5.9932610882020046e-05, + "loss": 1.1197, + "step": 3338 + }, + { + "epoch": 0.6424049301067187, + "grad_norm": 0.3112312897238979, + "learning_rate": 5.987551903701127e-05, + "loss": 1.0553, + "step": 3339 + }, + { + "epoch": 0.6425973245152563, + "grad_norm": 0.40182320808874666, + "learning_rate": 5.9818442775888595e-05, + "loss": 0.9917, + "step": 3340 + }, + { + "epoch": 0.6427897189237938, + "grad_norm": 0.3205377821789851, + "learning_rate": 5.97613821208197e-05, + "loss": 0.9541, + "step": 3341 + }, + { + "epoch": 0.6429821133323312, + "grad_norm": 0.32416902476703935, + "learning_rate": 5.9704337093966344e-05, + "loss": 0.9948, + "step": 3342 + }, + { + "epoch": 0.6431745077408688, + "grad_norm": 0.28765774184505194, + "learning_rate": 5.9647307717484143e-05, + "loss": 1.0529, + "step": 3343 + }, + { + "epoch": 0.6433669021494063, + "grad_norm": 0.36221757232170504, + "learning_rate": 5.959029401352262e-05, + "loss": 1.106, + "step": 3344 + }, + { + "epoch": 0.6435592965579437, + "grad_norm": 0.322293932757898, + "learning_rate": 5.9533296004225235e-05, + "loss": 1.0424, + "step": 3345 + }, + { + "epoch": 0.6437516909664813, + "grad_norm": 0.3342649613279946, + "learning_rate": 5.947631371172942e-05, + "loss": 1.0927, + "step": 3346 + }, + { + "epoch": 0.6439440853750188, + "grad_norm": 0.2914043254352665, + "learning_rate": 5.941934715816642e-05, + "loss": 1.0099, + "step": 3347 + }, + { + "epoch": 0.6441364797835563, + "grad_norm": 0.32566465982223153, + "learning_rate": 5.936239636566137e-05, + "loss": 1.0321, + "step": 3348 + }, + { + "epoch": 0.6443288741920938, + "grad_norm": 0.31998849983665684, + "learning_rate": 5.930546135633327e-05, + "loss": 1.0827, + "step": 3349 + }, + { + "epoch": 0.6445212686006313, + "grad_norm": 0.32359460296217146, + "learning_rate": 5.924854215229508e-05, + "loss": 1.0949, + "step": 3350 + }, + { + "epoch": 0.6447136630091688, + "grad_norm": 0.28767243369843587, + "learning_rate": 5.91916387756535e-05, + "loss": 1.0691, + "step": 3351 + }, + { + "epoch": 0.6449060574177063, + "grad_norm": 0.371534559297127, + "learning_rate": 5.9134751248509236e-05, + "loss": 1.1254, + "step": 3352 + }, + { + "epoch": 0.6450984518262438, + "grad_norm": 0.4585563484531112, + "learning_rate": 5.9077879592956675e-05, + "loss": 1.1102, + "step": 3353 + }, + { + "epoch": 0.6452908462347813, + "grad_norm": 0.3170741341399339, + "learning_rate": 5.902102383108414e-05, + "loss": 1.1067, + "step": 3354 + }, + { + "epoch": 0.6454832406433189, + "grad_norm": 0.3122303871654554, + "learning_rate": 5.896418398497377e-05, + "loss": 1.0845, + "step": 3355 + }, + { + "epoch": 0.6456756350518563, + "grad_norm": 0.4088156798252746, + "learning_rate": 5.890736007670144e-05, + "loss": 1.1419, + "step": 3356 + }, + { + "epoch": 0.6458680294603938, + "grad_norm": 0.34816947482301286, + "learning_rate": 5.8850552128336954e-05, + "loss": 1.0364, + "step": 3357 + }, + { + "epoch": 0.6460604238689313, + "grad_norm": 0.3482682499654772, + "learning_rate": 5.879376016194387e-05, + "loss": 1.0198, + "step": 3358 + }, + { + "epoch": 0.6462528182774688, + "grad_norm": 0.28158505635506154, + 
"learning_rate": 5.873698419957952e-05, + "loss": 0.9989, + "step": 3359 + }, + { + "epoch": 0.6464452126860063, + "grad_norm": 0.2686635351483379, + "learning_rate": 5.868022426329505e-05, + "loss": 0.9743, + "step": 3360 + }, + { + "epoch": 0.6466376070945438, + "grad_norm": 0.305189263070437, + "learning_rate": 5.862348037513532e-05, + "loss": 1.1192, + "step": 3361 + }, + { + "epoch": 0.6468300015030813, + "grad_norm": 0.27525600757420376, + "learning_rate": 5.856675255713905e-05, + "loss": 1.0303, + "step": 3362 + }, + { + "epoch": 0.6470223959116188, + "grad_norm": 0.34737589184864753, + "learning_rate": 5.851004083133862e-05, + "loss": 1.0955, + "step": 3363 + }, + { + "epoch": 0.6472147903201563, + "grad_norm": 0.319264546088472, + "learning_rate": 5.845334521976027e-05, + "loss": 1.0478, + "step": 3364 + }, + { + "epoch": 0.6474071847286939, + "grad_norm": 0.30090464359742247, + "learning_rate": 5.8396665744423885e-05, + "loss": 1.0532, + "step": 3365 + }, + { + "epoch": 0.6475995791372313, + "grad_norm": 0.3937634631338097, + "learning_rate": 5.8340002427343164e-05, + "loss": 1.1079, + "step": 3366 + }, + { + "epoch": 0.6477919735457688, + "grad_norm": 0.325883695652783, + "learning_rate": 5.828335529052541e-05, + "loss": 1.0908, + "step": 3367 + }, + { + "epoch": 0.6479843679543064, + "grad_norm": 0.38670909174801216, + "learning_rate": 5.822672435597172e-05, + "loss": 1.0535, + "step": 3368 + }, + { + "epoch": 0.6481767623628438, + "grad_norm": 0.29345715629570895, + "learning_rate": 5.817010964567702e-05, + "loss": 1.0534, + "step": 3369 + }, + { + "epoch": 0.6483691567713813, + "grad_norm": 0.29964248551292266, + "learning_rate": 5.811351118162969e-05, + "loss": 1.0095, + "step": 3370 + }, + { + "epoch": 0.6485615511799189, + "grad_norm": 0.31196840409199006, + "learning_rate": 5.8056928985811963e-05, + "loss": 1.0901, + "step": 3371 + }, + { + "epoch": 0.6487539455884563, + "grad_norm": 0.45743176378003714, + "learning_rate": 5.8000363080199736e-05, + "loss": 1.1343, + "step": 3372 + }, + { + "epoch": 0.6489463399969938, + "grad_norm": 0.2975454931728827, + "learning_rate": 5.79438134867625e-05, + "loss": 1.0949, + "step": 3373 + }, + { + "epoch": 0.6491387344055314, + "grad_norm": 0.30430171352238705, + "learning_rate": 5.7887280227463484e-05, + "loss": 1.1414, + "step": 3374 + }, + { + "epoch": 0.6493311288140688, + "grad_norm": 0.2816355779911321, + "learning_rate": 5.783076332425956e-05, + "loss": 1.0915, + "step": 3375 + }, + { + "epoch": 0.6495235232226063, + "grad_norm": 0.33298574715222334, + "learning_rate": 5.777426279910125e-05, + "loss": 1.0832, + "step": 3376 + }, + { + "epoch": 0.6497159176311439, + "grad_norm": 0.31926373221196686, + "learning_rate": 5.771777867393274e-05, + "loss": 1.1044, + "step": 3377 + }, + { + "epoch": 0.6499083120396814, + "grad_norm": 0.33555709307971854, + "learning_rate": 5.766131097069174e-05, + "loss": 1.1369, + "step": 3378 + }, + { + "epoch": 0.6501007064482188, + "grad_norm": 0.38930249397283606, + "learning_rate": 5.760485971130969e-05, + "loss": 1.16, + "step": 3379 + }, + { + "epoch": 0.6502931008567564, + "grad_norm": 0.2709197697942827, + "learning_rate": 5.75484249177116e-05, + "loss": 1.1145, + "step": 3380 + }, + { + "epoch": 0.6504854952652939, + "grad_norm": 0.2839442611120986, + "learning_rate": 5.749200661181611e-05, + "loss": 1.0224, + "step": 3381 + }, + { + "epoch": 0.6506778896738313, + "grad_norm": 0.32321847117929114, + "learning_rate": 5.743560481553547e-05, + "loss": 0.9826, + "step": 3382 + }, + { + "epoch": 
0.6508702840823689, + "grad_norm": 0.3210531190288898, + "learning_rate": 5.737921955077541e-05, + "loss": 1.0741, + "step": 3383 + }, + { + "epoch": 0.6510626784909064, + "grad_norm": 0.3433622705707647, + "learning_rate": 5.732285083943536e-05, + "loss": 1.0504, + "step": 3384 + }, + { + "epoch": 0.6512550728994438, + "grad_norm": 0.3621977358194807, + "learning_rate": 5.7266498703408325e-05, + "loss": 0.962, + "step": 3385 + }, + { + "epoch": 0.6514474673079813, + "grad_norm": 0.31938858392337316, + "learning_rate": 5.721016316458068e-05, + "loss": 1.0408, + "step": 3386 + }, + { + "epoch": 0.6516398617165189, + "grad_norm": 0.3011130459759042, + "learning_rate": 5.715384424483268e-05, + "loss": 1.062, + "step": 3387 + }, + { + "epoch": 0.6518322561250564, + "grad_norm": 0.2710309344278669, + "learning_rate": 5.7097541966037816e-05, + "loss": 1.0639, + "step": 3388 + }, + { + "epoch": 0.6520246505335938, + "grad_norm": 0.3672832366698096, + "learning_rate": 5.7041256350063286e-05, + "loss": 1.1049, + "step": 3389 + }, + { + "epoch": 0.6522170449421314, + "grad_norm": 0.3618610991051004, + "learning_rate": 5.698498741876982e-05, + "loss": 1.0615, + "step": 3390 + }, + { + "epoch": 0.6524094393506689, + "grad_norm": 0.317243270274209, + "learning_rate": 5.692873519401154e-05, + "loss": 1.0592, + "step": 3391 + }, + { + "epoch": 0.6526018337592063, + "grad_norm": 0.3164909340877395, + "learning_rate": 5.6872499697636195e-05, + "loss": 1.0266, + "step": 3392 + }, + { + "epoch": 0.6527942281677439, + "grad_norm": 0.30335243238724957, + "learning_rate": 5.681628095148501e-05, + "loss": 1.0562, + "step": 3393 + }, + { + "epoch": 0.6529866225762814, + "grad_norm": 0.30089979251518684, + "learning_rate": 5.6760078977392706e-05, + "loss": 0.9966, + "step": 3394 + }, + { + "epoch": 0.6531790169848188, + "grad_norm": 0.340657217900467, + "learning_rate": 5.67038937971875e-05, + "loss": 0.9891, + "step": 3395 + }, + { + "epoch": 0.6533714113933564, + "grad_norm": 0.3019685620085431, + "learning_rate": 5.664772543269101e-05, + "loss": 1.0971, + "step": 3396 + }, + { + "epoch": 0.6535638058018939, + "grad_norm": 0.3114039455358227, + "learning_rate": 5.659157390571842e-05, + "loss": 1.0454, + "step": 3397 + }, + { + "epoch": 0.6537562002104313, + "grad_norm": 0.339025833106066, + "learning_rate": 5.653543923807832e-05, + "loss": 1.0296, + "step": 3398 + }, + { + "epoch": 0.6539485946189689, + "grad_norm": 0.3494007175989049, + "learning_rate": 5.6479321451572784e-05, + "loss": 1.0859, + "step": 3399 + }, + { + "epoch": 0.6541409890275064, + "grad_norm": 0.26910039043189565, + "learning_rate": 5.6423220567997316e-05, + "loss": 1.0297, + "step": 3400 + }, + { + "epoch": 0.6543333834360439, + "grad_norm": 0.633069797895338, + "learning_rate": 5.636713660914087e-05, + "loss": 1.0332, + "step": 3401 + }, + { + "epoch": 0.6545257778445814, + "grad_norm": 0.3077163804044054, + "learning_rate": 5.631106959678575e-05, + "loss": 0.9649, + "step": 3402 + }, + { + "epoch": 0.6547181722531189, + "grad_norm": 0.28593647186853255, + "learning_rate": 5.625501955270776e-05, + "loss": 0.9928, + "step": 3403 + }, + { + "epoch": 0.6549105666616564, + "grad_norm": 0.28051843679710936, + "learning_rate": 5.619898649867612e-05, + "loss": 1.0593, + "step": 3404 + }, + { + "epoch": 0.655102961070194, + "grad_norm": 0.32209970322772813, + "learning_rate": 5.614297045645339e-05, + "loss": 1.012, + "step": 3405 + }, + { + "epoch": 0.6552953554787314, + "grad_norm": 0.2749127172078766, + "learning_rate": 5.6086971447795624e-05, + 
"loss": 1.0729, + "step": 3406 + }, + { + "epoch": 0.6554877498872689, + "grad_norm": 0.2986434152466038, + "learning_rate": 5.603098949445209e-05, + "loss": 1.0294, + "step": 3407 + }, + { + "epoch": 0.6556801442958065, + "grad_norm": 0.33526433260027283, + "learning_rate": 5.597502461816557e-05, + "loss": 1.0707, + "step": 3408 + }, + { + "epoch": 0.6558725387043439, + "grad_norm": 0.35657004141077403, + "learning_rate": 5.591907684067221e-05, + "loss": 1.1018, + "step": 3409 + }, + { + "epoch": 0.6560649331128814, + "grad_norm": 0.49097401316341893, + "learning_rate": 5.586314618370145e-05, + "loss": 1.1616, + "step": 3410 + }, + { + "epoch": 0.6562573275214189, + "grad_norm": 0.3289508251553119, + "learning_rate": 5.580723266897616e-05, + "loss": 1.0972, + "step": 3411 + }, + { + "epoch": 0.6564497219299564, + "grad_norm": 0.2982384439715662, + "learning_rate": 5.575133631821243e-05, + "loss": 1.0481, + "step": 3412 + }, + { + "epoch": 0.6566421163384939, + "grad_norm": 0.3354765393620317, + "learning_rate": 5.5695457153119804e-05, + "loss": 1.0972, + "step": 3413 + }, + { + "epoch": 0.6568345107470314, + "grad_norm": 0.3580272923048353, + "learning_rate": 5.563959519540114e-05, + "loss": 1.0572, + "step": 3414 + }, + { + "epoch": 0.6570269051555689, + "grad_norm": 0.27018878384234063, + "learning_rate": 5.5583750466752435e-05, + "loss": 1.0714, + "step": 3415 + }, + { + "epoch": 0.6572192995641064, + "grad_norm": 0.36090736243022997, + "learning_rate": 5.552792298886334e-05, + "loss": 1.0173, + "step": 3416 + }, + { + "epoch": 0.6574116939726439, + "grad_norm": 0.27242805653495944, + "learning_rate": 5.547211278341646e-05, + "loss": 1.0593, + "step": 3417 + }, + { + "epoch": 0.6576040883811815, + "grad_norm": 0.2957701975950919, + "learning_rate": 5.541631987208788e-05, + "loss": 1.0237, + "step": 3418 + }, + { + "epoch": 0.6577964827897189, + "grad_norm": 0.3007347148460491, + "learning_rate": 5.536054427654698e-05, + "loss": 1.0863, + "step": 3419 + }, + { + "epoch": 0.6579888771982564, + "grad_norm": 0.5546170878502891, + "learning_rate": 5.530478601845623e-05, + "loss": 0.9821, + "step": 3420 + }, + { + "epoch": 0.658181271606794, + "grad_norm": 0.2968469323713588, + "learning_rate": 5.52490451194716e-05, + "loss": 1.1035, + "step": 3421 + }, + { + "epoch": 0.6583736660153314, + "grad_norm": 0.32686786577907967, + "learning_rate": 5.5193321601242156e-05, + "loss": 1.078, + "step": 3422 + }, + { + "epoch": 0.6585660604238689, + "grad_norm": 0.32951964267826644, + "learning_rate": 5.513761548541031e-05, + "loss": 1.1353, + "step": 3423 + }, + { + "epoch": 0.6587584548324065, + "grad_norm": 0.2980474553296739, + "learning_rate": 5.5081926793611694e-05, + "loss": 1.0147, + "step": 3424 + }, + { + "epoch": 0.6589508492409439, + "grad_norm": 0.28767968988064563, + "learning_rate": 5.502625554747508e-05, + "loss": 1.1066, + "step": 3425 + }, + { + "epoch": 0.6591432436494814, + "grad_norm": 0.35069418962184845, + "learning_rate": 5.497060176862259e-05, + "loss": 1.0916, + "step": 3426 + }, + { + "epoch": 0.659335638058019, + "grad_norm": 0.30115289278766205, + "learning_rate": 5.4914965478669475e-05, + "loss": 1.1414, + "step": 3427 + }, + { + "epoch": 0.6595280324665564, + "grad_norm": 0.31274459691709966, + "learning_rate": 5.485934669922428e-05, + "loss": 1.0208, + "step": 3428 + }, + { + "epoch": 0.6597204268750939, + "grad_norm": 0.2979652159762174, + "learning_rate": 5.480374545188866e-05, + "loss": 1.067, + "step": 3429 + }, + { + "epoch": 0.6599128212836315, + "grad_norm": 
0.2778377680927135, + "learning_rate": 5.4748161758257544e-05, + "loss": 1.071, + "step": 3430 + }, + { + "epoch": 0.660105215692169, + "grad_norm": 0.3373862880581623, + "learning_rate": 5.469259563991893e-05, + "loss": 1.0109, + "step": 3431 + }, + { + "epoch": 0.6602976101007064, + "grad_norm": 0.3287814288371032, + "learning_rate": 5.4637047118454096e-05, + "loss": 1.0454, + "step": 3432 + }, + { + "epoch": 0.660490004509244, + "grad_norm": 0.3289065610296748, + "learning_rate": 5.458151621543743e-05, + "loss": 0.989, + "step": 3433 + }, + { + "epoch": 0.6606823989177815, + "grad_norm": 0.3021531338177011, + "learning_rate": 5.4526002952436526e-05, + "loss": 1.0527, + "step": 3434 + }, + { + "epoch": 0.6608747933263189, + "grad_norm": 0.4688877912363289, + "learning_rate": 5.447050735101211e-05, + "loss": 1.1106, + "step": 3435 + }, + { + "epoch": 0.6610671877348565, + "grad_norm": 0.3150331400644717, + "learning_rate": 5.441502943271797e-05, + "loss": 1.1096, + "step": 3436 + }, + { + "epoch": 0.661259582143394, + "grad_norm": 0.3228519058353967, + "learning_rate": 5.4359569219101114e-05, + "loss": 1.0497, + "step": 3437 + }, + { + "epoch": 0.6614519765519314, + "grad_norm": 0.35287891576341984, + "learning_rate": 5.4304126731701665e-05, + "loss": 1.0826, + "step": 3438 + }, + { + "epoch": 0.6616443709604689, + "grad_norm": 0.28556050017271517, + "learning_rate": 5.424870199205283e-05, + "loss": 1.0472, + "step": 3439 + }, + { + "epoch": 0.6618367653690065, + "grad_norm": 0.3707419496719028, + "learning_rate": 5.4193295021681e-05, + "loss": 1.0794, + "step": 3440 + }, + { + "epoch": 0.662029159777544, + "grad_norm": 0.3303223526555379, + "learning_rate": 5.413790584210551e-05, + "loss": 1.0782, + "step": 3441 + }, + { + "epoch": 0.6622215541860814, + "grad_norm": 0.3031476381153801, + "learning_rate": 5.408253447483892e-05, + "loss": 1.0633, + "step": 3442 + }, + { + "epoch": 0.662413948594619, + "grad_norm": 0.2584004580094845, + "learning_rate": 5.4027180941386877e-05, + "loss": 1.0665, + "step": 3443 + }, + { + "epoch": 0.6626063430031565, + "grad_norm": 0.3619300719303694, + "learning_rate": 5.397184526324792e-05, + "loss": 1.0445, + "step": 3444 + }, + { + "epoch": 0.6627987374116939, + "grad_norm": 0.37435043793646805, + "learning_rate": 5.391652746191398e-05, + "loss": 1.0261, + "step": 3445 + }, + { + "epoch": 0.6629911318202315, + "grad_norm": 0.33021817770856465, + "learning_rate": 5.3861227558869695e-05, + "loss": 1.1074, + "step": 3446 + }, + { + "epoch": 0.663183526228769, + "grad_norm": 0.28730902128760144, + "learning_rate": 5.3805945575592975e-05, + "loss": 1.0366, + "step": 3447 + }, + { + "epoch": 0.6633759206373064, + "grad_norm": 0.2989894849763508, + "learning_rate": 5.3750681533554735e-05, + "loss": 1.0828, + "step": 3448 + }, + { + "epoch": 0.663568315045844, + "grad_norm": 0.3450384306105209, + "learning_rate": 5.369543545421883e-05, + "loss": 0.9548, + "step": 3449 + }, + { + "epoch": 0.6637607094543815, + "grad_norm": 0.28340658318717854, + "learning_rate": 5.3640207359042224e-05, + "loss": 0.9798, + "step": 3450 + }, + { + "epoch": 0.663953103862919, + "grad_norm": 0.29969066035073144, + "learning_rate": 5.358499726947488e-05, + "loss": 1.0045, + "step": 3451 + }, + { + "epoch": 0.6641454982714565, + "grad_norm": 0.2877993635804976, + "learning_rate": 5.352980520695974e-05, + "loss": 1.0319, + "step": 3452 + }, + { + "epoch": 0.664337892679994, + "grad_norm": 0.3597380567808401, + "learning_rate": 5.347463119293283e-05, + "loss": 0.9405, + "step": 3453 + }, 
+ { + "epoch": 0.6645302870885315, + "grad_norm": 0.32679688968707826, + "learning_rate": 5.3419475248823014e-05, + "loss": 1.147, + "step": 3454 + }, + { + "epoch": 0.664722681497069, + "grad_norm": 0.44192355010270074, + "learning_rate": 5.3364337396052265e-05, + "loss": 1.0927, + "step": 3455 + }, + { + "epoch": 0.6649150759056065, + "grad_norm": 0.30312076736198956, + "learning_rate": 5.3309217656035496e-05, + "loss": 1.0591, + "step": 3456 + }, + { + "epoch": 0.665107470314144, + "grad_norm": 0.3016535358737794, + "learning_rate": 5.3254116050180555e-05, + "loss": 1.012, + "step": 3457 + }, + { + "epoch": 0.6652998647226815, + "grad_norm": 0.3318437075038821, + "learning_rate": 5.31990325998883e-05, + "loss": 1.0944, + "step": 3458 + }, + { + "epoch": 0.665492259131219, + "grad_norm": 0.30450078277603604, + "learning_rate": 5.314396732655253e-05, + "loss": 1.0385, + "step": 3459 + }, + { + "epoch": 0.6656846535397565, + "grad_norm": 0.2715940306666047, + "learning_rate": 5.3088920251559895e-05, + "loss": 1.1106, + "step": 3460 + }, + { + "epoch": 0.665877047948294, + "grad_norm": 0.31196214500018166, + "learning_rate": 5.303389139629007e-05, + "loss": 1.1233, + "step": 3461 + }, + { + "epoch": 0.6660694423568315, + "grad_norm": 0.416855790692469, + "learning_rate": 5.297888078211564e-05, + "loss": 1.0223, + "step": 3462 + }, + { + "epoch": 0.666261836765369, + "grad_norm": 0.3102542867239069, + "learning_rate": 5.292388843040208e-05, + "loss": 1.052, + "step": 3463 + }, + { + "epoch": 0.6664542311739065, + "grad_norm": 0.28655538199164865, + "learning_rate": 5.286891436250785e-05, + "loss": 1.0021, + "step": 3464 + }, + { + "epoch": 0.666646625582444, + "grad_norm": 0.2959347403424418, + "learning_rate": 5.281395859978414e-05, + "loss": 1.107, + "step": 3465 + }, + { + "epoch": 0.6668390199909815, + "grad_norm": 0.3403928199945639, + "learning_rate": 5.2759021163575186e-05, + "loss": 1.0195, + "step": 3466 + }, + { + "epoch": 0.667031414399519, + "grad_norm": 0.29092788411017356, + "learning_rate": 5.27041020752181e-05, + "loss": 1.1889, + "step": 3467 + }, + { + "epoch": 0.6672238088080565, + "grad_norm": 0.3396006773293743, + "learning_rate": 5.2649201356042696e-05, + "loss": 1.1107, + "step": 3468 + }, + { + "epoch": 0.667416203216594, + "grad_norm": 0.3535984837643357, + "learning_rate": 5.259431902737194e-05, + "loss": 1.0237, + "step": 3469 + }, + { + "epoch": 0.6676085976251315, + "grad_norm": 0.27173861392264975, + "learning_rate": 5.2539455110521385e-05, + "loss": 1.1442, + "step": 3470 + }, + { + "epoch": 0.667800992033669, + "grad_norm": 0.33403624635639617, + "learning_rate": 5.248460962679958e-05, + "loss": 1.0057, + "step": 3471 + }, + { + "epoch": 0.6679933864422065, + "grad_norm": 0.39720732637496525, + "learning_rate": 5.2429782597507905e-05, + "loss": 1.0161, + "step": 3472 + }, + { + "epoch": 0.668185780850744, + "grad_norm": 0.31011479046520346, + "learning_rate": 5.237497404394044e-05, + "loss": 1.0047, + "step": 3473 + }, + { + "epoch": 0.6683781752592816, + "grad_norm": 0.2985203849976459, + "learning_rate": 5.232018398738435e-05, + "loss": 1.0197, + "step": 3474 + }, + { + "epoch": 0.668570569667819, + "grad_norm": 0.31537123268295464, + "learning_rate": 5.2265412449119355e-05, + "loss": 1.0304, + "step": 3475 + }, + { + "epoch": 0.6687629640763565, + "grad_norm": 0.33981738740965567, + "learning_rate": 5.221065945041811e-05, + "loss": 1.0955, + "step": 3476 + }, + { + "epoch": 0.6689553584848941, + "grad_norm": 0.35608856289475144, + "learning_rate": 
5.215592501254609e-05, + "loss": 1.0608, + "step": 3477 + }, + { + "epoch": 0.6691477528934315, + "grad_norm": 0.2864630861142581, + "learning_rate": 5.2101209156761465e-05, + "loss": 1.1058, + "step": 3478 + }, + { + "epoch": 0.669340147301969, + "grad_norm": 0.3163995990800733, + "learning_rate": 5.2046511904315265e-05, + "loss": 1.0911, + "step": 3479 + }, + { + "epoch": 0.6695325417105066, + "grad_norm": 0.3721352096299449, + "learning_rate": 5.1991833276451275e-05, + "loss": 1.0843, + "step": 3480 + }, + { + "epoch": 0.669724936119044, + "grad_norm": 0.3721352096299449, + "learning_rate": 5.1991833276451275e-05, + "loss": 1.0888, + "step": 3481 + }, + { + "epoch": 0.6699173305275815, + "grad_norm": 0.2591002969131627, + "learning_rate": 5.193717329440604e-05, + "loss": 1.0547, + "step": 3482 + }, + { + "epoch": 0.6701097249361191, + "grad_norm": 0.34860001072649993, + "learning_rate": 5.188253197940889e-05, + "loss": 1.0656, + "step": 3483 + }, + { + "epoch": 0.6703021193446566, + "grad_norm": 0.37402840864169773, + "learning_rate": 5.182790935268185e-05, + "loss": 1.0289, + "step": 3484 + }, + { + "epoch": 0.670494513753194, + "grad_norm": 0.32250155083277543, + "learning_rate": 5.177330543543971e-05, + "loss": 0.9671, + "step": 3485 + }, + { + "epoch": 0.6706869081617316, + "grad_norm": 0.34620336151375863, + "learning_rate": 5.171872024889004e-05, + "loss": 1.049, + "step": 3486 + }, + { + "epoch": 0.6708793025702691, + "grad_norm": 0.26336357452895937, + "learning_rate": 5.1664153814233064e-05, + "loss": 0.9925, + "step": 3487 + }, + { + "epoch": 0.6710716969788065, + "grad_norm": 0.317969382613369, + "learning_rate": 5.160960615266178e-05, + "loss": 0.9775, + "step": 3488 + }, + { + "epoch": 0.6712640913873441, + "grad_norm": 0.3359508723889803, + "learning_rate": 5.155507728536191e-05, + "loss": 1.0986, + "step": 3489 + }, + { + "epoch": 0.6714564857958816, + "grad_norm": 0.32933401454688754, + "learning_rate": 5.150056723351173e-05, + "loss": 0.9926, + "step": 3490 + }, + { + "epoch": 0.671648880204419, + "grad_norm": 0.2614247531279279, + "learning_rate": 5.1446076018282396e-05, + "loss": 1.0509, + "step": 3491 + }, + { + "epoch": 0.6718412746129565, + "grad_norm": 0.38743634026232615, + "learning_rate": 5.139160366083765e-05, + "loss": 1.0221, + "step": 3492 + }, + { + "epoch": 0.6720336690214941, + "grad_norm": 0.3017251897735763, + "learning_rate": 5.133715018233393e-05, + "loss": 0.9652, + "step": 3493 + }, + { + "epoch": 0.6722260634300316, + "grad_norm": 0.38869117469922626, + "learning_rate": 5.1282715603920374e-05, + "loss": 1.1011, + "step": 3494 + }, + { + "epoch": 0.672418457838569, + "grad_norm": 0.3224318578621361, + "learning_rate": 5.1228299946738655e-05, + "loss": 1.1207, + "step": 3495 + }, + { + "epoch": 0.6726108522471066, + "grad_norm": 0.3540349437109095, + "learning_rate": 5.117390323192326e-05, + "loss": 1.0257, + "step": 3496 + }, + { + "epoch": 0.6728032466556441, + "grad_norm": 0.3352667382460881, + "learning_rate": 5.111952548060126e-05, + "loss": 1.1285, + "step": 3497 + }, + { + "epoch": 0.6729956410641815, + "grad_norm": 0.4835129632139507, + "learning_rate": 5.106516671389223e-05, + "loss": 0.9994, + "step": 3498 + }, + { + "epoch": 0.6731880354727191, + "grad_norm": 0.5230661074481276, + "learning_rate": 5.101082695290865e-05, + "loss": 1.0477, + "step": 3499 + }, + { + "epoch": 0.6733804298812566, + "grad_norm": 0.3298560235714514, + "learning_rate": 5.095650621875534e-05, + "loss": 1.0949, + "step": 3500 + }, + { + "epoch": 0.673572824289794, 
+ "grad_norm": 0.34964875895227787, + "learning_rate": 5.09022045325299e-05, + "loss": 1.0916, + "step": 3501 + }, + { + "epoch": 0.6737652186983316, + "grad_norm": 0.31618872130600445, + "learning_rate": 5.084792191532248e-05, + "loss": 1.0596, + "step": 3502 + }, + { + "epoch": 0.6739576131068691, + "grad_norm": 0.32154843125943255, + "learning_rate": 5.079365838821579e-05, + "loss": 0.9787, + "step": 3503 + }, + { + "epoch": 0.6741500075154065, + "grad_norm": 0.27095671650963016, + "learning_rate": 5.073941397228518e-05, + "loss": 1.0794, + "step": 3504 + }, + { + "epoch": 0.6743424019239441, + "grad_norm": 0.3240090981442025, + "learning_rate": 5.068518868859854e-05, + "loss": 1.0712, + "step": 3505 + }, + { + "epoch": 0.6745347963324816, + "grad_norm": 0.3172136719068298, + "learning_rate": 5.0630982558216363e-05, + "loss": 1.0907, + "step": 3506 + }, + { + "epoch": 0.6747271907410191, + "grad_norm": 0.2982821252582512, + "learning_rate": 5.057679560219173e-05, + "loss": 1.0676, + "step": 3507 + }, + { + "epoch": 0.6749195851495566, + "grad_norm": 0.30414175589834497, + "learning_rate": 5.052262784157014e-05, + "loss": 0.9762, + "step": 3508 + }, + { + "epoch": 0.6751119795580941, + "grad_norm": 0.28099959197297614, + "learning_rate": 5.046847929738971e-05, + "loss": 1.0931, + "step": 3509 + }, + { + "epoch": 0.6753043739666316, + "grad_norm": 0.2699023791158635, + "learning_rate": 5.041434999068126e-05, + "loss": 1.0864, + "step": 3510 + }, + { + "epoch": 0.6754967683751691, + "grad_norm": 0.29990433839149544, + "learning_rate": 5.036023994246787e-05, + "loss": 1.1284, + "step": 3511 + }, + { + "epoch": 0.6756891627837066, + "grad_norm": 0.3503481004861861, + "learning_rate": 5.0306149173765316e-05, + "loss": 1.0889, + "step": 3512 + }, + { + "epoch": 0.6758815571922441, + "grad_norm": 0.3040101789968439, + "learning_rate": 5.0252077705581755e-05, + "loss": 1.0654, + "step": 3513 + }, + { + "epoch": 0.6760739516007817, + "grad_norm": 0.37375892634670016, + "learning_rate": 5.019802555891798e-05, + "loss": 0.9018, + "step": 3514 + }, + { + "epoch": 0.6762663460093191, + "grad_norm": 0.31041405079337997, + "learning_rate": 5.014399275476721e-05, + "loss": 1.0327, + "step": 3515 + }, + { + "epoch": 0.6764587404178566, + "grad_norm": 0.3816054179403178, + "learning_rate": 5.008997931411517e-05, + "loss": 1.0226, + "step": 3516 + }, + { + "epoch": 0.6766511348263942, + "grad_norm": 0.4172256314260427, + "learning_rate": 5.0035985257940024e-05, + "loss": 1.0311, + "step": 3517 + }, + { + "epoch": 0.6768435292349316, + "grad_norm": 0.2819875109896288, + "learning_rate": 4.998201060721253e-05, + "loss": 1.0974, + "step": 3518 + }, + { + "epoch": 0.6770359236434691, + "grad_norm": 0.3468176654535024, + "learning_rate": 4.992805538289571e-05, + "loss": 1.097, + "step": 3519 + }, + { + "epoch": 0.6772283180520066, + "grad_norm": 0.3037974777451656, + "learning_rate": 4.98741196059452e-05, + "loss": 0.94, + "step": 3520 + }, + { + "epoch": 0.6774207124605441, + "grad_norm": 0.26826748104198406, + "learning_rate": 4.982020329730904e-05, + "loss": 1.0274, + "step": 3521 + }, + { + "epoch": 0.6776131068690816, + "grad_norm": 0.3040532996708861, + "learning_rate": 4.97663064779277e-05, + "loss": 1.0814, + "step": 3522 + }, + { + "epoch": 0.6778055012776191, + "grad_norm": 0.3539878059616163, + "learning_rate": 4.971242916873412e-05, + "loss": 0.9866, + "step": 3523 + }, + { + "epoch": 0.6779978956861566, + "grad_norm": 0.2927001674477416, + "learning_rate": 4.965857139065354e-05, + "loss": 1.0578, + 
"step": 3524 + }, + { + "epoch": 0.6781902900946941, + "grad_norm": 0.4008429496385881, + "learning_rate": 4.960473316460376e-05, + "loss": 1.0021, + "step": 3525 + }, + { + "epoch": 0.6783826845032316, + "grad_norm": 0.34462676651482826, + "learning_rate": 4.955091451149495e-05, + "loss": 1.0711, + "step": 3526 + }, + { + "epoch": 0.6785750789117692, + "grad_norm": 0.40334991637959616, + "learning_rate": 4.9497115452229534e-05, + "loss": 1.1514, + "step": 3527 + }, + { + "epoch": 0.6787674733203066, + "grad_norm": 0.3433344075847281, + "learning_rate": 4.944333600770261e-05, + "loss": 0.9917, + "step": 3528 + }, + { + "epoch": 0.6789598677288441, + "grad_norm": 0.30932715422008966, + "learning_rate": 4.938957619880138e-05, + "loss": 1.0267, + "step": 3529 + }, + { + "epoch": 0.6791522621373817, + "grad_norm": 0.3747694416111552, + "learning_rate": 4.933583604640557e-05, + "loss": 1.0406, + "step": 3530 + }, + { + "epoch": 0.6793446565459191, + "grad_norm": 0.39469199866750787, + "learning_rate": 4.928211557138728e-05, + "loss": 0.9877, + "step": 3531 + }, + { + "epoch": 0.6795370509544566, + "grad_norm": 0.35919781003192447, + "learning_rate": 4.922841479461083e-05, + "loss": 0.98, + "step": 3532 + }, + { + "epoch": 0.6797294453629942, + "grad_norm": 0.3120814571239843, + "learning_rate": 4.917473373693304e-05, + "loss": 1.014, + "step": 3533 + }, + { + "epoch": 0.6799218397715316, + "grad_norm": 0.39006451101516376, + "learning_rate": 4.912107241920302e-05, + "loss": 1.0582, + "step": 3534 + }, + { + "epoch": 0.6801142341800691, + "grad_norm": 0.35400627449530375, + "learning_rate": 4.906743086226218e-05, + "loss": 1.0813, + "step": 3535 + }, + { + "epoch": 0.6803066285886067, + "grad_norm": 0.3158028565257898, + "learning_rate": 4.901380908694434e-05, + "loss": 1.0287, + "step": 3536 + }, + { + "epoch": 0.6804990229971442, + "grad_norm": 0.4489557526750681, + "learning_rate": 4.8960207114075495e-05, + "loss": 1.0546, + "step": 3537 + }, + { + "epoch": 0.6806914174056816, + "grad_norm": 0.33445995832322456, + "learning_rate": 4.890662496447407e-05, + "loss": 0.9672, + "step": 3538 + }, + { + "epoch": 0.6808838118142192, + "grad_norm": 0.36170982280497127, + "learning_rate": 4.885306265895076e-05, + "loss": 0.9894, + "step": 3539 + }, + { + "epoch": 0.6810762062227567, + "grad_norm": 0.33625501551835196, + "learning_rate": 4.879952021830856e-05, + "loss": 1.1454, + "step": 3540 + }, + { + "epoch": 0.6812686006312941, + "grad_norm": 0.328461528299424, + "learning_rate": 4.8745997663342755e-05, + "loss": 1.0563, + "step": 3541 + }, + { + "epoch": 0.6814609950398317, + "grad_norm": 0.3550336805649042, + "learning_rate": 4.8692495014840825e-05, + "loss": 1.0457, + "step": 3542 + }, + { + "epoch": 0.6816533894483692, + "grad_norm": 0.33109470753925446, + "learning_rate": 4.863901229358261e-05, + "loss": 1.0013, + "step": 3543 + }, + { + "epoch": 0.6818457838569066, + "grad_norm": 0.289906702426852, + "learning_rate": 4.858554952034019e-05, + "loss": 1.0227, + "step": 3544 + }, + { + "epoch": 0.6820381782654441, + "grad_norm": 0.2893291582895103, + "learning_rate": 4.853210671587789e-05, + "loss": 1.0507, + "step": 3545 + }, + { + "epoch": 0.6822305726739817, + "grad_norm": 0.39603861748236213, + "learning_rate": 4.8478683900952274e-05, + "loss": 1.0317, + "step": 3546 + }, + { + "epoch": 0.6824229670825191, + "grad_norm": 0.29220139134084816, + "learning_rate": 4.842528109631218e-05, + "loss": 0.9988, + "step": 3547 + }, + { + "epoch": 0.6826153614910566, + "grad_norm": 0.46930101398512997, + 
"learning_rate": 4.8371898322698585e-05, + "loss": 1.0519, + "step": 3548 + }, + { + "epoch": 0.6828077558995942, + "grad_norm": 0.3386570994592851, + "learning_rate": 4.831853560084477e-05, + "loss": 0.9849, + "step": 3549 + }, + { + "epoch": 0.6830001503081317, + "grad_norm": 0.3711144063340892, + "learning_rate": 4.82651929514762e-05, + "loss": 0.9882, + "step": 3550 + }, + { + "epoch": 0.6831925447166691, + "grad_norm": 0.2900557521153995, + "learning_rate": 4.821187039531056e-05, + "loss": 1.0449, + "step": 3551 + }, + { + "epoch": 0.6833849391252067, + "grad_norm": 0.3049776128344479, + "learning_rate": 4.815856795305772e-05, + "loss": 1.1149, + "step": 3552 + }, + { + "epoch": 0.6835773335337442, + "grad_norm": 0.2783425655822952, + "learning_rate": 4.810528564541969e-05, + "loss": 1.0782, + "step": 3553 + }, + { + "epoch": 0.6837697279422816, + "grad_norm": 0.36016407989698923, + "learning_rate": 4.805202349309074e-05, + "loss": 0.9969, + "step": 3554 + }, + { + "epoch": 0.6839621223508192, + "grad_norm": 0.3084802855475764, + "learning_rate": 4.79987815167573e-05, + "loss": 1.062, + "step": 3555 + }, + { + "epoch": 0.6841545167593567, + "grad_norm": 0.27659298163284235, + "learning_rate": 4.7945559737097834e-05, + "loss": 1.0389, + "step": 3556 + }, + { + "epoch": 0.6843469111678941, + "grad_norm": 0.36830292708209716, + "learning_rate": 4.789235817478322e-05, + "loss": 1.051, + "step": 3557 + }, + { + "epoch": 0.6845393055764317, + "grad_norm": 0.3247416035929323, + "learning_rate": 4.7839176850476206e-05, + "loss": 1.0173, + "step": 3558 + }, + { + "epoch": 0.6847316999849692, + "grad_norm": 0.25811498348476397, + "learning_rate": 4.778601578483187e-05, + "loss": 1.0816, + "step": 3559 + }, + { + "epoch": 0.6849240943935067, + "grad_norm": 0.2975820688406356, + "learning_rate": 4.773287499849737e-05, + "loss": 0.9899, + "step": 3560 + }, + { + "epoch": 0.6851164888020442, + "grad_norm": 0.3687199888790893, + "learning_rate": 4.767975451211191e-05, + "loss": 1.1033, + "step": 3561 + }, + { + "epoch": 0.6853088832105817, + "grad_norm": 0.308506714084704, + "learning_rate": 4.7626654346306923e-05, + "loss": 1.0434, + "step": 3562 + }, + { + "epoch": 0.6855012776191192, + "grad_norm": 0.36557095390192673, + "learning_rate": 4.757357452170588e-05, + "loss": 1.0686, + "step": 3563 + }, + { + "epoch": 0.6856936720276567, + "grad_norm": 0.2873479699893437, + "learning_rate": 4.752051505892438e-05, + "loss": 1.0846, + "step": 3564 + }, + { + "epoch": 0.6858860664361942, + "grad_norm": 0.36870745999412574, + "learning_rate": 4.746747597857014e-05, + "loss": 1.0681, + "step": 3565 + }, + { + "epoch": 0.6860784608447317, + "grad_norm": 0.34317991543935006, + "learning_rate": 4.741445730124288e-05, + "loss": 1.0514, + "step": 3566 + }, + { + "epoch": 0.6862708552532693, + "grad_norm": 0.34686465917834397, + "learning_rate": 4.736145904753445e-05, + "loss": 0.9692, + "step": 3567 + }, + { + "epoch": 0.6864632496618067, + "grad_norm": 0.37449660756103303, + "learning_rate": 4.7308481238028765e-05, + "loss": 1.0592, + "step": 3568 + }, + { + "epoch": 0.6866556440703442, + "grad_norm": 0.2600848956522634, + "learning_rate": 4.725552389330182e-05, + "loss": 1.1187, + "step": 3569 + }, + { + "epoch": 0.6868480384788818, + "grad_norm": 0.29988467331887764, + "learning_rate": 4.720258703392161e-05, + "loss": 1.0976, + "step": 3570 + }, + { + "epoch": 0.6870404328874192, + "grad_norm": 0.3414643017885808, + "learning_rate": 4.7149670680448256e-05, + "loss": 0.989, + "step": 3571 + }, + { + "epoch": 
0.6872328272959567, + "grad_norm": 0.28334956104705594, + "learning_rate": 4.709677485343377e-05, + "loss": 1.0395, + "step": 3572 + }, + { + "epoch": 0.6874252217044942, + "grad_norm": 0.32554643996706106, + "learning_rate": 4.704389957342237e-05, + "loss": 1.0765, + "step": 3573 + }, + { + "epoch": 0.6876176161130317, + "grad_norm": 0.3839251163213743, + "learning_rate": 4.699104486095008e-05, + "loss": 1.0684, + "step": 3574 + }, + { + "epoch": 0.6878100105215692, + "grad_norm": 0.2912143580861565, + "learning_rate": 4.6938210736545195e-05, + "loss": 1.0125, + "step": 3575 + }, + { + "epoch": 0.6880024049301067, + "grad_norm": 0.39430444951430527, + "learning_rate": 4.688539722072786e-05, + "loss": 1.06, + "step": 3576 + }, + { + "epoch": 0.6881947993386442, + "grad_norm": 0.5465363646831795, + "learning_rate": 4.6832604334010164e-05, + "loss": 1.0244, + "step": 3577 + }, + { + "epoch": 0.6883871937471817, + "grad_norm": 0.28907495375669995, + "learning_rate": 4.67798320968963e-05, + "loss": 1.056, + "step": 3578 + }, + { + "epoch": 0.6885795881557192, + "grad_norm": 0.31659417428817904, + "learning_rate": 4.672708052988239e-05, + "loss": 1.0531, + "step": 3579 + }, + { + "epoch": 0.6887719825642568, + "grad_norm": 0.3604903424708685, + "learning_rate": 4.667434965345654e-05, + "loss": 0.9765, + "step": 3580 + }, + { + "epoch": 0.6889643769727942, + "grad_norm": 0.31353467027059173, + "learning_rate": 4.6621639488098854e-05, + "loss": 1.0254, + "step": 3581 + }, + { + "epoch": 0.6891567713813317, + "grad_norm": 0.3131835511266756, + "learning_rate": 4.656895005428127e-05, + "loss": 1.1014, + "step": 3582 + }, + { + "epoch": 0.6893491657898693, + "grad_norm": 0.2960827827209148, + "learning_rate": 4.6516281372467806e-05, + "loss": 1.1676, + "step": 3583 + }, + { + "epoch": 0.6895415601984067, + "grad_norm": 0.2783333735238189, + "learning_rate": 4.64636334631144e-05, + "loss": 0.9882, + "step": 3584 + }, + { + "epoch": 0.6897339546069442, + "grad_norm": 0.34259578523253326, + "learning_rate": 4.6411006346668776e-05, + "loss": 1.0231, + "step": 3585 + }, + { + "epoch": 0.6899263490154818, + "grad_norm": 0.4171859908601556, + "learning_rate": 4.635840004357086e-05, + "loss": 1.0481, + "step": 3586 + }, + { + "epoch": 0.6901187434240192, + "grad_norm": 0.3401331914954342, + "learning_rate": 4.630581457425222e-05, + "loss": 1.069, + "step": 3587 + }, + { + "epoch": 0.6903111378325567, + "grad_norm": 0.3149676949564201, + "learning_rate": 4.625324995913648e-05, + "loss": 1.141, + "step": 3588 + }, + { + "epoch": 0.6905035322410943, + "grad_norm": 0.3252719993142569, + "learning_rate": 4.620070621863917e-05, + "loss": 1.0408, + "step": 3589 + }, + { + "epoch": 0.6906959266496318, + "grad_norm": 0.29594764723436207, + "learning_rate": 4.614818337316759e-05, + "loss": 1.0119, + "step": 3590 + }, + { + "epoch": 0.6908883210581692, + "grad_norm": 0.2833495514491127, + "learning_rate": 4.6095681443121066e-05, + "loss": 1.0631, + "step": 3591 + }, + { + "epoch": 0.6910807154667068, + "grad_norm": 0.3137384228123955, + "learning_rate": 4.604320044889072e-05, + "loss": 1.0145, + "step": 3592 + }, + { + "epoch": 0.6912731098752443, + "grad_norm": 0.3435444837394111, + "learning_rate": 4.599074041085958e-05, + "loss": 1.0179, + "step": 3593 + }, + { + "epoch": 0.6914655042837817, + "grad_norm": 0.39489853221544463, + "learning_rate": 4.593830134940256e-05, + "loss": 1.1021, + "step": 3594 + }, + { + "epoch": 0.6916578986923193, + "grad_norm": 0.27555621352680026, + "learning_rate": 
4.588588328488629e-05, + "loss": 1.0369, + "step": 3595 + }, + { + "epoch": 0.6918502931008568, + "grad_norm": 0.3053879678905768, + "learning_rate": 4.583348623766941e-05, + "loss": 1.114, + "step": 3596 + }, + { + "epoch": 0.6920426875093942, + "grad_norm": 0.31209583353961534, + "learning_rate": 4.578111022810231e-05, + "loss": 0.9983, + "step": 3597 + }, + { + "epoch": 0.6922350819179317, + "grad_norm": 0.3036329270805974, + "learning_rate": 4.5728755276527226e-05, + "loss": 1.0477, + "step": 3598 + }, + { + "epoch": 0.6924274763264693, + "grad_norm": 0.2977631134446947, + "learning_rate": 4.567642140327823e-05, + "loss": 1.118, + "step": 3599 + }, + { + "epoch": 0.6926198707350067, + "grad_norm": 0.323468790462587, + "learning_rate": 4.562410862868123e-05, + "loss": 1.1009, + "step": 3600 + }, + { + "epoch": 0.6928122651435442, + "grad_norm": 0.31469573480339924, + "learning_rate": 4.5571816973053825e-05, + "loss": 1.0514, + "step": 3601 + }, + { + "epoch": 0.6930046595520818, + "grad_norm": 0.3581819043883575, + "learning_rate": 4.551954645670557e-05, + "loss": 1.0984, + "step": 3602 + }, + { + "epoch": 0.6931970539606193, + "grad_norm": 0.283659053333895, + "learning_rate": 4.5467297099937625e-05, + "loss": 1.0603, + "step": 3603 + }, + { + "epoch": 0.6933894483691567, + "grad_norm": 0.262658386806912, + "learning_rate": 4.5415068923043133e-05, + "loss": 1.072, + "step": 3604 + }, + { + "epoch": 0.6935818427776943, + "grad_norm": 0.2886571006290498, + "learning_rate": 4.5362861946306936e-05, + "loss": 1.038, + "step": 3605 + }, + { + "epoch": 0.6937742371862318, + "grad_norm": 0.2837306873457255, + "learning_rate": 4.531067619000553e-05, + "loss": 1.0021, + "step": 3606 + }, + { + "epoch": 0.6939666315947692, + "grad_norm": 0.40396626050076123, + "learning_rate": 4.525851167440731e-05, + "loss": 1.0649, + "step": 3607 + }, + { + "epoch": 0.6941590260033068, + "grad_norm": 0.27674427213065805, + "learning_rate": 4.5206368419772406e-05, + "loss": 1.0525, + "step": 3608 + }, + { + "epoch": 0.6943514204118443, + "grad_norm": 0.34574891123886986, + "learning_rate": 4.5154246446352544e-05, + "loss": 0.9968, + "step": 3609 + }, + { + "epoch": 0.6945438148203817, + "grad_norm": 0.2811762999622638, + "learning_rate": 4.5102145774391455e-05, + "loss": 0.9743, + "step": 3610 + }, + { + "epoch": 0.6947362092289193, + "grad_norm": 0.31103172756328284, + "learning_rate": 4.505006642412433e-05, + "loss": 0.9706, + "step": 3611 + }, + { + "epoch": 0.6949286036374568, + "grad_norm": 0.3319691141468076, + "learning_rate": 4.4998008415778205e-05, + "loss": 0.9846, + "step": 3612 + }, + { + "epoch": 0.6951209980459943, + "grad_norm": 0.3542622944509499, + "learning_rate": 4.494597176957186e-05, + "loss": 1.0153, + "step": 3613 + }, + { + "epoch": 0.6953133924545318, + "grad_norm": 0.27408377469551215, + "learning_rate": 4.4893956505715614e-05, + "loss": 1.0739, + "step": 3614 + }, + { + "epoch": 0.6955057868630693, + "grad_norm": 0.2900926564026216, + "learning_rate": 4.484196264441176e-05, + "loss": 1.0093, + "step": 3615 + }, + { + "epoch": 0.6956981812716068, + "grad_norm": 0.27153232008243816, + "learning_rate": 4.478999020585399e-05, + "loss": 0.9417, + "step": 3616 + }, + { + "epoch": 0.6958905756801443, + "grad_norm": 0.3506619741613685, + "learning_rate": 4.4738039210227836e-05, + "loss": 1.0928, + "step": 3617 + }, + { + "epoch": 0.6960829700886818, + "grad_norm": 0.318977313180713, + "learning_rate": 4.468610967771051e-05, + "loss": 1.086, + "step": 3618 + }, + { + "epoch": 
0.6962753644972193, + "grad_norm": 0.3011208266798856, + "learning_rate": 4.463420162847076e-05, + "loss": 0.9993, + "step": 3619 + }, + { + "epoch": 0.6964677589057569, + "grad_norm": 0.3382065454700353, + "learning_rate": 4.4582315082669124e-05, + "loss": 1.0384, + "step": 3620 + }, + { + "epoch": 0.6966601533142943, + "grad_norm": 0.31788701459963614, + "learning_rate": 4.453045006045773e-05, + "loss": 1.0346, + "step": 3621 + }, + { + "epoch": 0.6968525477228318, + "grad_norm": 0.3610214002020846, + "learning_rate": 4.4478606581980354e-05, + "loss": 1.0278, + "step": 3622 + }, + { + "epoch": 0.6970449421313694, + "grad_norm": 0.31798060911254394, + "learning_rate": 4.4426784667372444e-05, + "loss": 1.0491, + "step": 3623 + }, + { + "epoch": 0.6972373365399068, + "grad_norm": 0.3165145664033429, + "learning_rate": 4.437498433676097e-05, + "loss": 1.1319, + "step": 3624 + }, + { + "epoch": 0.6974297309484443, + "grad_norm": 0.3490203065216595, + "learning_rate": 4.432320561026461e-05, + "loss": 1.0663, + "step": 3625 + }, + { + "epoch": 0.6976221253569818, + "grad_norm": 0.321369796440476, + "learning_rate": 4.4271448507993626e-05, + "loss": 1.0723, + "step": 3626 + }, + { + "epoch": 0.6978145197655193, + "grad_norm": 0.3229977161547563, + "learning_rate": 4.4219713050049884e-05, + "loss": 0.9756, + "step": 3627 + }, + { + "epoch": 0.6980069141740568, + "grad_norm": 0.3661307168782783, + "learning_rate": 4.416799925652684e-05, + "loss": 0.9889, + "step": 3628 + }, + { + "epoch": 0.6981993085825943, + "grad_norm": 0.28575720349807143, + "learning_rate": 4.4116307147509564e-05, + "loss": 1.074, + "step": 3629 + }, + { + "epoch": 0.6983917029911318, + "grad_norm": 0.3255365371927097, + "learning_rate": 4.4064636743074605e-05, + "loss": 1.1065, + "step": 3630 + }, + { + "epoch": 0.6985840973996693, + "grad_norm": 0.28518210359942675, + "learning_rate": 4.4012988063290194e-05, + "loss": 0.9744, + "step": 3631 + }, + { + "epoch": 0.6987764918082068, + "grad_norm": 0.30779999292049, + "learning_rate": 4.3961361128216084e-05, + "loss": 1.0211, + "step": 3632 + }, + { + "epoch": 0.6989688862167444, + "grad_norm": 0.2905723137000605, + "learning_rate": 4.390975595790358e-05, + "loss": 1.0688, + "step": 3633 + }, + { + "epoch": 0.6991612806252818, + "grad_norm": 0.3212770448905719, + "learning_rate": 4.385817257239556e-05, + "loss": 1.024, + "step": 3634 + }, + { + "epoch": 0.6993536750338193, + "grad_norm": 0.3174745426718206, + "learning_rate": 4.380661099172636e-05, + "loss": 1.034, + "step": 3635 + }, + { + "epoch": 0.6995460694423569, + "grad_norm": 0.3260547521158565, + "learning_rate": 4.375507123592194e-05, + "loss": 1.0072, + "step": 3636 + }, + { + "epoch": 0.6997384638508943, + "grad_norm": 0.3348747934083643, + "learning_rate": 4.370355332499977e-05, + "loss": 1.098, + "step": 3637 + }, + { + "epoch": 0.6999308582594318, + "grad_norm": 0.3083344727748014, + "learning_rate": 4.365205727896872e-05, + "loss": 1.0648, + "step": 3638 + }, + { + "epoch": 0.7001232526679694, + "grad_norm": 0.301352824560482, + "learning_rate": 4.3600583117829395e-05, + "loss": 1.0803, + "step": 3639 + }, + { + "epoch": 0.7003156470765068, + "grad_norm": 0.30552325020482884, + "learning_rate": 4.354913086157366e-05, + "loss": 1.007, + "step": 3640 + }, + { + "epoch": 0.7005080414850443, + "grad_norm": 0.3234255184277871, + "learning_rate": 4.3497700530185015e-05, + "loss": 1.0255, + "step": 3641 + }, + { + "epoch": 0.7007004358935819, + "grad_norm": 0.36707484512526145, + "learning_rate": 4.344629214363845e-05, 
+ "loss": 1.1103, + "step": 3642 + }, + { + "epoch": 0.7008928303021194, + "grad_norm": 0.35193929458091555, + "learning_rate": 4.33949057219003e-05, + "loss": 0.9848, + "step": 3643 + }, + { + "epoch": 0.7010852247106568, + "grad_norm": 0.3691778899647307, + "learning_rate": 4.3343541284928515e-05, + "loss": 1.0949, + "step": 3644 + }, + { + "epoch": 0.7012776191191944, + "grad_norm": 0.4275166515800625, + "learning_rate": 4.329219885267244e-05, + "loss": 1.0213, + "step": 3645 + }, + { + "epoch": 0.7014700135277319, + "grad_norm": 0.3176436192150303, + "learning_rate": 4.324087844507289e-05, + "loss": 1.0829, + "step": 3646 + }, + { + "epoch": 0.7016624079362693, + "grad_norm": 0.3690684161969959, + "learning_rate": 4.318958008206214e-05, + "loss": 1.095, + "step": 3647 + }, + { + "epoch": 0.7018548023448069, + "grad_norm": 0.31173441345064723, + "learning_rate": 4.3138303783563835e-05, + "loss": 1.0174, + "step": 3648 + }, + { + "epoch": 0.7020471967533444, + "grad_norm": 0.35401815098570905, + "learning_rate": 4.308704956949313e-05, + "loss": 0.9801, + "step": 3649 + }, + { + "epoch": 0.7022395911618818, + "grad_norm": 0.2810103337747946, + "learning_rate": 4.303581745975655e-05, + "loss": 1.0704, + "step": 3650 + }, + { + "epoch": 0.7024319855704194, + "grad_norm": 0.3321944505964623, + "learning_rate": 4.298460747425208e-05, + "loss": 1.0488, + "step": 3651 + }, + { + "epoch": 0.7026243799789569, + "grad_norm": 0.35588553122779976, + "learning_rate": 4.293341963286912e-05, + "loss": 1.002, + "step": 3652 + }, + { + "epoch": 0.7028167743874943, + "grad_norm": 0.5000776742160565, + "learning_rate": 4.288225395548835e-05, + "loss": 1.0413, + "step": 3653 + }, + { + "epoch": 0.7030091687960318, + "grad_norm": 0.28922850719296755, + "learning_rate": 4.283111046198198e-05, + "loss": 1.0107, + "step": 3654 + }, + { + "epoch": 0.7032015632045694, + "grad_norm": 0.3814514918786738, + "learning_rate": 4.277998917221354e-05, + "loss": 1.1259, + "step": 3655 + }, + { + "epoch": 0.7033939576131069, + "grad_norm": 0.3195508583181781, + "learning_rate": 4.272889010603798e-05, + "loss": 1.0463, + "step": 3656 + }, + { + "epoch": 0.7035863520216443, + "grad_norm": 0.2943539622186965, + "learning_rate": 4.267781328330155e-05, + "loss": 1.0733, + "step": 3657 + }, + { + "epoch": 0.7037787464301819, + "grad_norm": 0.27271477986595266, + "learning_rate": 4.262675872384196e-05, + "loss": 1.067, + "step": 3658 + }, + { + "epoch": 0.7039711408387194, + "grad_norm": 0.3224113081689263, + "learning_rate": 4.257572644748813e-05, + "loss": 1.0631, + "step": 3659 + }, + { + "epoch": 0.7041635352472568, + "grad_norm": 0.34867810832027757, + "learning_rate": 4.2524716474060454e-05, + "loss": 1.0439, + "step": 3660 + }, + { + "epoch": 0.7043559296557944, + "grad_norm": 0.3055074411484674, + "learning_rate": 4.2473728823370604e-05, + "loss": 1.0913, + "step": 3661 + }, + { + "epoch": 0.7045483240643319, + "grad_norm": 0.2693879156783923, + "learning_rate": 4.242276351522161e-05, + "loss": 1.0455, + "step": 3662 + }, + { + "epoch": 0.7047407184728693, + "grad_norm": 0.3377767490572996, + "learning_rate": 4.237182056940784e-05, + "loss": 1.0492, + "step": 3663 + }, + { + "epoch": 0.7049331128814069, + "grad_norm": 0.3453557523605591, + "learning_rate": 4.232090000571488e-05, + "loss": 1.0132, + "step": 3664 + }, + { + "epoch": 0.7051255072899444, + "grad_norm": 0.3931272846701584, + "learning_rate": 4.227000184391972e-05, + "loss": 1.0836, + "step": 3665 + }, + { + "epoch": 0.7053179016984819, + "grad_norm": 
0.2821680074923524, + "learning_rate": 4.221912610379065e-05, + "loss": 1.0373, + "step": 3666 + }, + { + "epoch": 0.7055102961070194, + "grad_norm": 0.4156339281689855, + "learning_rate": 4.2168272805087114e-05, + "loss": 0.9768, + "step": 3667 + }, + { + "epoch": 0.7057026905155569, + "grad_norm": 0.331543348965038, + "learning_rate": 4.2117441967560115e-05, + "loss": 0.9686, + "step": 3668 + }, + { + "epoch": 0.7058950849240944, + "grad_norm": 0.358836274801957, + "learning_rate": 4.2066633610951636e-05, + "loss": 1.0072, + "step": 3669 + }, + { + "epoch": 0.7060874793326319, + "grad_norm": 0.2611148059855862, + "learning_rate": 4.2015847754995086e-05, + "loss": 1.0759, + "step": 3670 + }, + { + "epoch": 0.7062798737411694, + "grad_norm": 0.3091379333248745, + "learning_rate": 4.196508441941516e-05, + "loss": 0.908, + "step": 3671 + }, + { + "epoch": 0.7064722681497069, + "grad_norm": 0.35632687792019546, + "learning_rate": 4.191434362392768e-05, + "loss": 0.9345, + "step": 3672 + }, + { + "epoch": 0.7066646625582444, + "grad_norm": 0.3338972341642524, + "learning_rate": 4.18636253882398e-05, + "loss": 1.011, + "step": 3673 + }, + { + "epoch": 0.7068570569667819, + "grad_norm": 0.27346720179809914, + "learning_rate": 4.181292973204992e-05, + "loss": 1.0944, + "step": 3674 + }, + { + "epoch": 0.7070494513753194, + "grad_norm": 0.30661424884024435, + "learning_rate": 4.176225667504765e-05, + "loss": 1.1006, + "step": 3675 + }, + { + "epoch": 0.707241845783857, + "grad_norm": 0.3247441684160884, + "learning_rate": 4.171160623691383e-05, + "loss": 1.0559, + "step": 3676 + }, + { + "epoch": 0.7074342401923944, + "grad_norm": 0.2849133293486218, + "learning_rate": 4.1660978437320474e-05, + "loss": 1.0741, + "step": 3677 + }, + { + "epoch": 0.7076266346009319, + "grad_norm": 0.3425235450710564, + "learning_rate": 4.161037329593085e-05, + "loss": 1.0535, + "step": 3678 + }, + { + "epoch": 0.7078190290094694, + "grad_norm": 0.34727074422066867, + "learning_rate": 4.155979083239942e-05, + "loss": 1.023, + "step": 3679 + }, + { + "epoch": 0.7080114234180069, + "grad_norm": 0.3755806831889047, + "learning_rate": 4.150923106637181e-05, + "loss": 1.0091, + "step": 3680 + }, + { + "epoch": 0.7082038178265444, + "grad_norm": 0.2915675120716691, + "learning_rate": 4.145869401748491e-05, + "loss": 1.0241, + "step": 3681 + }, + { + "epoch": 0.7083962122350819, + "grad_norm": 0.2646728702913228, + "learning_rate": 4.1408179705366636e-05, + "loss": 1.0533, + "step": 3682 + }, + { + "epoch": 0.7085886066436194, + "grad_norm": 0.3001666301177163, + "learning_rate": 4.135768814963622e-05, + "loss": 1.0303, + "step": 3683 + }, + { + "epoch": 0.7087810010521569, + "grad_norm": 0.360544215986514, + "learning_rate": 4.130721936990399e-05, + "loss": 1.0656, + "step": 3684 + }, + { + "epoch": 0.7089733954606944, + "grad_norm": 0.2977311383710479, + "learning_rate": 4.125677338577144e-05, + "loss": 0.9727, + "step": 3685 + }, + { + "epoch": 0.709165789869232, + "grad_norm": 0.30164742924262233, + "learning_rate": 4.1206350216831214e-05, + "loss": 0.9284, + "step": 3686 + }, + { + "epoch": 0.7093581842777694, + "grad_norm": 0.28890038409130697, + "learning_rate": 4.115594988266711e-05, + "loss": 1.0666, + "step": 3687 + }, + { + "epoch": 0.7095505786863069, + "grad_norm": 0.31157720589399535, + "learning_rate": 4.110557240285398e-05, + "loss": 1.095, + "step": 3688 + }, + { + "epoch": 0.7097429730948445, + "grad_norm": 0.3148937274621826, + "learning_rate": 4.105521779695789e-05, + "loss": 1.031, + "step": 3689 + }, + 
{ + "epoch": 0.7099353675033819, + "grad_norm": 0.3438890727145004, + "learning_rate": 4.100488608453599e-05, + "loss": 1.1107, + "step": 3690 + }, + { + "epoch": 0.7101277619119194, + "grad_norm": 0.3141463560938008, + "learning_rate": 4.095457728513652e-05, + "loss": 1.0204, + "step": 3691 + }, + { + "epoch": 0.710320156320457, + "grad_norm": 0.30817618613909464, + "learning_rate": 4.09042914182989e-05, + "loss": 0.9865, + "step": 3692 + }, + { + "epoch": 0.7105125507289944, + "grad_norm": 0.371041857371169, + "learning_rate": 4.08540285035535e-05, + "loss": 1.0582, + "step": 3693 + }, + { + "epoch": 0.7107049451375319, + "grad_norm": 0.3289110495655149, + "learning_rate": 4.0803788560421885e-05, + "loss": 1.1455, + "step": 3694 + }, + { + "epoch": 0.7108973395460695, + "grad_norm": 0.292030176544287, + "learning_rate": 4.075357160841671e-05, + "loss": 0.9832, + "step": 3695 + }, + { + "epoch": 0.711089733954607, + "grad_norm": 0.27344448611216143, + "learning_rate": 4.0703377667041554e-05, + "loss": 0.9701, + "step": 3696 + }, + { + "epoch": 0.7112821283631444, + "grad_norm": 0.30585706691093373, + "learning_rate": 4.065320675579132e-05, + "loss": 0.9758, + "step": 3697 + }, + { + "epoch": 0.711474522771682, + "grad_norm": 0.34736995015019284, + "learning_rate": 4.060305889415168e-05, + "loss": 1.0565, + "step": 3698 + }, + { + "epoch": 0.7116669171802195, + "grad_norm": 0.3438355222148382, + "learning_rate": 4.055293410159954e-05, + "loss": 1.0976, + "step": 3699 + }, + { + "epoch": 0.7118593115887569, + "grad_norm": 0.3368300244498483, + "learning_rate": 4.050283239760282e-05, + "loss": 1.084, + "step": 3700 + }, + { + "epoch": 0.7120517059972945, + "grad_norm": 0.44270531166203037, + "learning_rate": 4.045275380162038e-05, + "loss": 1.1315, + "step": 3701 + }, + { + "epoch": 0.712244100405832, + "grad_norm": 0.3009240606340105, + "learning_rate": 4.04026983331022e-05, + "loss": 1.0968, + "step": 3702 + }, + { + "epoch": 0.7124364948143694, + "grad_norm": 0.30989802760591656, + "learning_rate": 4.035266601148924e-05, + "loss": 1.0408, + "step": 3703 + }, + { + "epoch": 0.712628889222907, + "grad_norm": 0.28894025278079455, + "learning_rate": 4.030265685621349e-05, + "loss": 1.0601, + "step": 3704 + }, + { + "epoch": 0.7128212836314445, + "grad_norm": 0.3529244750906975, + "learning_rate": 4.025267088669796e-05, + "loss": 0.944, + "step": 3705 + }, + { + "epoch": 0.713013678039982, + "grad_norm": 0.2755090473065579, + "learning_rate": 4.020270812235656e-05, + "loss": 1.0735, + "step": 3706 + }, + { + "epoch": 0.7132060724485194, + "grad_norm": 0.2756465759392184, + "learning_rate": 4.015276858259427e-05, + "loss": 0.9892, + "step": 3707 + }, + { + "epoch": 0.713398466857057, + "grad_norm": 0.27570867421132506, + "learning_rate": 4.0102852286807044e-05, + "loss": 0.9672, + "step": 3708 + }, + { + "epoch": 0.7135908612655945, + "grad_norm": 0.2769737548017259, + "learning_rate": 4.005295925438181e-05, + "loss": 1.0487, + "step": 3709 + }, + { + "epoch": 0.7137832556741319, + "grad_norm": 0.3824239219285524, + "learning_rate": 4.000308950469646e-05, + "loss": 1.0881, + "step": 3710 + }, + { + "epoch": 0.7139756500826695, + "grad_norm": 0.300542991742788, + "learning_rate": 3.995324305711976e-05, + "loss": 1.0832, + "step": 3711 + }, + { + "epoch": 0.714168044491207, + "grad_norm": 0.38485709232922743, + "learning_rate": 3.990341993101154e-05, + "loss": 0.9982, + "step": 3712 + }, + { + "epoch": 0.7143604388997444, + "grad_norm": 0.36948846441757627, + "learning_rate": 
3.985362014572256e-05, + "loss": 1.0375, + "step": 3713 + }, + { + "epoch": 0.714552833308282, + "grad_norm": 0.33271159089885494, + "learning_rate": 3.980384372059438e-05, + "loss": 1.087, + "step": 3714 + }, + { + "epoch": 0.7147452277168195, + "grad_norm": 0.3070608102249598, + "learning_rate": 3.97540906749597e-05, + "loss": 1.0418, + "step": 3715 + }, + { + "epoch": 0.7149376221253569, + "grad_norm": 0.278222487182402, + "learning_rate": 3.9704361028142025e-05, + "loss": 1.0597, + "step": 3716 + }, + { + "epoch": 0.7151300165338945, + "grad_norm": 0.33907652157565016, + "learning_rate": 3.9654654799455695e-05, + "loss": 1.0245, + "step": 3717 + }, + { + "epoch": 0.715322410942432, + "grad_norm": 0.3346865490073867, + "learning_rate": 3.960497200820609e-05, + "loss": 0.9931, + "step": 3718 + }, + { + "epoch": 0.7155148053509695, + "grad_norm": 0.27766037000656824, + "learning_rate": 3.955531267368941e-05, + "loss": 1.0313, + "step": 3719 + }, + { + "epoch": 0.715707199759507, + "grad_norm": 0.32186423231949907, + "learning_rate": 3.9505676815192794e-05, + "loss": 1.1072, + "step": 3720 + }, + { + "epoch": 0.7158995941680445, + "grad_norm": 0.29875133578593793, + "learning_rate": 3.945606445199427e-05, + "loss": 1.0587, + "step": 3721 + }, + { + "epoch": 0.716091988576582, + "grad_norm": 0.44261304846040084, + "learning_rate": 3.9406475603362617e-05, + "loss": 1.0142, + "step": 3722 + }, + { + "epoch": 0.7162843829851195, + "grad_norm": 0.3336360338894305, + "learning_rate": 3.9356910288557626e-05, + "loss": 1.0217, + "step": 3723 + }, + { + "epoch": 0.716476777393657, + "grad_norm": 0.36172411772231067, + "learning_rate": 3.9307368526829924e-05, + "loss": 1.0965, + "step": 3724 + }, + { + "epoch": 0.7166691718021945, + "grad_norm": 0.32432511609127174, + "learning_rate": 3.925785033742085e-05, + "loss": 0.9878, + "step": 3725 + }, + { + "epoch": 0.716861566210732, + "grad_norm": 0.27051167653474395, + "learning_rate": 3.920835573956285e-05, + "loss": 1.0523, + "step": 3726 + }, + { + "epoch": 0.7170539606192695, + "grad_norm": 0.26130992964006505, + "learning_rate": 3.915888475247894e-05, + "loss": 1.0498, + "step": 3727 + }, + { + "epoch": 0.717246355027807, + "grad_norm": 0.34151546602238086, + "learning_rate": 3.910943739538313e-05, + "loss": 1.0417, + "step": 3728 + }, + { + "epoch": 0.7174387494363446, + "grad_norm": 0.2965158721769046, + "learning_rate": 3.906001368748022e-05, + "loss": 0.9544, + "step": 3729 + }, + { + "epoch": 0.717631143844882, + "grad_norm": 0.37161904876305424, + "learning_rate": 3.901061364796574e-05, + "loss": 0.9319, + "step": 3730 + }, + { + "epoch": 0.7178235382534195, + "grad_norm": 0.3515770974258548, + "learning_rate": 3.896123729602615e-05, + "loss": 0.9907, + "step": 3731 + }, + { + "epoch": 0.718015932661957, + "grad_norm": 0.30045487767797413, + "learning_rate": 3.891188465083865e-05, + "loss": 1.0438, + "step": 3732 + }, + { + "epoch": 0.7182083270704945, + "grad_norm": 0.28262652086020346, + "learning_rate": 3.8862555731571216e-05, + "loss": 1.0546, + "step": 3733 + }, + { + "epoch": 0.718400721479032, + "grad_norm": 0.3462546537820959, + "learning_rate": 3.88132505573827e-05, + "loss": 1.0559, + "step": 3734 + }, + { + "epoch": 0.7185931158875695, + "grad_norm": 0.27948409223515214, + "learning_rate": 3.876396914742257e-05, + "loss": 0.9437, + "step": 3735 + }, + { + "epoch": 0.718785510296107, + "grad_norm": 0.30889913743720837, + "learning_rate": 3.87147115208312e-05, + "loss": 1.0806, + "step": 3736 + }, + { + "epoch": 0.7189779047046445, + 
"grad_norm": 0.3472597324022128, + "learning_rate": 3.8665477696739685e-05, + "loss": 1.0979, + "step": 3737 + }, + { + "epoch": 0.719170299113182, + "grad_norm": 0.32232113001805635, + "learning_rate": 3.861626769426988e-05, + "loss": 1.0403, + "step": 3738 + }, + { + "epoch": 0.7193626935217196, + "grad_norm": 0.3208620241975116, + "learning_rate": 3.856708153253438e-05, + "loss": 1.0075, + "step": 3739 + }, + { + "epoch": 0.719555087930257, + "grad_norm": 0.3617577173368042, + "learning_rate": 3.8517919230636546e-05, + "loss": 1.0665, + "step": 3740 + }, + { + "epoch": 0.7197474823387945, + "grad_norm": 0.3985323588661623, + "learning_rate": 3.846878080767039e-05, + "loss": 0.9795, + "step": 3741 + }, + { + "epoch": 0.7199398767473321, + "grad_norm": 0.32962981865633395, + "learning_rate": 3.841966628272079e-05, + "loss": 0.966, + "step": 3742 + }, + { + "epoch": 0.7201322711558695, + "grad_norm": 0.3720995664827796, + "learning_rate": 3.837057567486314e-05, + "loss": 1.0475, + "step": 3743 + }, + { + "epoch": 0.720324665564407, + "grad_norm": 0.3428358332024321, + "learning_rate": 3.832150900316377e-05, + "loss": 1.0912, + "step": 3744 + }, + { + "epoch": 0.7205170599729446, + "grad_norm": 0.2905157740356335, + "learning_rate": 3.827246628667962e-05, + "loss": 1.0004, + "step": 3745 + }, + { + "epoch": 0.720709454381482, + "grad_norm": 0.300929230146176, + "learning_rate": 3.8223447544458256e-05, + "loss": 1.0903, + "step": 3746 + }, + { + "epoch": 0.7209018487900195, + "grad_norm": 0.301120293317064, + "learning_rate": 3.817445279553801e-05, + "loss": 1.032, + "step": 3747 + }, + { + "epoch": 0.7210942431985571, + "grad_norm": 0.33428298687700736, + "learning_rate": 3.8125482058947906e-05, + "loss": 0.9867, + "step": 3748 + }, + { + "epoch": 0.7212866376070945, + "grad_norm": 0.4316125347231823, + "learning_rate": 3.807653535370752e-05, + "loss": 1.0085, + "step": 3749 + }, + { + "epoch": 0.721479032015632, + "grad_norm": 0.2887936628347958, + "learning_rate": 3.802761269882734e-05, + "loss": 0.9862, + "step": 3750 + }, + { + "epoch": 0.7216714264241696, + "grad_norm": 0.2708283673844958, + "learning_rate": 3.797871411330824e-05, + "loss": 0.9947, + "step": 3751 + }, + { + "epoch": 0.7218638208327071, + "grad_norm": 0.31822615890375244, + "learning_rate": 3.7929839616141914e-05, + "loss": 1.0154, + "step": 3752 + }, + { + "epoch": 0.7220562152412445, + "grad_norm": 0.38637026262251845, + "learning_rate": 3.788098922631067e-05, + "loss": 1.1266, + "step": 3753 + }, + { + "epoch": 0.7222486096497821, + "grad_norm": 0.338785243238196, + "learning_rate": 3.7832162962787355e-05, + "loss": 0.9855, + "step": 3754 + }, + { + "epoch": 0.7224410040583196, + "grad_norm": 0.31961264096618797, + "learning_rate": 3.7783360844535656e-05, + "loss": 1.0101, + "step": 3755 + }, + { + "epoch": 0.722633398466857, + "grad_norm": 0.31727969585760973, + "learning_rate": 3.773458289050963e-05, + "loss": 1.0261, + "step": 3756 + }, + { + "epoch": 0.7228257928753946, + "grad_norm": 0.35221786714551934, + "learning_rate": 3.7685829119654134e-05, + "loss": 1.1314, + "step": 3757 + }, + { + "epoch": 0.7230181872839321, + "grad_norm": 0.3157238242748574, + "learning_rate": 3.763709955090461e-05, + "loss": 1.1029, + "step": 3758 + }, + { + "epoch": 0.7232105816924695, + "grad_norm": 0.2831064025789112, + "learning_rate": 3.758839420318696e-05, + "loss": 1.043, + "step": 3759 + }, + { + "epoch": 0.723402976101007, + "grad_norm": 0.43844636356805766, + "learning_rate": 3.753971309541784e-05, + "loss": 1.0919, + 
"step": 3760 + }, + { + "epoch": 0.7235953705095446, + "grad_norm": 0.3869646349147955, + "learning_rate": 3.7491056246504433e-05, + "loss": 1.1238, + "step": 3761 + }, + { + "epoch": 0.723787764918082, + "grad_norm": 0.321153005189178, + "learning_rate": 3.7442423675344474e-05, + "loss": 1.1164, + "step": 3762 + }, + { + "epoch": 0.7239801593266195, + "grad_norm": 0.43787386905458286, + "learning_rate": 3.739381540082635e-05, + "loss": 1.0211, + "step": 3763 + }, + { + "epoch": 0.7241725537351571, + "grad_norm": 0.3400332352368309, + "learning_rate": 3.734523144182887e-05, + "loss": 1.0387, + "step": 3764 + }, + { + "epoch": 0.7243649481436946, + "grad_norm": 0.3013033084876963, + "learning_rate": 3.729667181722154e-05, + "loss": 1.0887, + "step": 3765 + }, + { + "epoch": 0.724557342552232, + "grad_norm": 0.2726036749211284, + "learning_rate": 3.7248136545864344e-05, + "loss": 1.123, + "step": 3766 + }, + { + "epoch": 0.7247497369607696, + "grad_norm": 0.39547155414047175, + "learning_rate": 3.7199625646607825e-05, + "loss": 1.072, + "step": 3767 + }, + { + "epoch": 0.7249421313693071, + "grad_norm": 0.2665373450133298, + "learning_rate": 3.7151139138293054e-05, + "loss": 1.0858, + "step": 3768 + }, + { + "epoch": 0.7251345257778445, + "grad_norm": 0.31535191192121326, + "learning_rate": 3.7102677039751666e-05, + "loss": 1.0084, + "step": 3769 + }, + { + "epoch": 0.7253269201863821, + "grad_norm": 0.42017617585244926, + "learning_rate": 3.705423936980572e-05, + "loss": 1.0712, + "step": 3770 + }, + { + "epoch": 0.7255193145949196, + "grad_norm": 0.27582195092043776, + "learning_rate": 3.700582614726791e-05, + "loss": 1.0112, + "step": 3771 + }, + { + "epoch": 0.725711709003457, + "grad_norm": 0.28966556909723074, + "learning_rate": 3.6957437390941275e-05, + "loss": 1.0827, + "step": 3772 + }, + { + "epoch": 0.7259041034119946, + "grad_norm": 0.279856243846349, + "learning_rate": 3.690907311961955e-05, + "loss": 1.045, + "step": 3773 + }, + { + "epoch": 0.7260964978205321, + "grad_norm": 0.29136893719638884, + "learning_rate": 3.6860733352086864e-05, + "loss": 1.1533, + "step": 3774 + }, + { + "epoch": 0.7262888922290696, + "grad_norm": 0.33973439977357034, + "learning_rate": 3.681241810711776e-05, + "loss": 1.0134, + "step": 3775 + }, + { + "epoch": 0.7264812866376071, + "grad_norm": 0.31523364878187615, + "learning_rate": 3.6764127403477344e-05, + "loss": 0.9708, + "step": 3776 + }, + { + "epoch": 0.7266736810461446, + "grad_norm": 0.337605590526865, + "learning_rate": 3.671586125992123e-05, + "loss": 1.0967, + "step": 3777 + }, + { + "epoch": 0.7268660754546821, + "grad_norm": 0.32663225825593967, + "learning_rate": 3.6667619695195285e-05, + "loss": 1.0049, + "step": 3778 + }, + { + "epoch": 0.7270584698632196, + "grad_norm": 0.3487857365650158, + "learning_rate": 3.661940272803616e-05, + "loss": 1.0463, + "step": 3779 + }, + { + "epoch": 0.7272508642717571, + "grad_norm": 0.41716348293929606, + "learning_rate": 3.657121037717064e-05, + "loss": 1.0468, + "step": 3780 + }, + { + "epoch": 0.7274432586802946, + "grad_norm": 0.3096807012547874, + "learning_rate": 3.652304266131612e-05, + "loss": 1.1228, + "step": 3781 + }, + { + "epoch": 0.7276356530888322, + "grad_norm": 0.28589831071669314, + "learning_rate": 3.6474899599180423e-05, + "loss": 1.0348, + "step": 3782 + }, + { + "epoch": 0.7278280474973696, + "grad_norm": 0.44004740383958785, + "learning_rate": 3.642678120946168e-05, + "loss": 0.9969, + "step": 3783 + }, + { + "epoch": 0.7280204419059071, + "grad_norm": 0.2890710011956558, + 
"learning_rate": 3.6378687510848575e-05, + "loss": 1.0167, + "step": 3784 + }, + { + "epoch": 0.7282128363144446, + "grad_norm": 0.3483685844095603, + "learning_rate": 3.633061852202012e-05, + "loss": 1.0003, + "step": 3785 + }, + { + "epoch": 0.7284052307229821, + "grad_norm": 0.39234394570675146, + "learning_rate": 3.628257426164577e-05, + "loss": 1.0001, + "step": 3786 + }, + { + "epoch": 0.7285976251315196, + "grad_norm": 0.2650185580329702, + "learning_rate": 3.62345547483854e-05, + "loss": 1.0101, + "step": 3787 + }, + { + "epoch": 0.7287900195400571, + "grad_norm": 0.3376377883217269, + "learning_rate": 3.6186560000889155e-05, + "loss": 1.0657, + "step": 3788 + }, + { + "epoch": 0.7289824139485946, + "grad_norm": 0.41767604358984806, + "learning_rate": 3.613859003779769e-05, + "loss": 1.0208, + "step": 3789 + }, + { + "epoch": 0.7291748083571321, + "grad_norm": 0.29866232461856085, + "learning_rate": 3.609064487774199e-05, + "loss": 1.0896, + "step": 3790 + }, + { + "epoch": 0.7293672027656696, + "grad_norm": 0.3550225661979497, + "learning_rate": 3.6042724539343376e-05, + "loss": 1.1159, + "step": 3791 + }, + { + "epoch": 0.7295595971742072, + "grad_norm": 0.31662749067494306, + "learning_rate": 3.599482904121361e-05, + "loss": 1.0126, + "step": 3792 + }, + { + "epoch": 0.7297519915827446, + "grad_norm": 0.3046494714183812, + "learning_rate": 3.594695840195468e-05, + "loss": 1.0667, + "step": 3793 + }, + { + "epoch": 0.7299443859912821, + "grad_norm": 0.28274908213991673, + "learning_rate": 3.589911264015902e-05, + "loss": 1.0634, + "step": 3794 + }, + { + "epoch": 0.7301367803998197, + "grad_norm": 0.34078390297765465, + "learning_rate": 3.585129177440938e-05, + "loss": 1.0763, + "step": 3795 + }, + { + "epoch": 0.7303291748083571, + "grad_norm": 0.5116109315261538, + "learning_rate": 3.580349582327882e-05, + "loss": 1.0075, + "step": 3796 + }, + { + "epoch": 0.7305215692168946, + "grad_norm": 0.3205760990103904, + "learning_rate": 3.575572480533076e-05, + "loss": 1.033, + "step": 3797 + }, + { + "epoch": 0.7307139636254322, + "grad_norm": 0.33090927368722733, + "learning_rate": 3.570797873911892e-05, + "loss": 0.9595, + "step": 3798 + }, + { + "epoch": 0.7309063580339696, + "grad_norm": 0.31168680661249126, + "learning_rate": 3.5660257643187276e-05, + "loss": 0.9647, + "step": 3799 + }, + { + "epoch": 0.7310987524425071, + "grad_norm": 0.33753469324900864, + "learning_rate": 3.561256153607021e-05, + "loss": 1.0939, + "step": 3800 + }, + { + "epoch": 0.7312911468510447, + "grad_norm": 0.3055531827749562, + "learning_rate": 3.556489043629224e-05, + "loss": 1.0224, + "step": 3801 + }, + { + "epoch": 0.7314835412595821, + "grad_norm": 0.4349685632480251, + "learning_rate": 3.5517244362368365e-05, + "loss": 0.987, + "step": 3802 + }, + { + "epoch": 0.7316759356681196, + "grad_norm": 0.29842656083531044, + "learning_rate": 3.546962333280379e-05, + "loss": 0.9958, + "step": 3803 + }, + { + "epoch": 0.7318683300766572, + "grad_norm": 0.3617704818980536, + "learning_rate": 3.542202736609389e-05, + "loss": 1.04, + "step": 3804 + }, + { + "epoch": 0.7320607244851947, + "grad_norm": 0.28628580888877486, + "learning_rate": 3.5374456480724426e-05, + "loss": 0.9949, + "step": 3805 + }, + { + "epoch": 0.7322531188937321, + "grad_norm": 0.31285519070919615, + "learning_rate": 3.532691069517142e-05, + "loss": 0.9728, + "step": 3806 + }, + { + "epoch": 0.7324455133022697, + "grad_norm": 0.40701208946703527, + "learning_rate": 3.527939002790101e-05, + "loss": 1.0585, + "step": 3807 + }, + { + 
"epoch": 0.7326379077108072, + "grad_norm": 0.31365602556048144, + "learning_rate": 3.52318944973698e-05, + "loss": 0.8284, + "step": 3808 + }, + { + "epoch": 0.7328303021193446, + "grad_norm": 0.3465266680547385, + "learning_rate": 3.518442412202441e-05, + "loss": 0.9903, + "step": 3809 + }, + { + "epoch": 0.7330226965278822, + "grad_norm": 0.30421390179652025, + "learning_rate": 3.5136978920301825e-05, + "loss": 0.9775, + "step": 3810 + }, + { + "epoch": 0.7332150909364197, + "grad_norm": 0.37369570297910537, + "learning_rate": 3.508955891062924e-05, + "loss": 1.1015, + "step": 3811 + }, + { + "epoch": 0.7334074853449571, + "grad_norm": 0.3694988509279056, + "learning_rate": 3.504216411142398e-05, + "loss": 0.9695, + "step": 3812 + }, + { + "epoch": 0.7335998797534946, + "grad_norm": 0.2809510869138268, + "learning_rate": 3.499479454109367e-05, + "loss": 1.0136, + "step": 3813 + }, + { + "epoch": 0.7337922741620322, + "grad_norm": 0.32389434632168307, + "learning_rate": 3.4947450218036106e-05, + "loss": 1.1118, + "step": 3814 + }, + { + "epoch": 0.7339846685705697, + "grad_norm": 0.2935391619293556, + "learning_rate": 3.490013116063928e-05, + "loss": 1.0271, + "step": 3815 + }, + { + "epoch": 0.7341770629791071, + "grad_norm": 0.3841594257340414, + "learning_rate": 3.485283738728139e-05, + "loss": 1.0188, + "step": 3816 + }, + { + "epoch": 0.7343694573876447, + "grad_norm": 0.3392109881109757, + "learning_rate": 3.480556891633074e-05, + "loss": 1.0101, + "step": 3817 + }, + { + "epoch": 0.7345618517961822, + "grad_norm": 0.33709491767906086, + "learning_rate": 3.475832576614589e-05, + "loss": 1.0193, + "step": 3818 + }, + { + "epoch": 0.7347542462047196, + "grad_norm": 0.42010514938698673, + "learning_rate": 3.4711107955075536e-05, + "loss": 1.0292, + "step": 3819 + }, + { + "epoch": 0.7349466406132572, + "grad_norm": 0.3482491943345016, + "learning_rate": 3.466391550145852e-05, + "loss": 0.9591, + "step": 3820 + }, + { + "epoch": 0.7351390350217947, + "grad_norm": 0.2903810344626169, + "learning_rate": 3.461674842362389e-05, + "loss": 0.9785, + "step": 3821 + }, + { + "epoch": 0.7353314294303321, + "grad_norm": 0.3051341660022261, + "learning_rate": 3.456960673989074e-05, + "loss": 1.0277, + "step": 3822 + }, + { + "epoch": 0.7355238238388697, + "grad_norm": 0.32840846531318874, + "learning_rate": 3.452249046856836e-05, + "loss": 1.0349, + "step": 3823 + }, + { + "epoch": 0.7357162182474072, + "grad_norm": 0.3086958749095765, + "learning_rate": 3.447539962795619e-05, + "loss": 0.9733, + "step": 3824 + }, + { + "epoch": 0.7359086126559446, + "grad_norm": 0.29347635630651686, + "learning_rate": 3.442833423634377e-05, + "loss": 1.0347, + "step": 3825 + }, + { + "epoch": 0.7361010070644822, + "grad_norm": 0.2727939117876327, + "learning_rate": 3.4381294312010745e-05, + "loss": 0.9761, + "step": 3826 + }, + { + "epoch": 0.7362934014730197, + "grad_norm": 0.34392564930358976, + "learning_rate": 3.433427987322693e-05, + "loss": 1.0714, + "step": 3827 + }, + { + "epoch": 0.7364857958815572, + "grad_norm": 0.29948809123823283, + "learning_rate": 3.4287290938252106e-05, + "loss": 0.9936, + "step": 3828 + }, + { + "epoch": 0.7366781902900947, + "grad_norm": 0.3825672834034742, + "learning_rate": 3.424032752533627e-05, + "loss": 1.1535, + "step": 3829 + }, + { + "epoch": 0.7368705846986322, + "grad_norm": 0.270002640652531, + "learning_rate": 3.4193389652719476e-05, + "loss": 1.0852, + "step": 3830 + }, + { + "epoch": 0.7370629791071697, + "grad_norm": 0.26936672858848226, + "learning_rate": 
3.414647733863185e-05, + "loss": 1.0606, + "step": 3831 + }, + { + "epoch": 0.7372553735157072, + "grad_norm": 0.3856095111550712, + "learning_rate": 3.4099590601293626e-05, + "loss": 0.9965, + "step": 3832 + }, + { + "epoch": 0.7374477679242447, + "grad_norm": 0.28977195742475814, + "learning_rate": 3.4052729458915023e-05, + "loss": 0.991, + "step": 3833 + }, + { + "epoch": 0.7376401623327822, + "grad_norm": 0.29524355198726887, + "learning_rate": 3.4005893929696377e-05, + "loss": 1.1655, + "step": 3834 + }, + { + "epoch": 0.7378325567413198, + "grad_norm": 0.26160246240202195, + "learning_rate": 3.395908403182811e-05, + "loss": 1.0802, + "step": 3835 + }, + { + "epoch": 0.7380249511498572, + "grad_norm": 0.36378109459526253, + "learning_rate": 3.391229978349056e-05, + "loss": 1.0913, + "step": 3836 + }, + { + "epoch": 0.7382173455583947, + "grad_norm": 0.3885348797468423, + "learning_rate": 3.386554120285431e-05, + "loss": 0.9901, + "step": 3837 + }, + { + "epoch": 0.7384097399669323, + "grad_norm": 0.3113625643214789, + "learning_rate": 3.381880830807975e-05, + "loss": 1.0248, + "step": 3838 + }, + { + "epoch": 0.7386021343754697, + "grad_norm": 0.3314536204084258, + "learning_rate": 3.3772101117317435e-05, + "loss": 0.9322, + "step": 3839 + }, + { + "epoch": 0.7387945287840072, + "grad_norm": 0.29580523255470403, + "learning_rate": 3.372541964870795e-05, + "loss": 1.0826, + "step": 3840 + }, + { + "epoch": 0.7389869231925447, + "grad_norm": 0.33361814715852317, + "learning_rate": 3.367876392038174e-05, + "loss": 1.0389, + "step": 3841 + }, + { + "epoch": 0.7391793176010822, + "grad_norm": 0.3084181829564584, + "learning_rate": 3.363213395045941e-05, + "loss": 1.0238, + "step": 3842 + }, + { + "epoch": 0.7393717120096197, + "grad_norm": 0.3401781008304183, + "learning_rate": 3.3585529757051504e-05, + "loss": 1.003, + "step": 3843 + }, + { + "epoch": 0.7395641064181572, + "grad_norm": 0.3145841059695358, + "learning_rate": 3.353895135825854e-05, + "loss": 1.0826, + "step": 3844 + }, + { + "epoch": 0.7397565008266948, + "grad_norm": 0.33358532570006877, + "learning_rate": 3.349239877217108e-05, + "loss": 1.1339, + "step": 3845 + }, + { + "epoch": 0.7399488952352322, + "grad_norm": 0.282023660392982, + "learning_rate": 3.3445872016869516e-05, + "loss": 1.0499, + "step": 3846 + }, + { + "epoch": 0.7401412896437697, + "grad_norm": 0.2601702279941731, + "learning_rate": 3.339937111042437e-05, + "loss": 1.0775, + "step": 3847 + }, + { + "epoch": 0.7403336840523073, + "grad_norm": 0.34465223879362933, + "learning_rate": 3.3352896070896054e-05, + "loss": 0.9581, + "step": 3848 + }, + { + "epoch": 0.7405260784608447, + "grad_norm": 0.31119601244903744, + "learning_rate": 3.330644691633492e-05, + "loss": 1.0303, + "step": 3849 + }, + { + "epoch": 0.7407184728693822, + "grad_norm": 0.29162235865954, + "learning_rate": 3.3260023664781326e-05, + "loss": 0.9509, + "step": 3850 + }, + { + "epoch": 0.7409108672779198, + "grad_norm": 0.27088071763972155, + "learning_rate": 3.321362633426547e-05, + "loss": 1.0597, + "step": 3851 + }, + { + "epoch": 0.7411032616864572, + "grad_norm": 0.2968302711086647, + "learning_rate": 3.316725494280757e-05, + "loss": 1.0536, + "step": 3852 + }, + { + "epoch": 0.7412956560949947, + "grad_norm": 0.2881510912997479, + "learning_rate": 3.312090950841775e-05, + "loss": 1.0461, + "step": 3853 + }, + { + "epoch": 0.7414880505035323, + "grad_norm": 0.41564915405098773, + "learning_rate": 3.307459004909599e-05, + "loss": 0.9426, + "step": 3854 + }, + { + "epoch": 
0.7416804449120697, + "grad_norm": 0.34597871012365344, + "learning_rate": 3.3028296582832284e-05, + "loss": 1.0022, + "step": 3855 + }, + { + "epoch": 0.7418728393206072, + "grad_norm": 0.385068863284406, + "learning_rate": 3.298202912760652e-05, + "loss": 1.0388, + "step": 3856 + }, + { + "epoch": 0.7420652337291448, + "grad_norm": 0.34505895322585384, + "learning_rate": 3.2935787701388344e-05, + "loss": 0.9887, + "step": 3857 + }, + { + "epoch": 0.7422576281376823, + "grad_norm": 0.4414506370320824, + "learning_rate": 3.288957232213745e-05, + "loss": 0.9956, + "step": 3858 + }, + { + "epoch": 0.7424500225462197, + "grad_norm": 0.3406219814756699, + "learning_rate": 3.284338300780336e-05, + "loss": 1.0236, + "step": 3859 + }, + { + "epoch": 0.7426424169547573, + "grad_norm": 0.31442420535558924, + "learning_rate": 3.279721977632546e-05, + "loss": 1.1384, + "step": 3860 + }, + { + "epoch": 0.7428348113632948, + "grad_norm": 0.31375976906486563, + "learning_rate": 3.275108264563306e-05, + "loss": 1.03, + "step": 3861 + }, + { + "epoch": 0.7430272057718322, + "grad_norm": 0.31300874958598546, + "learning_rate": 3.270497163364521e-05, + "loss": 0.9889, + "step": 3862 + }, + { + "epoch": 0.7432196001803698, + "grad_norm": 0.2705761982144218, + "learning_rate": 3.2658886758270943e-05, + "loss": 0.9692, + "step": 3863 + }, + { + "epoch": 0.7434119945889073, + "grad_norm": 0.30297177469103076, + "learning_rate": 3.261282803740911e-05, + "loss": 0.9891, + "step": 3864 + }, + { + "epoch": 0.7436043889974447, + "grad_norm": 0.38406072684084736, + "learning_rate": 3.256679548894831e-05, + "loss": 1.0813, + "step": 3865 + }, + { + "epoch": 0.7437967834059822, + "grad_norm": 0.29591747237840993, + "learning_rate": 3.252078913076718e-05, + "loss": 1.0739, + "step": 3866 + }, + { + "epoch": 0.7439891778145198, + "grad_norm": 0.28621020136855685, + "learning_rate": 3.247480898073395e-05, + "loss": 0.9873, + "step": 3867 + }, + { + "epoch": 0.7441815722230573, + "grad_norm": 0.3644907579246381, + "learning_rate": 3.242885505670681e-05, + "loss": 1.0777, + "step": 3868 + }, + { + "epoch": 0.7443739666315947, + "grad_norm": 0.28254687455682775, + "learning_rate": 3.238292737653379e-05, + "loss": 0.9716, + "step": 3869 + }, + { + "epoch": 0.7445663610401323, + "grad_norm": 0.38046555828035444, + "learning_rate": 3.233702595805258e-05, + "loss": 1.0137, + "step": 3870 + }, + { + "epoch": 0.7447587554486698, + "grad_norm": 0.3342343396210659, + "learning_rate": 3.229115081909082e-05, + "loss": 1.0624, + "step": 3871 + }, + { + "epoch": 0.7449511498572072, + "grad_norm": 0.3530750681785038, + "learning_rate": 3.224530197746587e-05, + "loss": 1.0749, + "step": 3872 + }, + { + "epoch": 0.7451435442657448, + "grad_norm": 0.2819775581917406, + "learning_rate": 3.219947945098489e-05, + "loss": 1.0432, + "step": 3873 + }, + { + "epoch": 0.7453359386742823, + "grad_norm": 0.28390548768316626, + "learning_rate": 3.215368325744485e-05, + "loss": 1.091, + "step": 3874 + }, + { + "epoch": 0.7455283330828197, + "grad_norm": 0.35488816437454646, + "learning_rate": 3.2107913414632426e-05, + "loss": 1.0809, + "step": 3875 + }, + { + "epoch": 0.7457207274913573, + "grad_norm": 0.3100437466636983, + "learning_rate": 3.206216994032411e-05, + "loss": 1.0866, + "step": 3876 + }, + { + "epoch": 0.7459131218998948, + "grad_norm": 0.33137820929544354, + "learning_rate": 3.2016452852286127e-05, + "loss": 1.0101, + "step": 3877 + }, + { + "epoch": 0.7461055163084322, + "grad_norm": 0.4217550504657977, + "learning_rate": 
3.197076216827449e-05, + "loss": 1.116, + "step": 3878 + }, + { + "epoch": 0.7462979107169698, + "grad_norm": 0.316701280473312, + "learning_rate": 3.192509790603496e-05, + "loss": 1.1168, + "step": 3879 + }, + { + "epoch": 0.7464903051255073, + "grad_norm": 0.2863085336509443, + "learning_rate": 3.187946008330295e-05, + "loss": 1.1201, + "step": 3880 + }, + { + "epoch": 0.7466826995340448, + "grad_norm": 0.3388751009514164, + "learning_rate": 3.183384871780367e-05, + "loss": 0.9836, + "step": 3881 + }, + { + "epoch": 0.7468750939425823, + "grad_norm": 0.27931731976612073, + "learning_rate": 3.178826382725212e-05, + "loss": 1.0166, + "step": 3882 + }, + { + "epoch": 0.7470674883511198, + "grad_norm": 0.40005571496549147, + "learning_rate": 3.1742705429352826e-05, + "loss": 1.0421, + "step": 3883 + }, + { + "epoch": 0.7472598827596573, + "grad_norm": 0.33287206561056276, + "learning_rate": 3.169717354180025e-05, + "loss": 1.0173, + "step": 3884 + }, + { + "epoch": 0.7474522771681948, + "grad_norm": 0.27804798886789184, + "learning_rate": 3.165166818227845e-05, + "loss": 1.0846, + "step": 3885 + }, + { + "epoch": 0.7476446715767323, + "grad_norm": 0.28062620713526976, + "learning_rate": 3.160618936846111e-05, + "loss": 1.0661, + "step": 3886 + }, + { + "epoch": 0.7478370659852698, + "grad_norm": 0.33588095161644405, + "learning_rate": 3.156073711801172e-05, + "loss": 1.0366, + "step": 3887 + }, + { + "epoch": 0.7480294603938074, + "grad_norm": 0.3675736406382842, + "learning_rate": 3.151531144858344e-05, + "loss": 1.0801, + "step": 3888 + }, + { + "epoch": 0.7482218548023448, + "grad_norm": 0.3139361084371443, + "learning_rate": 3.1469912377818986e-05, + "loss": 1.0132, + "step": 3889 + }, + { + "epoch": 0.7484142492108823, + "grad_norm": 0.2738547897392836, + "learning_rate": 3.142453992335096e-05, + "loss": 1.0006, + "step": 3890 + }, + { + "epoch": 0.7486066436194199, + "grad_norm": 0.31015001363801614, + "learning_rate": 3.137919410280139e-05, + "loss": 0.9684, + "step": 3891 + }, + { + "epoch": 0.7487990380279573, + "grad_norm": 0.33435989965073243, + "learning_rate": 3.1333874933782115e-05, + "loss": 0.9925, + "step": 3892 + }, + { + "epoch": 0.7489914324364948, + "grad_norm": 0.27193849841389395, + "learning_rate": 3.128858243389461e-05, + "loss": 1.0905, + "step": 3893 + }, + { + "epoch": 0.7491838268450323, + "grad_norm": 0.3005729293682963, + "learning_rate": 3.124331662072987e-05, + "loss": 1.0217, + "step": 3894 + }, + { + "epoch": 0.7493762212535698, + "grad_norm": 0.28884142465361506, + "learning_rate": 3.119807751186872e-05, + "loss": 1.0116, + "step": 3895 + }, + { + "epoch": 0.7495686156621073, + "grad_norm": 0.3888879568439692, + "learning_rate": 3.115286512488144e-05, + "loss": 1.0656, + "step": 3896 + }, + { + "epoch": 0.7497610100706448, + "grad_norm": 0.3283868279599659, + "learning_rate": 3.110767947732801e-05, + "loss": 1.0659, + "step": 3897 + }, + { + "epoch": 0.7499534044791824, + "grad_norm": 0.29821654741921305, + "learning_rate": 3.106252058675806e-05, + "loss": 1.0394, + "step": 3898 + }, + { + "epoch": 0.7501457988877198, + "grad_norm": 0.39433267274864786, + "learning_rate": 3.1017388470710716e-05, + "loss": 0.9995, + "step": 3899 + }, + { + "epoch": 0.7503381932962573, + "grad_norm": 0.3221394384989637, + "learning_rate": 3.097228314671481e-05, + "loss": 1.0771, + "step": 3900 + }, + { + "epoch": 0.7505305877047949, + "grad_norm": 0.28913411844908465, + "learning_rate": 3.092720463228872e-05, + "loss": 1.1104, + "step": 3901 + }, + { + "epoch": 
0.7507229821133323, + "grad_norm": 0.32193391627664836, + "learning_rate": 3.0882152944940424e-05, + "loss": 1.057, + "step": 3902 + }, + { + "epoch": 0.7509153765218698, + "grad_norm": 0.28515267815000667, + "learning_rate": 3.0837128102167515e-05, + "loss": 1.0559, + "step": 3903 + }, + { + "epoch": 0.7511077709304074, + "grad_norm": 0.3467078382476179, + "learning_rate": 3.079213012145705e-05, + "loss": 1.0364, + "step": 3904 + }, + { + "epoch": 0.7513001653389448, + "grad_norm": 0.31431388025175716, + "learning_rate": 3.0747159020285765e-05, + "loss": 1.0523, + "step": 3905 + }, + { + "epoch": 0.7514925597474823, + "grad_norm": 0.3110355117013477, + "learning_rate": 3.070221481611992e-05, + "loss": 1.0449, + "step": 3906 + }, + { + "epoch": 0.7516849541560199, + "grad_norm": 0.33757083018869277, + "learning_rate": 3.065729752641532e-05, + "loss": 1.0544, + "step": 3907 + }, + { + "epoch": 0.7518773485645573, + "grad_norm": 0.30862324850587997, + "learning_rate": 3.061240716861735e-05, + "loss": 1.0093, + "step": 3908 + }, + { + "epoch": 0.7520697429730948, + "grad_norm": 0.3579949480363705, + "learning_rate": 3.056754376016087e-05, + "loss": 1.1937, + "step": 3909 + }, + { + "epoch": 0.7522621373816324, + "grad_norm": 0.2808916414164792, + "learning_rate": 3.05227073184703e-05, + "loss": 1.0566, + "step": 3910 + }, + { + "epoch": 0.7524545317901699, + "grad_norm": 0.31708005491534186, + "learning_rate": 3.047789786095967e-05, + "loss": 0.9274, + "step": 3911 + }, + { + "epoch": 0.7526469261987073, + "grad_norm": 0.2577452537286519, + "learning_rate": 3.043311540503233e-05, + "loss": 1.0037, + "step": 3912 + }, + { + "epoch": 0.7528393206072449, + "grad_norm": 0.36581308032506854, + "learning_rate": 3.0388359968081392e-05, + "loss": 1.0002, + "step": 3913 + }, + { + "epoch": 0.7530317150157824, + "grad_norm": 0.3511404345581437, + "learning_rate": 3.0343631567489327e-05, + "loss": 1.088, + "step": 3914 + }, + { + "epoch": 0.7532241094243198, + "grad_norm": 0.4218062449676212, + "learning_rate": 3.0298930220628086e-05, + "loss": 0.998, + "step": 3915 + }, + { + "epoch": 0.7534165038328574, + "grad_norm": 0.34087832583861805, + "learning_rate": 3.0254255944859187e-05, + "loss": 1.1089, + "step": 3916 + }, + { + "epoch": 0.7536088982413949, + "grad_norm": 0.2992297630211009, + "learning_rate": 3.0209608757533625e-05, + "loss": 1.0122, + "step": 3917 + }, + { + "epoch": 0.7538012926499323, + "grad_norm": 0.3383432519795849, + "learning_rate": 3.0164988675991768e-05, + "loss": 1.1073, + "step": 3918 + }, + { + "epoch": 0.7539936870584698, + "grad_norm": 0.2708614192043716, + "learning_rate": 3.0120395717563653e-05, + "loss": 1.04, + "step": 3919 + }, + { + "epoch": 0.7541860814670074, + "grad_norm": 0.31613015846150794, + "learning_rate": 3.0075829899568597e-05, + "loss": 1.0496, + "step": 3920 + }, + { + "epoch": 0.7543784758755449, + "grad_norm": 0.278483091937346, + "learning_rate": 3.0031291239315475e-05, + "loss": 1.0537, + "step": 3921 + }, + { + "epoch": 0.7545708702840823, + "grad_norm": 0.33118449322408994, + "learning_rate": 2.9986779754102613e-05, + "loss": 1.0932, + "step": 3922 + }, + { + "epoch": 0.7547632646926199, + "grad_norm": 0.27147766967211845, + "learning_rate": 2.9942295461217696e-05, + "loss": 1.0697, + "step": 3923 + }, + { + "epoch": 0.7549556591011574, + "grad_norm": 0.2959924821180381, + "learning_rate": 2.9897838377937947e-05, + "loss": 1.0275, + "step": 3924 + }, + { + "epoch": 0.7551480535096948, + "grad_norm": 0.29312862332293554, + "learning_rate": 
2.985340852152999e-05, + "loss": 0.9969, + "step": 3925 + }, + { + "epoch": 0.7553404479182324, + "grad_norm": 0.29010737144046506, + "learning_rate": 2.9809005909249864e-05, + "loss": 0.9731, + "step": 3926 + }, + { + "epoch": 0.7555328423267699, + "grad_norm": 0.2893645304415515, + "learning_rate": 2.9764630558343067e-05, + "loss": 0.9537, + "step": 3927 + }, + { + "epoch": 0.7557252367353073, + "grad_norm": 0.3111618087743285, + "learning_rate": 2.9720282486044403e-05, + "loss": 1.0596, + "step": 3928 + }, + { + "epoch": 0.7559176311438449, + "grad_norm": 0.34666246722633387, + "learning_rate": 2.9675961709578194e-05, + "loss": 1.0351, + "step": 3929 + }, + { + "epoch": 0.7561100255523824, + "grad_norm": 0.40738874844638784, + "learning_rate": 2.9631668246158107e-05, + "loss": 1.168, + "step": 3930 + }, + { + "epoch": 0.7563024199609198, + "grad_norm": 0.44619290769415537, + "learning_rate": 2.958740211298722e-05, + "loss": 0.9292, + "step": 3931 + }, + { + "epoch": 0.7564948143694574, + "grad_norm": 0.3505806871130929, + "learning_rate": 2.9543163327258017e-05, + "loss": 1.0345, + "step": 3932 + }, + { + "epoch": 0.7566872087779949, + "grad_norm": 0.3199085047775709, + "learning_rate": 2.9498951906152272e-05, + "loss": 1.0892, + "step": 3933 + }, + { + "epoch": 0.7568796031865324, + "grad_norm": 0.3700027640340649, + "learning_rate": 2.9454767866841227e-05, + "loss": 1.0796, + "step": 3934 + }, + { + "epoch": 0.7570719975950699, + "grad_norm": 0.3488826836303916, + "learning_rate": 2.9410611226485452e-05, + "loss": 1.0706, + "step": 3935 + }, + { + "epoch": 0.7572643920036074, + "grad_norm": 0.2997754622357488, + "learning_rate": 2.9366482002234874e-05, + "loss": 1.0116, + "step": 3936 + }, + { + "epoch": 0.7574567864121449, + "grad_norm": 0.35128922618999814, + "learning_rate": 2.932238021122877e-05, + "loss": 0.9734, + "step": 3937 + }, + { + "epoch": 0.7576491808206824, + "grad_norm": 0.28937176822519944, + "learning_rate": 2.9278305870595812e-05, + "loss": 1.0275, + "step": 3938 + }, + { + "epoch": 0.7578415752292199, + "grad_norm": 0.3409422796756947, + "learning_rate": 2.92342589974539e-05, + "loss": 1.0885, + "step": 3939 + }, + { + "epoch": 0.7580339696377574, + "grad_norm": 0.284463571268749, + "learning_rate": 2.9190239608910387e-05, + "loss": 0.9913, + "step": 3940 + }, + { + "epoch": 0.758226364046295, + "grad_norm": 0.2989666518432513, + "learning_rate": 2.9146247722061802e-05, + "loss": 1.0357, + "step": 3941 + }, + { + "epoch": 0.7584187584548324, + "grad_norm": 0.3633222837419775, + "learning_rate": 2.9102283353994186e-05, + "loss": 1.0235, + "step": 3942 + }, + { + "epoch": 0.7586111528633699, + "grad_norm": 0.33979945860743493, + "learning_rate": 2.9058346521782797e-05, + "loss": 0.9876, + "step": 3943 + }, + { + "epoch": 0.7588035472719075, + "grad_norm": 0.3062559852702967, + "learning_rate": 2.9014437242492132e-05, + "loss": 1.0306, + "step": 3944 + }, + { + "epoch": 0.7589959416804449, + "grad_norm": 0.37490172386932863, + "learning_rate": 2.897055553317607e-05, + "loss": 0.9268, + "step": 3945 + }, + { + "epoch": 0.7591883360889824, + "grad_norm": 0.3175710898752762, + "learning_rate": 2.8926701410877798e-05, + "loss": 1.0257, + "step": 3946 + }, + { + "epoch": 0.7593807304975199, + "grad_norm": 0.3396488431854097, + "learning_rate": 2.8882874892629652e-05, + "loss": 1.0612, + "step": 3947 + }, + { + "epoch": 0.7595731249060574, + "grad_norm": 0.31035183217118995, + "learning_rate": 2.883907599545348e-05, + "loss": 1.0694, + "step": 3948 + }, + { + "epoch": 
0.7597655193145949, + "grad_norm": 0.2860007827521177, + "learning_rate": 2.8795304736360184e-05, + "loss": 1.1063, + "step": 3949 + }, + { + "epoch": 0.7599579137231324, + "grad_norm": 0.3330913290487703, + "learning_rate": 2.8751561132350024e-05, + "loss": 1.1057, + "step": 3950 + }, + { + "epoch": 0.76015030813167, + "grad_norm": 0.34261009963535755, + "learning_rate": 2.8707845200412565e-05, + "loss": 1.0867, + "step": 3951 + }, + { + "epoch": 0.7603427025402074, + "grad_norm": 0.37266434923093356, + "learning_rate": 2.866415695752649e-05, + "loss": 1.0558, + "step": 3952 + }, + { + "epoch": 0.7605350969487449, + "grad_norm": 0.31175911979080434, + "learning_rate": 2.862049642065986e-05, + "loss": 1.031, + "step": 3953 + }, + { + "epoch": 0.7607274913572825, + "grad_norm": 0.370050910254662, + "learning_rate": 2.857686360676991e-05, + "loss": 1.1007, + "step": 3954 + }, + { + "epoch": 0.7609198857658199, + "grad_norm": 0.3421057723878658, + "learning_rate": 2.853325853280312e-05, + "loss": 1.0797, + "step": 3955 + }, + { + "epoch": 0.7611122801743574, + "grad_norm": 0.3832631987897684, + "learning_rate": 2.848968121569524e-05, + "loss": 1.054, + "step": 3956 + }, + { + "epoch": 0.761304674582895, + "grad_norm": 0.37275664296860284, + "learning_rate": 2.8446131672371135e-05, + "loss": 1.0912, + "step": 3957 + }, + { + "epoch": 0.7614970689914324, + "grad_norm": 0.3304387465465502, + "learning_rate": 2.8402609919744972e-05, + "loss": 0.983, + "step": 3958 + }, + { + "epoch": 0.7616894633999699, + "grad_norm": 0.30194453818165623, + "learning_rate": 2.8359115974720096e-05, + "loss": 1.0302, + "step": 3959 + }, + { + "epoch": 0.7618818578085075, + "grad_norm": 0.28967776736045325, + "learning_rate": 2.8315649854189062e-05, + "loss": 1.129, + "step": 3960 + }, + { + "epoch": 0.762074252217045, + "grad_norm": 0.30055465406949444, + "learning_rate": 2.8272211575033636e-05, + "loss": 1.069, + "step": 3961 + }, + { + "epoch": 0.7622666466255824, + "grad_norm": 0.3180402868348469, + "learning_rate": 2.8228801154124685e-05, + "loss": 1.0631, + "step": 3962 + }, + { + "epoch": 0.76245904103412, + "grad_norm": 0.3229325701714879, + "learning_rate": 2.8185418608322344e-05, + "loss": 1.0492, + "step": 3963 + }, + { + "epoch": 0.7626514354426575, + "grad_norm": 0.3128977147863264, + "learning_rate": 2.8142063954475927e-05, + "loss": 1.0094, + "step": 3964 + }, + { + "epoch": 0.7628438298511949, + "grad_norm": 0.3463474623140369, + "learning_rate": 2.8098737209423797e-05, + "loss": 1.0431, + "step": 3965 + }, + { + "epoch": 0.7630362242597325, + "grad_norm": 0.35924171537174326, + "learning_rate": 2.8055438389993637e-05, + "loss": 1.0167, + "step": 3966 + }, + { + "epoch": 0.76322861866827, + "grad_norm": 0.4179875926715584, + "learning_rate": 2.801216751300223e-05, + "loss": 1.1283, + "step": 3967 + }, + { + "epoch": 0.7634210130768074, + "grad_norm": 0.3081530657285735, + "learning_rate": 2.7968924595255407e-05, + "loss": 1.0301, + "step": 3968 + }, + { + "epoch": 0.763613407485345, + "grad_norm": 0.31872859290972033, + "learning_rate": 2.792570965354829e-05, + "loss": 1.1135, + "step": 3969 + }, + { + "epoch": 0.7638058018938825, + "grad_norm": 0.3244520426746989, + "learning_rate": 2.788252270466497e-05, + "loss": 1.1205, + "step": 3970 + }, + { + "epoch": 0.7639981963024199, + "grad_norm": 0.3705981929460267, + "learning_rate": 2.7839363765378858e-05, + "loss": 0.9965, + "step": 3971 + }, + { + "epoch": 0.7641905907109575, + "grad_norm": 0.31379350959289637, + "learning_rate": 
2.7796232852452377e-05, + "loss": 0.9563, + "step": 3972 + }, + { + "epoch": 0.764382985119495, + "grad_norm": 0.31576895213515366, + "learning_rate": 2.7753129982637026e-05, + "loss": 1.0151, + "step": 3973 + }, + { + "epoch": 0.7645753795280325, + "grad_norm": 0.2757703721846459, + "learning_rate": 2.7710055172673488e-05, + "loss": 1.0403, + "step": 3974 + }, + { + "epoch": 0.7647677739365699, + "grad_norm": 0.3968575039251857, + "learning_rate": 2.7667008439291554e-05, + "loss": 0.9872, + "step": 3975 + }, + { + "epoch": 0.7649601683451075, + "grad_norm": 0.3630510456845327, + "learning_rate": 2.762398979920998e-05, + "loss": 1.1546, + "step": 3976 + }, + { + "epoch": 0.765152562753645, + "grad_norm": 0.26759335699516074, + "learning_rate": 2.7580999269136855e-05, + "loss": 1.0407, + "step": 3977 + }, + { + "epoch": 0.7653449571621824, + "grad_norm": 0.2884702975494463, + "learning_rate": 2.753803686576909e-05, + "loss": 1.0551, + "step": 3978 + }, + { + "epoch": 0.76553735157072, + "grad_norm": 0.3156002348613051, + "learning_rate": 2.7495102605792823e-05, + "loss": 1.0669, + "step": 3979 + }, + { + "epoch": 0.7657297459792575, + "grad_norm": 0.2885612326710155, + "learning_rate": 2.7452196505883265e-05, + "loss": 1.0386, + "step": 3980 + }, + { + "epoch": 0.7659221403877949, + "grad_norm": 0.3047927811751598, + "learning_rate": 2.740931858270459e-05, + "loss": 1.087, + "step": 3981 + }, + { + "epoch": 0.7661145347963325, + "grad_norm": 0.30871845918423163, + "learning_rate": 2.736646885291011e-05, + "loss": 1.0866, + "step": 3982 + }, + { + "epoch": 0.76630692920487, + "grad_norm": 0.3388384889666327, + "learning_rate": 2.7323647333142177e-05, + "loss": 1.0817, + "step": 3983 + }, + { + "epoch": 0.7664993236134074, + "grad_norm": 0.46564906665760697, + "learning_rate": 2.7280854040032165e-05, + "loss": 1.0577, + "step": 3984 + }, + { + "epoch": 0.766691718021945, + "grad_norm": 0.38134204213507994, + "learning_rate": 2.723808899020054e-05, + "loss": 0.9901, + "step": 3985 + }, + { + "epoch": 0.7668841124304825, + "grad_norm": 0.2965798398979628, + "learning_rate": 2.7195352200256674e-05, + "loss": 1.0966, + "step": 3986 + }, + { + "epoch": 0.76707650683902, + "grad_norm": 0.32761091388500674, + "learning_rate": 2.7152643686799094e-05, + "loss": 1.0445, + "step": 3987 + }, + { + "epoch": 0.7672689012475575, + "grad_norm": 0.2864715208624762, + "learning_rate": 2.710996346641528e-05, + "loss": 1.0355, + "step": 3988 + }, + { + "epoch": 0.767461295656095, + "grad_norm": 0.29936767644953005, + "learning_rate": 2.7067311555681753e-05, + "loss": 1.0557, + "step": 3989 + }, + { + "epoch": 0.7676536900646325, + "grad_norm": 0.3538820552764843, + "learning_rate": 2.702468797116403e-05, + "loss": 1.0068, + "step": 3990 + }, + { + "epoch": 0.76784608447317, + "grad_norm": 0.2966248862206346, + "learning_rate": 2.6982092729416587e-05, + "loss": 1.0498, + "step": 3991 + }, + { + "epoch": 0.7680384788817075, + "grad_norm": 0.29188308974065, + "learning_rate": 2.693952584698294e-05, + "loss": 1.0997, + "step": 3992 + }, + { + "epoch": 0.768230873290245, + "grad_norm": 0.3067091517693415, + "learning_rate": 2.6896987340395607e-05, + "loss": 1.0556, + "step": 3993 + }, + { + "epoch": 0.7684232676987826, + "grad_norm": 0.355518303275076, + "learning_rate": 2.6854477226175966e-05, + "loss": 1.0518, + "step": 3994 + }, + { + "epoch": 0.76861566210732, + "grad_norm": 0.39579897449486906, + "learning_rate": 2.6811995520834542e-05, + "loss": 1.1298, + "step": 3995 + }, + { + "epoch": 0.7688080565158575, + 
"grad_norm": 0.30281406871292027, + "learning_rate": 2.676954224087075e-05, + "loss": 1.021, + "step": 3996 + }, + { + "epoch": 0.7690004509243951, + "grad_norm": 0.3343113865114453, + "learning_rate": 2.6727117402772884e-05, + "loss": 1.0501, + "step": 3997 + }, + { + "epoch": 0.7691928453329325, + "grad_norm": 0.33618485022413086, + "learning_rate": 2.668472102301829e-05, + "loss": 0.9161, + "step": 3998 + }, + { + "epoch": 0.76938523974147, + "grad_norm": 0.3314610417572652, + "learning_rate": 2.664235311807327e-05, + "loss": 0.9741, + "step": 3999 + }, + { + "epoch": 0.7695776341500075, + "grad_norm": 0.3814914781027189, + "learning_rate": 2.6600013704392946e-05, + "loss": 1.1172, + "step": 4000 + }, + { + "epoch": 0.769770028558545, + "grad_norm": 0.37555382252086283, + "learning_rate": 2.6557702798421568e-05, + "loss": 0.9957, + "step": 4001 + }, + { + "epoch": 0.7699624229670825, + "grad_norm": 0.3285949684646368, + "learning_rate": 2.6515420416592106e-05, + "loss": 0.9902, + "step": 4002 + }, + { + "epoch": 0.77015481737562, + "grad_norm": 0.3272101517196965, + "learning_rate": 2.6473166575326604e-05, + "loss": 1.0029, + "step": 4003 + }, + { + "epoch": 0.7703472117841575, + "grad_norm": 0.3428322977780469, + "learning_rate": 2.643094129103598e-05, + "loss": 1.0134, + "step": 4004 + }, + { + "epoch": 0.770539606192695, + "grad_norm": 0.38987663832369335, + "learning_rate": 2.6388744580119974e-05, + "loss": 1.0692, + "step": 4005 + }, + { + "epoch": 0.7707320006012325, + "grad_norm": 0.30997584813617124, + "learning_rate": 2.6346576458967398e-05, + "loss": 1.077, + "step": 4006 + }, + { + "epoch": 0.7709243950097701, + "grad_norm": 0.32291783511857325, + "learning_rate": 2.630443694395579e-05, + "loss": 1.026, + "step": 4007 + }, + { + "epoch": 0.7711167894183075, + "grad_norm": 0.3869910695556213, + "learning_rate": 2.626232605145168e-05, + "loss": 1.0728, + "step": 4008 + }, + { + "epoch": 0.771309183826845, + "grad_norm": 0.2626535001714557, + "learning_rate": 2.6220243797810485e-05, + "loss": 1.0662, + "step": 4009 + }, + { + "epoch": 0.7715015782353826, + "grad_norm": 0.28559239083046223, + "learning_rate": 2.6178190199376396e-05, + "loss": 1.0688, + "step": 4010 + }, + { + "epoch": 0.77169397264392, + "grad_norm": 0.2975260127829443, + "learning_rate": 2.6136165272482594e-05, + "loss": 1.0007, + "step": 4011 + }, + { + "epoch": 0.7718863670524575, + "grad_norm": 0.2799224205086977, + "learning_rate": 2.6094169033451066e-05, + "loss": 1.1006, + "step": 4012 + }, + { + "epoch": 0.7720787614609951, + "grad_norm": 0.3384434653268322, + "learning_rate": 2.6052201498592667e-05, + "loss": 1.0642, + "step": 4013 + }, + { + "epoch": 0.7722711558695325, + "grad_norm": 0.38372593099633906, + "learning_rate": 2.6010262684207133e-05, + "loss": 1.1053, + "step": 4014 + }, + { + "epoch": 0.77246355027807, + "grad_norm": 0.3479139414621463, + "learning_rate": 2.596835260658297e-05, + "loss": 1.062, + "step": 4015 + }, + { + "epoch": 0.7726559446866076, + "grad_norm": 0.35415062773453876, + "learning_rate": 2.5926471281997577e-05, + "loss": 1.1097, + "step": 4016 + }, + { + "epoch": 0.772848339095145, + "grad_norm": 0.3110110335495718, + "learning_rate": 2.588461872671719e-05, + "loss": 1.1365, + "step": 4017 + }, + { + "epoch": 0.7730407335036825, + "grad_norm": 0.36186703670247633, + "learning_rate": 2.5842794956996865e-05, + "loss": 1.1057, + "step": 4018 + }, + { + "epoch": 0.7732331279122201, + "grad_norm": 0.33453047473056935, + "learning_rate": 2.5800999989080487e-05, + "loss": 1.0175, 
+ "step": 4019 + }, + { + "epoch": 0.7734255223207576, + "grad_norm": 0.308594799556121, + "learning_rate": 2.575923383920069e-05, + "loss": 1.0826, + "step": 4020 + }, + { + "epoch": 0.773617916729295, + "grad_norm": 0.3469832755249014, + "learning_rate": 2.5717496523578998e-05, + "loss": 1.198, + "step": 4021 + }, + { + "epoch": 0.7738103111378326, + "grad_norm": 0.3534675435459272, + "learning_rate": 2.567578805842572e-05, + "loss": 1.1401, + "step": 4022 + }, + { + "epoch": 0.7740027055463701, + "grad_norm": 0.33708336044036163, + "learning_rate": 2.5634108459939877e-05, + "loss": 0.9757, + "step": 4023 + }, + { + "epoch": 0.7741950999549075, + "grad_norm": 0.3608360995548934, + "learning_rate": 2.5592457744309404e-05, + "loss": 1.0324, + "step": 4024 + }, + { + "epoch": 0.7743874943634451, + "grad_norm": 0.4737744576119984, + "learning_rate": 2.555083592771098e-05, + "loss": 1.0569, + "step": 4025 + }, + { + "epoch": 0.7745798887719826, + "grad_norm": 0.3067064860873511, + "learning_rate": 2.5509243026309982e-05, + "loss": 1.0173, + "step": 4026 + }, + { + "epoch": 0.77477228318052, + "grad_norm": 0.3430453853679413, + "learning_rate": 2.546767905626063e-05, + "loss": 1.0577, + "step": 4027 + }, + { + "epoch": 0.7749646775890575, + "grad_norm": 0.3397715439844128, + "learning_rate": 2.5426144033705935e-05, + "loss": 0.9836, + "step": 4028 + }, + { + "epoch": 0.7751570719975951, + "grad_norm": 0.417246128979903, + "learning_rate": 2.5384637974777515e-05, + "loss": 0.9902, + "step": 4029 + }, + { + "epoch": 0.7753494664061326, + "grad_norm": 0.27043056158554457, + "learning_rate": 2.5343160895595974e-05, + "loss": 1.0361, + "step": 4030 + }, + { + "epoch": 0.77554186081467, + "grad_norm": 0.27470490079667864, + "learning_rate": 2.530171281227044e-05, + "loss": 0.9939, + "step": 4031 + }, + { + "epoch": 0.7757342552232076, + "grad_norm": 0.2983011796892629, + "learning_rate": 2.52602937408989e-05, + "loss": 0.9877, + "step": 4032 + }, + { + "epoch": 0.7759266496317451, + "grad_norm": 0.3408758864071013, + "learning_rate": 2.5218903697568076e-05, + "loss": 1.0961, + "step": 4033 + }, + { + "epoch": 0.7761190440402825, + "grad_norm": 0.4308523035375894, + "learning_rate": 2.5177542698353317e-05, + "loss": 1.0146, + "step": 4034 + }, + { + "epoch": 0.7763114384488201, + "grad_norm": 0.2615637980872723, + "learning_rate": 2.5136210759318812e-05, + "loss": 1.035, + "step": 4035 + }, + { + "epoch": 0.7765038328573576, + "grad_norm": 0.30360119620260373, + "learning_rate": 2.5094907896517383e-05, + "loss": 1.0835, + "step": 4036 + }, + { + "epoch": 0.776696227265895, + "grad_norm": 0.348795839967339, + "learning_rate": 2.5053634125990587e-05, + "loss": 1.0246, + "step": 4037 + }, + { + "epoch": 0.7768886216744326, + "grad_norm": 0.33124235723714646, + "learning_rate": 2.5012389463768737e-05, + "loss": 1.0569, + "step": 4038 + }, + { + "epoch": 0.7770810160829701, + "grad_norm": 0.3895946928595779, + "learning_rate": 2.4971173925870693e-05, + "loss": 1.0102, + "step": 4039 + }, + { + "epoch": 0.7772734104915076, + "grad_norm": 0.27675749777566394, + "learning_rate": 2.4929987528304144e-05, + "loss": 1.0166, + "step": 4040 + }, + { + "epoch": 0.7774658049000451, + "grad_norm": 0.29956076371373347, + "learning_rate": 2.4888830287065412e-05, + "loss": 1.1142, + "step": 4041 + }, + { + "epoch": 0.7776581993085826, + "grad_norm": 0.3301533640561284, + "learning_rate": 2.484770221813949e-05, + "loss": 1.0976, + "step": 4042 + }, + { + "epoch": 0.7778505937171201, + "grad_norm": 0.34265420369693467, + 
"learning_rate": 2.4806603337500067e-05, + "loss": 0.9647, + "step": 4043 + }, + { + "epoch": 0.7780429881256576, + "grad_norm": 0.2734141583041449, + "learning_rate": 2.476553366110944e-05, + "loss": 1.0359, + "step": 4044 + }, + { + "epoch": 0.7782353825341951, + "grad_norm": 0.3015593986202266, + "learning_rate": 2.4724493204918596e-05, + "loss": 1.1342, + "step": 4045 + }, + { + "epoch": 0.7784277769427326, + "grad_norm": 0.2836461622925106, + "learning_rate": 2.4683481984867207e-05, + "loss": 1.074, + "step": 4046 + }, + { + "epoch": 0.7786201713512702, + "grad_norm": 0.43075273223704935, + "learning_rate": 2.4642500016883528e-05, + "loss": 1.0518, + "step": 4047 + }, + { + "epoch": 0.7788125657598076, + "grad_norm": 0.2819826605565669, + "learning_rate": 2.4601547316884543e-05, + "loss": 0.9599, + "step": 4048 + }, + { + "epoch": 0.7790049601683451, + "grad_norm": 0.3592609629587011, + "learning_rate": 2.4560623900775727e-05, + "loss": 1.0204, + "step": 4049 + }, + { + "epoch": 0.7791973545768827, + "grad_norm": 0.31598771590263175, + "learning_rate": 2.4519729784451295e-05, + "loss": 1.0321, + "step": 4050 + }, + { + "epoch": 0.7793897489854201, + "grad_norm": 0.3115038335678325, + "learning_rate": 2.4478864983794093e-05, + "loss": 1.0711, + "step": 4051 + }, + { + "epoch": 0.7795821433939576, + "grad_norm": 0.3357033311514998, + "learning_rate": 2.443802951467544e-05, + "loss": 1.0346, + "step": 4052 + }, + { + "epoch": 0.7797745378024951, + "grad_norm": 0.3419375048866406, + "learning_rate": 2.4397223392955447e-05, + "loss": 1.0076, + "step": 4053 + }, + { + "epoch": 0.7799669322110326, + "grad_norm": 0.39253540462547415, + "learning_rate": 2.4356446634482754e-05, + "loss": 1.0256, + "step": 4054 + }, + { + "epoch": 0.7801593266195701, + "grad_norm": 0.3394767314481405, + "learning_rate": 2.4315699255094515e-05, + "loss": 1.1117, + "step": 4055 + }, + { + "epoch": 0.7803517210281076, + "grad_norm": 0.3528472401618156, + "learning_rate": 2.4274981270616583e-05, + "loss": 0.9493, + "step": 4056 + }, + { + "epoch": 0.7805441154366451, + "grad_norm": 0.2999978646617924, + "learning_rate": 2.4234292696863358e-05, + "loss": 1.0493, + "step": 4057 + }, + { + "epoch": 0.7807365098451826, + "grad_norm": 0.3305828017367092, + "learning_rate": 2.419363354963776e-05, + "loss": 0.934, + "step": 4058 + }, + { + "epoch": 0.7809289042537201, + "grad_norm": 0.3131905305565756, + "learning_rate": 2.4153003844731425e-05, + "loss": 1.0465, + "step": 4059 + }, + { + "epoch": 0.7811212986622577, + "grad_norm": 0.311768628899431, + "learning_rate": 2.4112403597924384e-05, + "loss": 1.0466, + "step": 4060 + }, + { + "epoch": 0.7813136930707951, + "grad_norm": 0.31447781093658556, + "learning_rate": 2.407183282498534e-05, + "loss": 1.0875, + "step": 4061 + }, + { + "epoch": 0.7815060874793326, + "grad_norm": 0.31905288290593364, + "learning_rate": 2.403129154167153e-05, + "loss": 1.0108, + "step": 4062 + }, + { + "epoch": 0.7816984818878702, + "grad_norm": 0.2985873251330728, + "learning_rate": 2.3990779763728666e-05, + "loss": 1.0744, + "step": 4063 + }, + { + "epoch": 0.7818908762964076, + "grad_norm": 0.2806404781368688, + "learning_rate": 2.3950297506891084e-05, + "loss": 1.0115, + "step": 4064 + }, + { + "epoch": 0.7820832707049451, + "grad_norm": 0.30439516972919456, + "learning_rate": 2.390984478688164e-05, + "loss": 1.0799, + "step": 4065 + }, + { + "epoch": 0.7822756651134827, + "grad_norm": 0.28342634146353857, + "learning_rate": 2.386942161941169e-05, + "loss": 1.0225, + "step": 4066 + }, + { + 
"epoch": 0.7824680595220201, + "grad_norm": 0.38117154873694775, + "learning_rate": 2.3829028020181155e-05, + "loss": 1.1151, + "step": 4067 + }, + { + "epoch": 0.7826604539305576, + "grad_norm": 0.36278378623171886, + "learning_rate": 2.37886640048784e-05, + "loss": 0.9536, + "step": 4068 + }, + { + "epoch": 0.7828528483390952, + "grad_norm": 0.33845556870268984, + "learning_rate": 2.374832958918035e-05, + "loss": 1.0774, + "step": 4069 + }, + { + "epoch": 0.7830452427476327, + "grad_norm": 0.2963930998501609, + "learning_rate": 2.370802478875245e-05, + "loss": 1.1321, + "step": 4070 + }, + { + "epoch": 0.7832376371561701, + "grad_norm": 0.2681621272910231, + "learning_rate": 2.3667749619248614e-05, + "loss": 1.1041, + "step": 4071 + }, + { + "epoch": 0.7834300315647077, + "grad_norm": 0.27603200007243145, + "learning_rate": 2.362750409631127e-05, + "loss": 1.1226, + "step": 4072 + }, + { + "epoch": 0.7836224259732452, + "grad_norm": 0.28788382478046276, + "learning_rate": 2.3587288235571258e-05, + "loss": 1.0804, + "step": 4073 + }, + { + "epoch": 0.7838148203817826, + "grad_norm": 0.3370535206192947, + "learning_rate": 2.3547102052648006e-05, + "loss": 0.9467, + "step": 4074 + }, + { + "epoch": 0.7840072147903202, + "grad_norm": 0.34986406364633094, + "learning_rate": 2.350694556314934e-05, + "loss": 1.0536, + "step": 4075 + }, + { + "epoch": 0.7841996091988577, + "grad_norm": 0.27240378127577797, + "learning_rate": 2.3466818782671596e-05, + "loss": 1.0921, + "step": 4076 + }, + { + "epoch": 0.7843920036073951, + "grad_norm": 0.2812033355321229, + "learning_rate": 2.3426721726799573e-05, + "loss": 1.0325, + "step": 4077 + }, + { + "epoch": 0.7845843980159327, + "grad_norm": 0.34031903232887356, + "learning_rate": 2.3386654411106447e-05, + "loss": 1.0514, + "step": 4078 + }, + { + "epoch": 0.7847767924244702, + "grad_norm": 0.3046854755934307, + "learning_rate": 2.3346616851153933e-05, + "loss": 1.0023, + "step": 4079 + }, + { + "epoch": 0.7849691868330076, + "grad_norm": 0.32665786393550217, + "learning_rate": 2.330660906249218e-05, + "loss": 1.0162, + "step": 4080 + }, + { + "epoch": 0.7851615812415451, + "grad_norm": 0.3214540785873001, + "learning_rate": 2.3266631060659682e-05, + "loss": 1.0427, + "step": 4081 + }, + { + "epoch": 0.7853539756500827, + "grad_norm": 0.35325189378993976, + "learning_rate": 2.3226682861183503e-05, + "loss": 0.9747, + "step": 4082 + }, + { + "epoch": 0.7855463700586202, + "grad_norm": 0.3210495994588188, + "learning_rate": 2.318676447957907e-05, + "loss": 1.0729, + "step": 4083 + }, + { + "epoch": 0.7857387644671576, + "grad_norm": 0.2880568003418139, + "learning_rate": 2.3146875931350166e-05, + "loss": 0.9654, + "step": 4084 + }, + { + "epoch": 0.7859311588756952, + "grad_norm": 0.28606074795437947, + "learning_rate": 2.310701723198908e-05, + "loss": 1.0605, + "step": 4085 + }, + { + "epoch": 0.7861235532842327, + "grad_norm": 0.28690140399214414, + "learning_rate": 2.306718839697648e-05, + "loss": 1.0703, + "step": 4086 + }, + { + "epoch": 0.7863159476927701, + "grad_norm": 0.28690140399214414, + "learning_rate": 2.306718839697648e-05, + "loss": 1.102, + "step": 4087 + }, + { + "epoch": 0.7865083421013077, + "grad_norm": 0.2901468566759315, + "learning_rate": 2.3027389441781366e-05, + "loss": 1.0635, + "step": 4088 + }, + { + "epoch": 0.7867007365098452, + "grad_norm": 0.4363590695428236, + "learning_rate": 2.2987620381861285e-05, + "loss": 1.0649, + "step": 4089 + }, + { + "epoch": 0.7868931309183826, + "grad_norm": 0.30619329026243897, + 
"learning_rate": 2.2947881232662006e-05, + "loss": 1.0209, + "step": 4090 + }, + { + "epoch": 0.7870855253269202, + "grad_norm": 0.3140748196324708, + "learning_rate": 2.290817200961779e-05, + "loss": 0.9419, + "step": 4091 + }, + { + "epoch": 0.7872779197354577, + "grad_norm": 0.276800857021627, + "learning_rate": 2.2868492728151258e-05, + "loss": 1.0503, + "step": 4092 + }, + { + "epoch": 0.7874703141439952, + "grad_norm": 0.2720990516313734, + "learning_rate": 2.282884340367334e-05, + "loss": 1.0954, + "step": 4093 + }, + { + "epoch": 0.7876627085525327, + "grad_norm": 0.3307703329262707, + "learning_rate": 2.2789224051583403e-05, + "loss": 0.9635, + "step": 4094 + }, + { + "epoch": 0.7878551029610702, + "grad_norm": 0.33231629737588114, + "learning_rate": 2.274963468726914e-05, + "loss": 1.0863, + "step": 4095 + }, + { + "epoch": 0.7880474973696077, + "grad_norm": 0.3252652585237453, + "learning_rate": 2.2710075326106617e-05, + "loss": 1.0173, + "step": 4096 + }, + { + "epoch": 0.7882398917781452, + "grad_norm": 0.3220119322351853, + "learning_rate": 2.2670545983460243e-05, + "loss": 1.0327, + "step": 4097 + }, + { + "epoch": 0.7884322861866827, + "grad_norm": 0.3501618840649719, + "learning_rate": 2.263104667468272e-05, + "loss": 1.0293, + "step": 4098 + }, + { + "epoch": 0.7886246805952202, + "grad_norm": 0.31475961614477027, + "learning_rate": 2.259157741511515e-05, + "loss": 1.0332, + "step": 4099 + }, + { + "epoch": 0.7888170750037578, + "grad_norm": 0.37200819679911085, + "learning_rate": 2.2552138220086927e-05, + "loss": 1.0637, + "step": 4100 + }, + { + "epoch": 0.7890094694122952, + "grad_norm": 0.3278409661272568, + "learning_rate": 2.2512729104915786e-05, + "loss": 0.9842, + "step": 4101 + }, + { + "epoch": 0.7892018638208327, + "grad_norm": 0.2960526646228673, + "learning_rate": 2.2473350084907806e-05, + "loss": 1.0734, + "step": 4102 + }, + { + "epoch": 0.7893942582293703, + "grad_norm": 0.392294481516786, + "learning_rate": 2.243400117535729e-05, + "loss": 1.0329, + "step": 4103 + }, + { + "epoch": 0.7895866526379077, + "grad_norm": 0.4708160441664302, + "learning_rate": 2.2394682391546928e-05, + "loss": 0.9351, + "step": 4104 + }, + { + "epoch": 0.7897790470464452, + "grad_norm": 0.34827263568985245, + "learning_rate": 2.23553937487477e-05, + "loss": 1.0339, + "step": 4105 + }, + { + "epoch": 0.7899714414549827, + "grad_norm": 0.3523262710672966, + "learning_rate": 2.2316135262218785e-05, + "loss": 0.9533, + "step": 4106 + }, + { + "epoch": 0.7901638358635202, + "grad_norm": 0.3467557651511863, + "learning_rate": 2.2276906947207844e-05, + "loss": 1.0628, + "step": 4107 + }, + { + "epoch": 0.7903562302720577, + "grad_norm": 0.2813250676517662, + "learning_rate": 2.2237708818950607e-05, + "loss": 0.9986, + "step": 4108 + }, + { + "epoch": 0.7905486246805952, + "grad_norm": 0.30823412400173916, + "learning_rate": 2.2198540892671215e-05, + "loss": 1.0536, + "step": 4109 + }, + { + "epoch": 0.7907410190891327, + "grad_norm": 0.29080862082233805, + "learning_rate": 2.215940318358206e-05, + "loss": 1.0727, + "step": 4110 + }, + { + "epoch": 0.7909334134976702, + "grad_norm": 0.3680696143843968, + "learning_rate": 2.2120295706883698e-05, + "loss": 1.0495, + "step": 4111 + }, + { + "epoch": 0.7911258079062077, + "grad_norm": 0.31380449212150596, + "learning_rate": 2.20812184777651e-05, + "loss": 1.0288, + "step": 4112 + }, + { + "epoch": 0.7913182023147453, + "grad_norm": 0.2743720210322785, + "learning_rate": 2.204217151140342e-05, + "loss": 1.0336, + "step": 4113 + }, + { + 
"epoch": 0.7915105967232827, + "grad_norm": 0.34140327841539325, + "learning_rate": 2.2003154822963978e-05, + "loss": 1.0649, + "step": 4114 + }, + { + "epoch": 0.7917029911318202, + "grad_norm": 0.3045718665457332, + "learning_rate": 2.196416842760046e-05, + "loss": 0.9731, + "step": 4115 + }, + { + "epoch": 0.7918953855403578, + "grad_norm": 0.33949965571767204, + "learning_rate": 2.1925212340454736e-05, + "loss": 0.9842, + "step": 4116 + }, + { + "epoch": 0.7920877799488952, + "grad_norm": 0.3102855794383734, + "learning_rate": 2.1886286576656835e-05, + "loss": 1.0188, + "step": 4117 + }, + { + "epoch": 0.7922801743574327, + "grad_norm": 0.3383555322216625, + "learning_rate": 2.184739115132517e-05, + "loss": 1.0952, + "step": 4118 + }, + { + "epoch": 0.7924725687659703, + "grad_norm": 0.302114197381981, + "learning_rate": 2.1808526079566217e-05, + "loss": 1.0001, + "step": 4119 + }, + { + "epoch": 0.7926649631745077, + "grad_norm": 0.30712967246705486, + "learning_rate": 2.176969137647472e-05, + "loss": 1.0103, + "step": 4120 + }, + { + "epoch": 0.7928573575830452, + "grad_norm": 0.33138108721276416, + "learning_rate": 2.1730887057133677e-05, + "loss": 1.0247, + "step": 4121 + }, + { + "epoch": 0.7930497519915828, + "grad_norm": 0.34536020755544034, + "learning_rate": 2.1692113136614177e-05, + "loss": 0.9655, + "step": 4122 + }, + { + "epoch": 0.7932421464001203, + "grad_norm": 0.2878807027628804, + "learning_rate": 2.1653369629975595e-05, + "loss": 1.0556, + "step": 4123 + }, + { + "epoch": 0.7934345408086577, + "grad_norm": 0.302434305619991, + "learning_rate": 2.1614656552265456e-05, + "loss": 1.0239, + "step": 4124 + }, + { + "epoch": 0.7936269352171953, + "grad_norm": 0.2625604788862428, + "learning_rate": 2.1575973918519486e-05, + "loss": 1.0518, + "step": 4125 + }, + { + "epoch": 0.7938193296257328, + "grad_norm": 0.3250231557601072, + "learning_rate": 2.1537321743761584e-05, + "loss": 0.9883, + "step": 4126 + }, + { + "epoch": 0.7940117240342702, + "grad_norm": 0.3161924556206067, + "learning_rate": 2.1498700043003773e-05, + "loss": 1.1181, + "step": 4127 + }, + { + "epoch": 0.7942041184428078, + "grad_norm": 0.36467580323113497, + "learning_rate": 2.1460108831246296e-05, + "loss": 1.0927, + "step": 4128 + }, + { + "epoch": 0.7943965128513453, + "grad_norm": 0.30205927433122226, + "learning_rate": 2.1421548123477532e-05, + "loss": 1.1026, + "step": 4129 + }, + { + "epoch": 0.7945889072598827, + "grad_norm": 0.3389016658532511, + "learning_rate": 2.138301793467401e-05, + "loss": 1.0773, + "step": 4130 + }, + { + "epoch": 0.7947813016684203, + "grad_norm": 0.2936165561136769, + "learning_rate": 2.1344518279800452e-05, + "loss": 1.0376, + "step": 4131 + }, + { + "epoch": 0.7949736960769578, + "grad_norm": 0.3244203334235697, + "learning_rate": 2.1306049173809615e-05, + "loss": 1.0074, + "step": 4132 + }, + { + "epoch": 0.7951660904854952, + "grad_norm": 0.3179106524364337, + "learning_rate": 2.1267610631642498e-05, + "loss": 1.0484, + "step": 4133 + }, + { + "epoch": 0.7953584848940327, + "grad_norm": 0.2872263205387523, + "learning_rate": 2.1229202668228197e-05, + "loss": 1.086, + "step": 4134 + }, + { + "epoch": 0.7955508793025703, + "grad_norm": 0.31792386893201335, + "learning_rate": 2.1190825298483852e-05, + "loss": 1.0655, + "step": 4135 + }, + { + "epoch": 0.7957432737111078, + "grad_norm": 0.3239416101435148, + "learning_rate": 2.115247853731488e-05, + "loss": 1.0877, + "step": 4136 + }, + { + "epoch": 0.7959356681196452, + "grad_norm": 0.28517445114722245, + 
"learning_rate": 2.11141623996147e-05, + "loss": 1.102, + "step": 4137 + }, + { + "epoch": 0.7961280625281828, + "grad_norm": 0.32037924197412265, + "learning_rate": 2.107587690026481e-05, + "loss": 0.9985, + "step": 4138 + }, + { + "epoch": 0.7963204569367203, + "grad_norm": 0.2873254753145235, + "learning_rate": 2.103762205413493e-05, + "loss": 1.0813, + "step": 4139 + }, + { + "epoch": 0.7965128513452577, + "grad_norm": 0.3233192744702412, + "learning_rate": 2.0999397876082728e-05, + "loss": 1.035, + "step": 4140 + }, + { + "epoch": 0.7967052457537953, + "grad_norm": 0.2961188079461089, + "learning_rate": 2.0961204380954036e-05, + "loss": 0.9644, + "step": 4141 + }, + { + "epoch": 0.7968976401623328, + "grad_norm": 0.33631847165289963, + "learning_rate": 2.092304158358286e-05, + "loss": 1.102, + "step": 4142 + }, + { + "epoch": 0.7970900345708702, + "grad_norm": 0.33500412017339415, + "learning_rate": 2.0884909498791104e-05, + "loss": 1.004, + "step": 4143 + }, + { + "epoch": 0.7972824289794078, + "grad_norm": 0.30883720658660496, + "learning_rate": 2.084680814138885e-05, + "loss": 0.9892, + "step": 4144 + }, + { + "epoch": 0.7974748233879453, + "grad_norm": 0.2818898328146198, + "learning_rate": 2.080873752617426e-05, + "loss": 0.9751, + "step": 4145 + }, + { + "epoch": 0.7976672177964828, + "grad_norm": 0.3585731462953241, + "learning_rate": 2.0770697667933437e-05, + "loss": 1.1006, + "step": 4146 + }, + { + "epoch": 0.7978596122050203, + "grad_norm": 0.2926286408042117, + "learning_rate": 2.073268858144074e-05, + "loss": 0.9301, + "step": 4147 + }, + { + "epoch": 0.7980520066135578, + "grad_norm": 0.29597234117131, + "learning_rate": 2.0694710281458373e-05, + "loss": 1.0619, + "step": 4148 + }, + { + "epoch": 0.7982444010220953, + "grad_norm": 0.3089719433180061, + "learning_rate": 2.0656762782736693e-05, + "loss": 1.1035, + "step": 4149 + }, + { + "epoch": 0.7984367954306328, + "grad_norm": 0.3454499371124246, + "learning_rate": 2.0618846100014112e-05, + "loss": 0.986, + "step": 4150 + }, + { + "epoch": 0.7986291898391703, + "grad_norm": 0.2938694679943425, + "learning_rate": 2.058096024801697e-05, + "loss": 1.2005, + "step": 4151 + }, + { + "epoch": 0.7988215842477078, + "grad_norm": 0.3510400028153644, + "learning_rate": 2.0543105241459715e-05, + "loss": 1.0692, + "step": 4152 + }, + { + "epoch": 0.7990139786562453, + "grad_norm": 0.3770395642067201, + "learning_rate": 2.0505281095044804e-05, + "loss": 1.0484, + "step": 4153 + }, + { + "epoch": 0.7992063730647828, + "grad_norm": 0.361746577092918, + "learning_rate": 2.0467487823462695e-05, + "loss": 1.0363, + "step": 4154 + }, + { + "epoch": 0.7993987674733203, + "grad_norm": 0.31233626713597756, + "learning_rate": 2.0429725441391888e-05, + "loss": 0.9825, + "step": 4155 + }, + { + "epoch": 0.7995911618818579, + "grad_norm": 0.3245864247833704, + "learning_rate": 2.0391993963498813e-05, + "loss": 0.9743, + "step": 4156 + }, + { + "epoch": 0.7997835562903953, + "grad_norm": 0.3105149021097417, + "learning_rate": 2.0354293404437965e-05, + "loss": 0.9958, + "step": 4157 + }, + { + "epoch": 0.7999759506989328, + "grad_norm": 0.3379959066219544, + "learning_rate": 2.0316623778851783e-05, + "loss": 0.969, + "step": 4158 + }, + { + "epoch": 0.8001683451074704, + "grad_norm": 0.2859122086892328, + "learning_rate": 2.027898510137075e-05, + "loss": 1.0724, + "step": 4159 + }, + { + "epoch": 0.8003607395160078, + "grad_norm": 0.34975418711012796, + "learning_rate": 2.024137738661329e-05, + "loss": 0.9916, + "step": 4160 + }, + { + "epoch": 
0.8005531339245453, + "grad_norm": 0.283130966063517, + "learning_rate": 2.0203800649185788e-05, + "loss": 1.0596, + "step": 4161 + }, + { + "epoch": 0.8007455283330828, + "grad_norm": 0.4038353386203545, + "learning_rate": 2.0166254903682603e-05, + "loss": 1.1049, + "step": 4162 + }, + { + "epoch": 0.8009379227416203, + "grad_norm": 0.3347735083989096, + "learning_rate": 2.0128740164686134e-05, + "loss": 1.0415, + "step": 4163 + }, + { + "epoch": 0.8011303171501578, + "grad_norm": 0.33174471154216084, + "learning_rate": 2.009125644676656e-05, + "loss": 1.0897, + "step": 4164 + }, + { + "epoch": 0.8013227115586953, + "grad_norm": 0.2889547781948744, + "learning_rate": 2.0053803764482227e-05, + "loss": 1.0109, + "step": 4165 + }, + { + "epoch": 0.8015151059672329, + "grad_norm": 0.3208742641100502, + "learning_rate": 2.0016382132379318e-05, + "loss": 1.0773, + "step": 4166 + }, + { + "epoch": 0.8017075003757703, + "grad_norm": 0.3036094637011469, + "learning_rate": 1.9978991564991913e-05, + "loss": 1.0771, + "step": 4167 + }, + { + "epoch": 0.8018998947843078, + "grad_norm": 0.4080670188996166, + "learning_rate": 1.994163207684212e-05, + "loss": 1.0422, + "step": 4168 + }, + { + "epoch": 0.8020922891928454, + "grad_norm": 0.3492900620252333, + "learning_rate": 1.9904303682439897e-05, + "loss": 1.063, + "step": 4169 + }, + { + "epoch": 0.8022846836013828, + "grad_norm": 0.2751590063723222, + "learning_rate": 1.986700639628316e-05, + "loss": 0.9833, + "step": 4170 + }, + { + "epoch": 0.8024770780099203, + "grad_norm": 0.34622534863558896, + "learning_rate": 1.9829740232857808e-05, + "loss": 0.9761, + "step": 4171 + }, + { + "epoch": 0.8026694724184579, + "grad_norm": 0.3159037009911428, + "learning_rate": 1.9792505206637524e-05, + "loss": 0.944, + "step": 4172 + }, + { + "epoch": 0.8028618668269953, + "grad_norm": 0.2983641727337406, + "learning_rate": 1.9755301332083997e-05, + "loss": 0.945, + "step": 4173 + }, + { + "epoch": 0.8030542612355328, + "grad_norm": 0.3546053658805946, + "learning_rate": 1.971812862364679e-05, + "loss": 1.0628, + "step": 4174 + }, + { + "epoch": 0.8032466556440704, + "grad_norm": 0.44468887222521297, + "learning_rate": 1.9680987095763313e-05, + "loss": 1.1259, + "step": 4175 + }, + { + "epoch": 0.8034390500526079, + "grad_norm": 0.30665208772499136, + "learning_rate": 1.9643876762858937e-05, + "loss": 1.0578, + "step": 4176 + }, + { + "epoch": 0.8036314444611453, + "grad_norm": 0.2890170031227219, + "learning_rate": 1.9606797639346874e-05, + "loss": 1.0149, + "step": 4177 + }, + { + "epoch": 0.8038238388696829, + "grad_norm": 0.30784286792350757, + "learning_rate": 1.9569749739628242e-05, + "loss": 1.0046, + "step": 4178 + }, + { + "epoch": 0.8040162332782204, + "grad_norm": 0.2784803979754186, + "learning_rate": 1.9532733078092037e-05, + "loss": 1.0657, + "step": 4179 + }, + { + "epoch": 0.8042086276867578, + "grad_norm": 0.3399332301498935, + "learning_rate": 1.949574766911506e-05, + "loss": 1.0492, + "step": 4180 + }, + { + "epoch": 0.8044010220952954, + "grad_norm": 0.32112326353734333, + "learning_rate": 1.9458793527062035e-05, + "loss": 1.108, + "step": 4181 + }, + { + "epoch": 0.8045934165038329, + "grad_norm": 0.30822347698308206, + "learning_rate": 1.9421870666285524e-05, + "loss": 1.0282, + "step": 4182 + }, + { + "epoch": 0.8047858109123703, + "grad_norm": 0.3359397490488531, + "learning_rate": 1.9384979101125943e-05, + "loss": 1.0438, + "step": 4183 + }, + { + "epoch": 0.8049782053209079, + "grad_norm": 0.2557747986998689, + "learning_rate": 
1.934811884591159e-05, + "loss": 1.1201, + "step": 4184 + }, + { + "epoch": 0.8051705997294454, + "grad_norm": 0.32207592881392766, + "learning_rate": 1.9311289914958497e-05, + "loss": 0.961, + "step": 4185 + }, + { + "epoch": 0.8053629941379828, + "grad_norm": 0.36412405685639604, + "learning_rate": 1.9274492322570615e-05, + "loss": 1.0364, + "step": 4186 + }, + { + "epoch": 0.8055553885465203, + "grad_norm": 0.2712936491840502, + "learning_rate": 1.923772608303972e-05, + "loss": 1.0981, + "step": 4187 + }, + { + "epoch": 0.8057477829550579, + "grad_norm": 0.31081075169339534, + "learning_rate": 1.920099121064539e-05, + "loss": 1.0806, + "step": 4188 + }, + { + "epoch": 0.8059401773635954, + "grad_norm": 0.3567761356839382, + "learning_rate": 1.9164287719655062e-05, + "loss": 1.0246, + "step": 4189 + }, + { + "epoch": 0.8061325717721328, + "grad_norm": 0.3404313493082168, + "learning_rate": 1.912761562432388e-05, + "loss": 1.0629, + "step": 4190 + }, + { + "epoch": 0.8063249661806704, + "grad_norm": 0.31730127105323097, + "learning_rate": 1.90909749388949e-05, + "loss": 1.1147, + "step": 4191 + }, + { + "epoch": 0.8065173605892079, + "grad_norm": 0.3250360068257254, + "learning_rate": 1.905436567759896e-05, + "loss": 1.007, + "step": 4192 + }, + { + "epoch": 0.8067097549977453, + "grad_norm": 0.3563080686918488, + "learning_rate": 1.901778785465461e-05, + "loss": 0.9851, + "step": 4193 + }, + { + "epoch": 0.8069021494062829, + "grad_norm": 0.3655226410932478, + "learning_rate": 1.898124148426832e-05, + "loss": 0.9737, + "step": 4194 + }, + { + "epoch": 0.8070945438148204, + "grad_norm": 0.31423297386602744, + "learning_rate": 1.8944726580634288e-05, + "loss": 1.1074, + "step": 4195 + }, + { + "epoch": 0.8072869382233578, + "grad_norm": 0.31576186727571104, + "learning_rate": 1.8908243157934423e-05, + "loss": 1.0248, + "step": 4196 + }, + { + "epoch": 0.8074793326318954, + "grad_norm": 0.3002823181128077, + "learning_rate": 1.8871791230338497e-05, + "loss": 1.1222, + "step": 4197 + }, + { + "epoch": 0.8076717270404329, + "grad_norm": 0.4105801184366847, + "learning_rate": 1.883537081200404e-05, + "loss": 0.9563, + "step": 4198 + }, + { + "epoch": 0.8078641214489704, + "grad_norm": 0.392641757221708, + "learning_rate": 1.8798981917076252e-05, + "loss": 1.0261, + "step": 4199 + }, + { + "epoch": 0.8080565158575079, + "grad_norm": 0.39782525902805865, + "learning_rate": 1.8762624559688256e-05, + "loss": 1.097, + "step": 4200 + }, + { + "epoch": 0.8082489102660454, + "grad_norm": 0.34534136262599896, + "learning_rate": 1.872629875396076e-05, + "loss": 1.0925, + "step": 4201 + }, + { + "epoch": 0.8084413046745829, + "grad_norm": 0.3509857390532199, + "learning_rate": 1.8690004514002313e-05, + "loss": 0.9611, + "step": 4202 + }, + { + "epoch": 0.8086336990831204, + "grad_norm": 0.3678830763617161, + "learning_rate": 1.86537418539092e-05, + "loss": 1.059, + "step": 4203 + }, + { + "epoch": 0.8088260934916579, + "grad_norm": 0.3260728921005822, + "learning_rate": 1.861751078776538e-05, + "loss": 0.9822, + "step": 4204 + }, + { + "epoch": 0.8090184879001954, + "grad_norm": 0.2638828516373688, + "learning_rate": 1.858131132964259e-05, + "loss": 1.0576, + "step": 4205 + }, + { + "epoch": 0.809210882308733, + "grad_norm": 0.2785485216506586, + "learning_rate": 1.8545143493600292e-05, + "loss": 0.9734, + "step": 4206 + }, + { + "epoch": 0.8094032767172704, + "grad_norm": 0.3809734279293067, + "learning_rate": 1.8509007293685667e-05, + "loss": 1.0106, + "step": 4207 + }, + { + "epoch": 
0.8095956711258079, + "grad_norm": 0.36715539055737184, + "learning_rate": 1.8472902743933607e-05, + "loss": 1.0611, + "step": 4208 + }, + { + "epoch": 0.8097880655343455, + "grad_norm": 0.35580377380689066, + "learning_rate": 1.8436829858366657e-05, + "loss": 1.0038, + "step": 4209 + }, + { + "epoch": 0.8099804599428829, + "grad_norm": 0.39956965072590633, + "learning_rate": 1.840078865099514e-05, + "loss": 1.0844, + "step": 4210 + }, + { + "epoch": 0.8101728543514204, + "grad_norm": 0.31385576585429487, + "learning_rate": 1.8364779135817044e-05, + "loss": 1.0548, + "step": 4211 + }, + { + "epoch": 0.810365248759958, + "grad_norm": 0.35592901429668644, + "learning_rate": 1.8328801326818046e-05, + "loss": 1.0441, + "step": 4212 + }, + { + "epoch": 0.8105576431684954, + "grad_norm": 0.3017258135852835, + "learning_rate": 1.829285523797155e-05, + "loss": 1.0783, + "step": 4213 + }, + { + "epoch": 0.8107500375770329, + "grad_norm": 0.35817488309160284, + "learning_rate": 1.8256940883238538e-05, + "loss": 0.988, + "step": 4214 + }, + { + "epoch": 0.8109424319855704, + "grad_norm": 0.3878324351241288, + "learning_rate": 1.822105827656776e-05, + "loss": 1.159, + "step": 4215 + }, + { + "epoch": 0.8111348263941079, + "grad_norm": 0.3971895888591802, + "learning_rate": 1.818520743189561e-05, + "loss": 1.0366, + "step": 4216 + }, + { + "epoch": 0.8113272208026454, + "grad_norm": 0.36828337174297815, + "learning_rate": 1.814938836314615e-05, + "loss": 1.0499, + "step": 4217 + }, + { + "epoch": 0.8115196152111829, + "grad_norm": 0.31104518170028983, + "learning_rate": 1.8113601084231092e-05, + "loss": 1.0339, + "step": 4218 + }, + { + "epoch": 0.8117120096197205, + "grad_norm": 0.3135971832808061, + "learning_rate": 1.8077845609049782e-05, + "loss": 0.9906, + "step": 4219 + }, + { + "epoch": 0.8119044040282579, + "grad_norm": 0.35736961857313027, + "learning_rate": 1.8042121951489256e-05, + "loss": 1.0318, + "step": 4220 + }, + { + "epoch": 0.8120967984367954, + "grad_norm": 0.34467707918278906, + "learning_rate": 1.800643012542418e-05, + "loss": 1.0819, + "step": 4221 + }, + { + "epoch": 0.812289192845333, + "grad_norm": 0.3352034971155479, + "learning_rate": 1.7970770144716774e-05, + "loss": 1.0092, + "step": 4222 + }, + { + "epoch": 0.8124815872538704, + "grad_norm": 0.31872179140101714, + "learning_rate": 1.7935142023217057e-05, + "loss": 0.8971, + "step": 4223 + }, + { + "epoch": 0.8126739816624079, + "grad_norm": 0.34400960317966045, + "learning_rate": 1.789954577476257e-05, + "loss": 1.0387, + "step": 4224 + }, + { + "epoch": 0.8128663760709455, + "grad_norm": 0.3223066208945814, + "learning_rate": 1.786398141317843e-05, + "loss": 1.0419, + "step": 4225 + }, + { + "epoch": 0.8130587704794829, + "grad_norm": 0.3153892303556073, + "learning_rate": 1.7828448952277453e-05, + "loss": 1.1225, + "step": 4226 + }, + { + "epoch": 0.8132511648880204, + "grad_norm": 0.2751318071489437, + "learning_rate": 1.7792948405860077e-05, + "loss": 1.0039, + "step": 4227 + }, + { + "epoch": 0.813443559296558, + "grad_norm": 0.30815973679032055, + "learning_rate": 1.7757479787714217e-05, + "loss": 1.0134, + "step": 4228 + }, + { + "epoch": 0.8136359537050954, + "grad_norm": 0.34134710946116986, + "learning_rate": 1.7722043111615573e-05, + "loss": 1.0606, + "step": 4229 + }, + { + "epoch": 0.8138283481136329, + "grad_norm": 0.3277704416233078, + "learning_rate": 1.7686638391327272e-05, + "loss": 1.0352, + "step": 4230 + }, + { + "epoch": 0.8140207425221705, + "grad_norm": 0.3642671718728077, + "learning_rate": 
1.765126564060011e-05, + "loss": 1.05, + "step": 4231 + }, + { + "epoch": 0.814213136930708, + "grad_norm": 0.322402312179894, + "learning_rate": 1.7615924873172507e-05, + "loss": 1.0651, + "step": 4232 + }, + { + "epoch": 0.8144055313392454, + "grad_norm": 0.2989501305355325, + "learning_rate": 1.7580616102770354e-05, + "loss": 1.0064, + "step": 4233 + }, + { + "epoch": 0.814597925747783, + "grad_norm": 0.30865533082422814, + "learning_rate": 1.754533934310717e-05, + "loss": 1.0269, + "step": 4234 + }, + { + "epoch": 0.8147903201563205, + "grad_norm": 0.2813133994460399, + "learning_rate": 1.7510094607884074e-05, + "loss": 1.0484, + "step": 4235 + }, + { + "epoch": 0.8149827145648579, + "grad_norm": 0.29309108104577763, + "learning_rate": 1.74748819107897e-05, + "loss": 1.0361, + "step": 4236 + }, + { + "epoch": 0.8151751089733955, + "grad_norm": 0.31017364253929985, + "learning_rate": 1.7439701265500273e-05, + "loss": 1.0103, + "step": 4237 + }, + { + "epoch": 0.815367503381933, + "grad_norm": 0.33639039321781367, + "learning_rate": 1.7404552685679508e-05, + "loss": 1.0795, + "step": 4238 + }, + { + "epoch": 0.8155598977904704, + "grad_norm": 0.31604691099661286, + "learning_rate": 1.7369436184978736e-05, + "loss": 1.0319, + "step": 4239 + }, + { + "epoch": 0.8157522921990079, + "grad_norm": 0.33841243657213893, + "learning_rate": 1.7334351777036805e-05, + "loss": 1.0887, + "step": 4240 + }, + { + "epoch": 0.8159446866075455, + "grad_norm": 0.30609860047683907, + "learning_rate": 1.729929947548008e-05, + "loss": 1.0226, + "step": 4241 + }, + { + "epoch": 0.816137081016083, + "grad_norm": 0.293788790133934, + "learning_rate": 1.7264279293922502e-05, + "loss": 1.0551, + "step": 4242 + }, + { + "epoch": 0.8163294754246204, + "grad_norm": 0.33061289289998036, + "learning_rate": 1.7229291245965462e-05, + "loss": 1.0399, + "step": 4243 + }, + { + "epoch": 0.816521869833158, + "grad_norm": 0.35072322030946385, + "learning_rate": 1.7194335345197932e-05, + "loss": 0.9791, + "step": 4244 + }, + { + "epoch": 0.8167142642416955, + "grad_norm": 0.32148579613729894, + "learning_rate": 1.715941160519641e-05, + "loss": 0.959, + "step": 4245 + }, + { + "epoch": 0.8169066586502329, + "grad_norm": 0.29383959213907057, + "learning_rate": 1.7124520039524803e-05, + "loss": 1.0015, + "step": 4246 + }, + { + "epoch": 0.8170990530587705, + "grad_norm": 0.2820336703221806, + "learning_rate": 1.7089660661734685e-05, + "loss": 1.0566, + "step": 4247 + }, + { + "epoch": 0.817291447467308, + "grad_norm": 0.2942055912019547, + "learning_rate": 1.705483348536496e-05, + "loss": 1.0596, + "step": 4248 + }, + { + "epoch": 0.8174838418758454, + "grad_norm": 0.3349384521380425, + "learning_rate": 1.702003852394214e-05, + "loss": 1.0737, + "step": 4249 + }, + { + "epoch": 0.817676236284383, + "grad_norm": 0.29960967337845307, + "learning_rate": 1.6985275790980203e-05, + "loss": 1.1079, + "step": 4250 + }, + { + "epoch": 0.8178686306929205, + "grad_norm": 0.3228212536301577, + "learning_rate": 1.6950545299980526e-05, + "loss": 1.041, + "step": 4251 + }, + { + "epoch": 0.818061025101458, + "grad_norm": 0.3152113373953018, + "learning_rate": 1.691584706443209e-05, + "loss": 1.0687, + "step": 4252 + }, + { + "epoch": 0.8182534195099955, + "grad_norm": 0.390827064483229, + "learning_rate": 1.6881181097811304e-05, + "loss": 1.0104, + "step": 4253 + }, + { + "epoch": 0.818445813918533, + "grad_norm": 0.3317988171243961, + "learning_rate": 1.684654741358198e-05, + "loss": 1.1326, + "step": 4254 + }, + { + "epoch": 
0.8186382083270705, + "grad_norm": 0.3001968785209655, + "learning_rate": 1.681194602519546e-05, + "loss": 1.1085, + "step": 4255 + }, + { + "epoch": 0.818830602735608, + "grad_norm": 0.40915101868692316, + "learning_rate": 1.677737694609055e-05, + "loss": 1.0702, + "step": 4256 + }, + { + "epoch": 0.8190229971441455, + "grad_norm": 0.36230968687389703, + "learning_rate": 1.674284018969342e-05, + "loss": 1.0588, + "step": 4257 + }, + { + "epoch": 0.819215391552683, + "grad_norm": 0.3530382872668718, + "learning_rate": 1.6708335769417827e-05, + "loss": 0.9842, + "step": 4258 + }, + { + "epoch": 0.8194077859612205, + "grad_norm": 0.3191849370261009, + "learning_rate": 1.667386369866484e-05, + "loss": 1.128, + "step": 4259 + }, + { + "epoch": 0.819600180369758, + "grad_norm": 0.34008937395332556, + "learning_rate": 1.6639423990823012e-05, + "loss": 0.9227, + "step": 4260 + }, + { + "epoch": 0.8197925747782955, + "grad_norm": 0.2841751541218453, + "learning_rate": 1.660501665926838e-05, + "loss": 1.035, + "step": 4261 + }, + { + "epoch": 0.8199849691868331, + "grad_norm": 0.34085841556893776, + "learning_rate": 1.6570641717364277e-05, + "loss": 1.0777, + "step": 4262 + }, + { + "epoch": 0.8201773635953705, + "grad_norm": 0.4086163305010936, + "learning_rate": 1.653629917846159e-05, + "loss": 1.086, + "step": 4263 + }, + { + "epoch": 0.820369758003908, + "grad_norm": 0.3371792459661734, + "learning_rate": 1.6501989055898535e-05, + "loss": 1.1721, + "step": 4264 + }, + { + "epoch": 0.8205621524124456, + "grad_norm": 0.2815788276431441, + "learning_rate": 1.6467711363000793e-05, + "loss": 1.0934, + "step": 4265 + }, + { + "epoch": 0.820754546820983, + "grad_norm": 0.3628916186036442, + "learning_rate": 1.643346611308144e-05, + "loss": 1.0818, + "step": 4266 + }, + { + "epoch": 0.8209469412295205, + "grad_norm": 0.29654224487139946, + "learning_rate": 1.6399253319440887e-05, + "loss": 1.0465, + "step": 4267 + }, + { + "epoch": 0.821139335638058, + "grad_norm": 0.2937533177962177, + "learning_rate": 1.6365072995367004e-05, + "loss": 0.9724, + "step": 4268 + }, + { + "epoch": 0.8213317300465955, + "grad_norm": 0.3265450810091649, + "learning_rate": 1.6330925154135058e-05, + "loss": 1.0522, + "step": 4269 + }, + { + "epoch": 0.821524124455133, + "grad_norm": 0.4547358240015819, + "learning_rate": 1.629680980900765e-05, + "loss": 1.0914, + "step": 4270 + }, + { + "epoch": 0.8217165188636705, + "grad_norm": 0.2686243073655931, + "learning_rate": 1.6262726973234843e-05, + "loss": 1.0879, + "step": 4271 + }, + { + "epoch": 0.821908913272208, + "grad_norm": 0.3189454370898761, + "learning_rate": 1.622867666005393e-05, + "loss": 1.0132, + "step": 4272 + }, + { + "epoch": 0.8221013076807455, + "grad_norm": 0.3747357956313708, + "learning_rate": 1.619465888268972e-05, + "loss": 0.9731, + "step": 4273 + }, + { + "epoch": 0.822293702089283, + "grad_norm": 0.32076015561625526, + "learning_rate": 1.616067365435433e-05, + "loss": 1.1012, + "step": 4274 + }, + { + "epoch": 0.8224860964978206, + "grad_norm": 0.3542059668112701, + "learning_rate": 1.6126720988247167e-05, + "loss": 0.9566, + "step": 4275 + }, + { + "epoch": 0.822678490906358, + "grad_norm": 0.30753471248970976, + "learning_rate": 1.609280089755515e-05, + "loss": 0.9947, + "step": 4276 + }, + { + "epoch": 0.8228708853148955, + "grad_norm": 0.3010294221868473, + "learning_rate": 1.605891339545237e-05, + "loss": 1.1124, + "step": 4277 + }, + { + "epoch": 0.8230632797234331, + "grad_norm": 0.3522633893705517, + "learning_rate": 1.6025058495100385e-05, + 
"loss": 1.0783, + "step": 4278 + }, + { + "epoch": 0.8232556741319705, + "grad_norm": 0.3547404714593905, + "learning_rate": 1.5991236209648053e-05, + "loss": 1.0576, + "step": 4279 + }, + { + "epoch": 0.823448068540508, + "grad_norm": 0.2914039621274619, + "learning_rate": 1.5957446552231526e-05, + "loss": 1.0655, + "step": 4280 + }, + { + "epoch": 0.8236404629490456, + "grad_norm": 0.2785104060807799, + "learning_rate": 1.5923689535974305e-05, + "loss": 1.0728, + "step": 4281 + }, + { + "epoch": 0.823832857357583, + "grad_norm": 0.4817029438779027, + "learning_rate": 1.588996517398731e-05, + "loss": 1.0681, + "step": 4282 + }, + { + "epoch": 0.8240252517661205, + "grad_norm": 0.3406957508689732, + "learning_rate": 1.5856273479368612e-05, + "loss": 1.0252, + "step": 4283 + }, + { + "epoch": 0.8242176461746581, + "grad_norm": 0.3832770419294321, + "learning_rate": 1.582261446520371e-05, + "loss": 1.0021, + "step": 4284 + }, + { + "epoch": 0.8244100405831956, + "grad_norm": 0.35433853652434877, + "learning_rate": 1.5788988144565398e-05, + "loss": 1.0356, + "step": 4285 + }, + { + "epoch": 0.824602434991733, + "grad_norm": 0.313297882112553, + "learning_rate": 1.575539453051369e-05, + "loss": 0.9684, + "step": 4286 + }, + { + "epoch": 0.8247948294002706, + "grad_norm": 0.34122480873000194, + "learning_rate": 1.5721833636096027e-05, + "loss": 1.0628, + "step": 4287 + }, + { + "epoch": 0.8249872238088081, + "grad_norm": 0.30057871198672154, + "learning_rate": 1.568830547434703e-05, + "loss": 1.0253, + "step": 4288 + }, + { + "epoch": 0.8251796182173455, + "grad_norm": 0.31207426829107815, + "learning_rate": 1.565481005828866e-05, + "loss": 1.0598, + "step": 4289 + }, + { + "epoch": 0.8253720126258831, + "grad_norm": 0.3398292973627893, + "learning_rate": 1.5621347400930175e-05, + "loss": 1.0325, + "step": 4290 + }, + { + "epoch": 0.8255644070344206, + "grad_norm": 0.4328416035109433, + "learning_rate": 1.5587917515268048e-05, + "loss": 1.0173, + "step": 4291 + }, + { + "epoch": 0.825756801442958, + "grad_norm": 0.29339438472408647, + "learning_rate": 1.5554520414286067e-05, + "loss": 1.014, + "step": 4292 + }, + { + "epoch": 0.8259491958514956, + "grad_norm": 0.347747159471914, + "learning_rate": 1.552115611095529e-05, + "loss": 1.0482, + "step": 4293 + }, + { + "epoch": 0.8261415902600331, + "grad_norm": 0.34020470481192366, + "learning_rate": 1.5487824618234047e-05, + "loss": 1.0218, + "step": 4294 + }, + { + "epoch": 0.8263339846685706, + "grad_norm": 0.27092672233754667, + "learning_rate": 1.54545259490679e-05, + "loss": 1.0925, + "step": 4295 + }, + { + "epoch": 0.826526379077108, + "grad_norm": 0.28364747899207743, + "learning_rate": 1.5421260116389637e-05, + "loss": 1.1335, + "step": 4296 + }, + { + "epoch": 0.8267187734856456, + "grad_norm": 0.2954520583421916, + "learning_rate": 1.5388027133119342e-05, + "loss": 1.0517, + "step": 4297 + }, + { + "epoch": 0.8269111678941831, + "grad_norm": 0.32338285222766205, + "learning_rate": 1.535482701216433e-05, + "loss": 1.1162, + "step": 4298 + }, + { + "epoch": 0.8271035623027205, + "grad_norm": 0.3834289541311469, + "learning_rate": 1.5321659766419128e-05, + "loss": 1.0339, + "step": 4299 + }, + { + "epoch": 0.8272959567112581, + "grad_norm": 0.32922442128799073, + "learning_rate": 1.5288525408765564e-05, + "loss": 1.0048, + "step": 4300 + }, + { + "epoch": 0.8274883511197956, + "grad_norm": 0.3496787458178123, + "learning_rate": 1.5255423952072567e-05, + "loss": 0.963, + "step": 4301 + }, + { + "epoch": 0.827680745528333, + "grad_norm": 
0.28275325227655657, + "learning_rate": 1.5222355409196398e-05, + "loss": 0.9594, + "step": 4302 + }, + { + "epoch": 0.8278731399368706, + "grad_norm": 0.36837139051448053, + "learning_rate": 1.5189319792980516e-05, + "loss": 1.0367, + "step": 4303 + }, + { + "epoch": 0.8280655343454081, + "grad_norm": 0.2954420350130657, + "learning_rate": 1.5156317116255513e-05, + "loss": 1.0374, + "step": 4304 + }, + { + "epoch": 0.8282579287539455, + "grad_norm": 0.38732593372836654, + "learning_rate": 1.5123347391839305e-05, + "loss": 1.0311, + "step": 4305 + }, + { + "epoch": 0.8284503231624831, + "grad_norm": 0.3092292055949576, + "learning_rate": 1.5090410632536967e-05, + "loss": 0.9978, + "step": 4306 + }, + { + "epoch": 0.8286427175710206, + "grad_norm": 0.3002129319227162, + "learning_rate": 1.5057506851140702e-05, + "loss": 1.088, + "step": 4307 + }, + { + "epoch": 0.8288351119795581, + "grad_norm": 0.310423549819509, + "learning_rate": 1.5024636060429997e-05, + "loss": 1.0912, + "step": 4308 + }, + { + "epoch": 0.8290275063880956, + "grad_norm": 0.30640879170819296, + "learning_rate": 1.4991798273171465e-05, + "loss": 1.0805, + "step": 4309 + }, + { + "epoch": 0.8292199007966331, + "grad_norm": 0.3112398536626859, + "learning_rate": 1.4958993502118901e-05, + "loss": 1.0377, + "step": 4310 + }, + { + "epoch": 0.8294122952051706, + "grad_norm": 0.31469996971858366, + "learning_rate": 1.4926221760013392e-05, + "loss": 1.0506, + "step": 4311 + }, + { + "epoch": 0.8296046896137081, + "grad_norm": 0.4022606241327596, + "learning_rate": 1.4893483059583014e-05, + "loss": 1.067, + "step": 4312 + }, + { + "epoch": 0.8297970840222456, + "grad_norm": 0.3062598702182294, + "learning_rate": 1.4860777413543137e-05, + "loss": 1.1611, + "step": 4313 + }, + { + "epoch": 0.8299894784307831, + "grad_norm": 0.3889458385107906, + "learning_rate": 1.4828104834596268e-05, + "loss": 0.9347, + "step": 4314 + }, + { + "epoch": 0.8301818728393207, + "grad_norm": 0.33928997942988326, + "learning_rate": 1.4795465335432035e-05, + "loss": 0.945, + "step": 4315 + }, + { + "epoch": 0.8303742672478581, + "grad_norm": 0.37590035648628306, + "learning_rate": 1.4762858928727241e-05, + "loss": 1.0919, + "step": 4316 + }, + { + "epoch": 0.8305666616563956, + "grad_norm": 0.28229467482843107, + "learning_rate": 1.4730285627145856e-05, + "loss": 1.0615, + "step": 4317 + }, + { + "epoch": 0.8307590560649332, + "grad_norm": 0.3403883019779579, + "learning_rate": 1.4697745443338984e-05, + "loss": 1.0085, + "step": 4318 + }, + { + "epoch": 0.8309514504734706, + "grad_norm": 0.3108788465793099, + "learning_rate": 1.4665238389944857e-05, + "loss": 1.0103, + "step": 4319 + }, + { + "epoch": 0.8311438448820081, + "grad_norm": 0.3495461101060005, + "learning_rate": 1.4632764479588801e-05, + "loss": 1.0028, + "step": 4320 + }, + { + "epoch": 0.8313362392905456, + "grad_norm": 0.3453501409524611, + "learning_rate": 1.4600323724883335e-05, + "loss": 0.9827, + "step": 4321 + }, + { + "epoch": 0.8315286336990831, + "grad_norm": 0.295247115020464, + "learning_rate": 1.4567916138428072e-05, + "loss": 1.0298, + "step": 4322 + }, + { + "epoch": 0.8317210281076206, + "grad_norm": 0.3712462561953952, + "learning_rate": 1.4535541732809754e-05, + "loss": 1.0807, + "step": 4323 + }, + { + "epoch": 0.8319134225161581, + "grad_norm": 0.34911513546897444, + "learning_rate": 1.4503200520602245e-05, + "loss": 0.9743, + "step": 4324 + }, + { + "epoch": 0.8321058169246957, + "grad_norm": 0.34512041064861837, + "learning_rate": 1.4470892514366441e-05, + "loss": 
1.0319, + "step": 4325 + }, + { + "epoch": 0.8322982113332331, + "grad_norm": 0.30514220474916104, + "learning_rate": 1.4438617726650439e-05, + "loss": 1.0587, + "step": 4326 + }, + { + "epoch": 0.8324906057417706, + "grad_norm": 0.2873211724007842, + "learning_rate": 1.4406376169989388e-05, + "loss": 1.0059, + "step": 4327 + }, + { + "epoch": 0.8326830001503082, + "grad_norm": 0.29469615819157635, + "learning_rate": 1.437416785690554e-05, + "loss": 1.0403, + "step": 4328 + }, + { + "epoch": 0.8328753945588456, + "grad_norm": 0.3553241588287279, + "learning_rate": 1.4341992799908255e-05, + "loss": 0.9453, + "step": 4329 + }, + { + "epoch": 0.8330677889673831, + "grad_norm": 0.30591594191161253, + "learning_rate": 1.4309851011493903e-05, + "loss": 1.0181, + "step": 4330 + }, + { + "epoch": 0.8332601833759207, + "grad_norm": 0.2885401639784365, + "learning_rate": 1.4277742504146008e-05, + "loss": 1.0681, + "step": 4331 + }, + { + "epoch": 0.8334525777844581, + "grad_norm": 0.2939893159795581, + "learning_rate": 1.4245667290335174e-05, + "loss": 0.9951, + "step": 4332 + }, + { + "epoch": 0.8336449721929956, + "grad_norm": 0.28758006529882046, + "learning_rate": 1.4213625382518968e-05, + "loss": 1.1012, + "step": 4333 + }, + { + "epoch": 0.8338373666015332, + "grad_norm": 0.28569834353715723, + "learning_rate": 1.4181616793142172e-05, + "loss": 1.0933, + "step": 4334 + }, + { + "epoch": 0.8340297610100706, + "grad_norm": 0.35958759492633907, + "learning_rate": 1.4149641534636549e-05, + "loss": 1.0671, + "step": 4335 + }, + { + "epoch": 0.8342221554186081, + "grad_norm": 0.2852292793421516, + "learning_rate": 1.4117699619420877e-05, + "loss": 1.0427, + "step": 4336 + }, + { + "epoch": 0.8344145498271457, + "grad_norm": 0.31080925371030704, + "learning_rate": 1.4085791059901076e-05, + "loss": 1.0799, + "step": 4337 + }, + { + "epoch": 0.8346069442356832, + "grad_norm": 0.31890650968048345, + "learning_rate": 1.4053915868470013e-05, + "loss": 1.0675, + "step": 4338 + }, + { + "epoch": 0.8347993386442206, + "grad_norm": 0.3143199905768238, + "learning_rate": 1.402207405750765e-05, + "loss": 1.0101, + "step": 4339 + }, + { + "epoch": 0.8349917330527582, + "grad_norm": 0.31384670029948, + "learning_rate": 1.399026563938105e-05, + "loss": 0.9138, + "step": 4340 + }, + { + "epoch": 0.8351841274612957, + "grad_norm": 0.2978722899973814, + "learning_rate": 1.3958490626444154e-05, + "loss": 1.021, + "step": 4341 + }, + { + "epoch": 0.8353765218698331, + "grad_norm": 0.286398059175232, + "learning_rate": 1.3926749031038056e-05, + "loss": 0.9644, + "step": 4342 + }, + { + "epoch": 0.8355689162783707, + "grad_norm": 0.3273241169556011, + "learning_rate": 1.3895040865490816e-05, + "loss": 1.0414, + "step": 4343 + }, + { + "epoch": 0.8357613106869082, + "grad_norm": 0.5008963752393932, + "learning_rate": 1.3863366142117506e-05, + "loss": 1.0676, + "step": 4344 + }, + { + "epoch": 0.8359537050954456, + "grad_norm": 0.40182739734768624, + "learning_rate": 1.383172487322023e-05, + "loss": 1.0994, + "step": 4345 + }, + { + "epoch": 0.8361460995039832, + "grad_norm": 0.32240367638137035, + "learning_rate": 1.3800117071088104e-05, + "loss": 1.0284, + "step": 4346 + }, + { + "epoch": 0.8363384939125207, + "grad_norm": 0.30346940699395386, + "learning_rate": 1.3768542747997215e-05, + "loss": 1.0364, + "step": 4347 + }, + { + "epoch": 0.8365308883210582, + "grad_norm": 0.35952692288069216, + "learning_rate": 1.3737001916210713e-05, + "loss": 1.0285, + "step": 4348 + }, + { + "epoch": 0.8367232827295956, + "grad_norm": 
0.3280341536673114, + "learning_rate": 1.3705494587978628e-05, + "loss": 0.9689, + "step": 4349 + }, + { + "epoch": 0.8369156771381332, + "grad_norm": 0.29841163661727804, + "learning_rate": 1.3674020775538077e-05, + "loss": 1.0264, + "step": 4350 + }, + { + "epoch": 0.8371080715466707, + "grad_norm": 0.3300173606199424, + "learning_rate": 1.3642580491113122e-05, + "loss": 1.0486, + "step": 4351 + }, + { + "epoch": 0.8373004659552081, + "grad_norm": 0.29583793172814193, + "learning_rate": 1.3611173746914795e-05, + "loss": 1.1074, + "step": 4352 + }, + { + "epoch": 0.8374928603637457, + "grad_norm": 0.3259445866010344, + "learning_rate": 1.3579800555141165e-05, + "loss": 1.0406, + "step": 4353 + }, + { + "epoch": 0.8376852547722832, + "grad_norm": 0.3081893561443836, + "learning_rate": 1.3548460927977159e-05, + "loss": 1.0318, + "step": 4354 + }, + { + "epoch": 0.8378776491808206, + "grad_norm": 0.39999903393994896, + "learning_rate": 1.351715487759474e-05, + "loss": 1.0302, + "step": 4355 + }, + { + "epoch": 0.8380700435893582, + "grad_norm": 0.3317252535529246, + "learning_rate": 1.3485882416152818e-05, + "loss": 1.0327, + "step": 4356 + }, + { + "epoch": 0.8382624379978957, + "grad_norm": 0.34708946151619857, + "learning_rate": 1.3454643555797274e-05, + "loss": 1.1173, + "step": 4357 + }, + { + "epoch": 0.8384548324064331, + "grad_norm": 0.40143747456039425, + "learning_rate": 1.342343830866093e-05, + "loss": 1.0181, + "step": 4358 + }, + { + "epoch": 0.8386472268149707, + "grad_norm": 0.28548195570072665, + "learning_rate": 1.3392266686863509e-05, + "loss": 1.0654, + "step": 4359 + }, + { + "epoch": 0.8388396212235082, + "grad_norm": 0.31076940655849183, + "learning_rate": 1.3361128702511716e-05, + "loss": 0.9509, + "step": 4360 + }, + { + "epoch": 0.8390320156320457, + "grad_norm": 0.2757276326788152, + "learning_rate": 1.3330024367699223e-05, + "loss": 1.1054, + "step": 4361 + }, + { + "epoch": 0.8392244100405832, + "grad_norm": 0.3736669343343043, + "learning_rate": 1.3298953694506522e-05, + "loss": 1.1259, + "step": 4362 + }, + { + "epoch": 0.8394168044491207, + "grad_norm": 0.3804990149363424, + "learning_rate": 1.326791669500117e-05, + "loss": 1.0513, + "step": 4363 + }, + { + "epoch": 0.8396091988576582, + "grad_norm": 0.3154312484184179, + "learning_rate": 1.3236913381237592e-05, + "loss": 0.9786, + "step": 4364 + }, + { + "epoch": 0.8398015932661957, + "grad_norm": 0.3857616024777847, + "learning_rate": 1.3205943765257055e-05, + "loss": 1.0056, + "step": 4365 + }, + { + "epoch": 0.8399939876747332, + "grad_norm": 0.3399896988079937, + "learning_rate": 1.317500785908783e-05, + "loss": 1.0466, + "step": 4366 + }, + { + "epoch": 0.8401863820832707, + "grad_norm": 0.2961339211810674, + "learning_rate": 1.31441056747451e-05, + "loss": 1.0089, + "step": 4367 + }, + { + "epoch": 0.8403787764918083, + "grad_norm": 0.27330043522945013, + "learning_rate": 1.3113237224230834e-05, + "loss": 0.9903, + "step": 4368 + }, + { + "epoch": 0.8405711709003457, + "grad_norm": 0.302922675005817, + "learning_rate": 1.3082402519534076e-05, + "loss": 0.9964, + "step": 4369 + }, + { + "epoch": 0.8407635653088832, + "grad_norm": 0.3113824707386979, + "learning_rate": 1.3051601572630612e-05, + "loss": 1.0192, + "step": 4370 + }, + { + "epoch": 0.8409559597174208, + "grad_norm": 0.39076202495571843, + "learning_rate": 1.3020834395483195e-05, + "loss": 1.0051, + "step": 4371 + }, + { + "epoch": 0.8411483541259582, + "grad_norm": 0.3388408607420535, + "learning_rate": 1.2990101000041444e-05, + "loss": 
0.9489, + "step": 4372 + }, + { + "epoch": 0.8413407485344957, + "grad_norm": 0.3241563187854764, + "learning_rate": 1.2959401398241843e-05, + "loss": 1.0729, + "step": 4373 + }, + { + "epoch": 0.8415331429430332, + "grad_norm": 0.2942550788767924, + "learning_rate": 1.2928735602007769e-05, + "loss": 1.0623, + "step": 4374 + }, + { + "epoch": 0.8417255373515707, + "grad_norm": 0.2972173317913256, + "learning_rate": 1.2898103623249457e-05, + "loss": 1.0782, + "step": 4375 + }, + { + "epoch": 0.8419179317601082, + "grad_norm": 0.31317053641199444, + "learning_rate": 1.2867505473864028e-05, + "loss": 1.0302, + "step": 4376 + }, + { + "epoch": 0.8421103261686457, + "grad_norm": 0.3347588087456929, + "learning_rate": 1.2836941165735462e-05, + "loss": 1.0332, + "step": 4377 + }, + { + "epoch": 0.8423027205771833, + "grad_norm": 0.34002030210517803, + "learning_rate": 1.280641071073455e-05, + "loss": 1.0942, + "step": 4378 + }, + { + "epoch": 0.8424951149857207, + "grad_norm": 0.31416757185788263, + "learning_rate": 1.277591412071899e-05, + "loss": 1.181, + "step": 4379 + }, + { + "epoch": 0.8426875093942582, + "grad_norm": 0.2513049540304737, + "learning_rate": 1.2745451407533293e-05, + "loss": 0.9742, + "step": 4380 + }, + { + "epoch": 0.8428799038027958, + "grad_norm": 0.33396854133990356, + "learning_rate": 1.271502258300885e-05, + "loss": 1.0648, + "step": 4381 + }, + { + "epoch": 0.8430722982113332, + "grad_norm": 0.28811172966431414, + "learning_rate": 1.2684627658963867e-05, + "loss": 0.9838, + "step": 4382 + }, + { + "epoch": 0.8432646926198707, + "grad_norm": 0.3344094463958978, + "learning_rate": 1.2654266647203339e-05, + "loss": 1.0583, + "step": 4383 + }, + { + "epoch": 0.8434570870284083, + "grad_norm": 0.29059186098623796, + "learning_rate": 1.262393955951916e-05, + "loss": 0.9516, + "step": 4384 + }, + { + "epoch": 0.8436494814369457, + "grad_norm": 0.3154616925093502, + "learning_rate": 1.2593646407690051e-05, + "loss": 1.0943, + "step": 4385 + }, + { + "epoch": 0.8438418758454832, + "grad_norm": 0.4595138985021085, + "learning_rate": 1.2563387203481448e-05, + "loss": 1.078, + "step": 4386 + }, + { + "epoch": 0.8440342702540208, + "grad_norm": 0.34485173350393894, + "learning_rate": 1.2533161958645756e-05, + "loss": 0.9703, + "step": 4387 + }, + { + "epoch": 0.8442266646625582, + "grad_norm": 0.29043708174197574, + "learning_rate": 1.2502970684922066e-05, + "loss": 1.0914, + "step": 4388 + }, + { + "epoch": 0.8444190590710957, + "grad_norm": 0.33807419121247195, + "learning_rate": 1.2472813394036343e-05, + "loss": 1.1189, + "step": 4389 + }, + { + "epoch": 0.8446114534796333, + "grad_norm": 0.3627737516281703, + "learning_rate": 1.2442690097701327e-05, + "loss": 1.1022, + "step": 4390 + }, + { + "epoch": 0.8448038478881708, + "grad_norm": 0.300897351451403, + "learning_rate": 1.2412600807616525e-05, + "loss": 1.061, + "step": 4391 + }, + { + "epoch": 0.8449962422967082, + "grad_norm": 0.30350554626579285, + "learning_rate": 1.2382545535468315e-05, + "loss": 1.0349, + "step": 4392 + }, + { + "epoch": 0.8451886367052458, + "grad_norm": 0.3102167279099889, + "learning_rate": 1.2352524292929824e-05, + "loss": 1.0592, + "step": 4393 + }, + { + "epoch": 0.8453810311137833, + "grad_norm": 0.2902124229608211, + "learning_rate": 1.2322537091660912e-05, + "loss": 0.9995, + "step": 4394 + }, + { + "epoch": 0.8455734255223207, + "grad_norm": 0.35997306625094294, + "learning_rate": 1.22925839433083e-05, + "loss": 1.0911, + "step": 4395 + }, + { + "epoch": 0.8457658199308583, + "grad_norm": 
0.2790983922193345, + "learning_rate": 1.2262664859505435e-05, + "loss": 1.0048, + "step": 4396 + }, + { + "epoch": 0.8459582143393958, + "grad_norm": 0.31723225473807776, + "learning_rate": 1.223277985187251e-05, + "loss": 1.029, + "step": 4397 + }, + { + "epoch": 0.8461506087479332, + "grad_norm": 0.34931807081170924, + "learning_rate": 1.2202928932016589e-05, + "loss": 1.0319, + "step": 4398 + }, + { + "epoch": 0.8463430031564708, + "grad_norm": 0.25205369282964313, + "learning_rate": 1.217311211153137e-05, + "loss": 1.0531, + "step": 4399 + }, + { + "epoch": 0.8465353975650083, + "grad_norm": 0.28878951052224605, + "learning_rate": 1.2143329401997372e-05, + "loss": 1.0261, + "step": 4400 + }, + { + "epoch": 0.8467277919735458, + "grad_norm": 0.30475683176157253, + "learning_rate": 1.2113580814981884e-05, + "loss": 1.0791, + "step": 4401 + }, + { + "epoch": 0.8469201863820832, + "grad_norm": 0.3590265373440576, + "learning_rate": 1.2083866362038865e-05, + "loss": 1.0454, + "step": 4402 + }, + { + "epoch": 0.8471125807906208, + "grad_norm": 0.3147493282361996, + "learning_rate": 1.2054186054709105e-05, + "loss": 1.0376, + "step": 4403 + }, + { + "epoch": 0.8473049751991583, + "grad_norm": 0.35656484326336557, + "learning_rate": 1.2024539904520072e-05, + "loss": 1.1018, + "step": 4404 + }, + { + "epoch": 0.8474973696076957, + "grad_norm": 0.31804220118379734, + "learning_rate": 1.1994927922985998e-05, + "loss": 0.9712, + "step": 4405 + }, + { + "epoch": 0.8476897640162333, + "grad_norm": 0.2815920389719055, + "learning_rate": 1.1965350121607865e-05, + "loss": 1.0473, + "step": 4406 + }, + { + "epoch": 0.8478821584247708, + "grad_norm": 0.3697235808703823, + "learning_rate": 1.1935806511873304e-05, + "loss": 1.0633, + "step": 4407 + }, + { + "epoch": 0.8480745528333082, + "grad_norm": 0.33882787882348747, + "learning_rate": 1.1906297105256725e-05, + "loss": 0.9101, + "step": 4408 + }, + { + "epoch": 0.8482669472418458, + "grad_norm": 0.38126204769137373, + "learning_rate": 1.187682191321925e-05, + "loss": 0.9732, + "step": 4409 + }, + { + "epoch": 0.8484593416503833, + "grad_norm": 0.32230133575299447, + "learning_rate": 1.1847380947208698e-05, + "loss": 1.1817, + "step": 4410 + }, + { + "epoch": 0.8486517360589207, + "grad_norm": 0.2687968482367284, + "learning_rate": 1.1817974218659622e-05, + "loss": 1.0705, + "step": 4411 + }, + { + "epoch": 0.8488441304674583, + "grad_norm": 0.29467017204237417, + "learning_rate": 1.178860173899321e-05, + "loss": 1.0357, + "step": 4412 + }, + { + "epoch": 0.8490365248759958, + "grad_norm": 0.3513843597260413, + "learning_rate": 1.1759263519617437e-05, + "loss": 0.9443, + "step": 4413 + }, + { + "epoch": 0.8492289192845333, + "grad_norm": 0.33476851316917355, + "learning_rate": 1.172995957192693e-05, + "loss": 1.0129, + "step": 4414 + }, + { + "epoch": 0.8494213136930708, + "grad_norm": 0.31263259081617994, + "learning_rate": 1.1700689907302953e-05, + "loss": 1.0095, + "step": 4415 + }, + { + "epoch": 0.8496137081016083, + "grad_norm": 0.3430615587125829, + "learning_rate": 1.167145453711358e-05, + "loss": 1.0359, + "step": 4416 + }, + { + "epoch": 0.8498061025101458, + "grad_norm": 0.3062578995978995, + "learning_rate": 1.1642253472713426e-05, + "loss": 1.003, + "step": 4417 + }, + { + "epoch": 0.8499984969186833, + "grad_norm": 0.33071943347271965, + "learning_rate": 1.161308672544389e-05, + "loss": 0.9642, + "step": 4418 + }, + { + "epoch": 0.8501908913272208, + "grad_norm": 0.3224894187239732, + "learning_rate": 1.1583954306633005e-05, + "loss": 
1.0617, + "step": 4419 + }, + { + "epoch": 0.8503832857357583, + "grad_norm": 0.331184047299763, + "learning_rate": 1.1554856227595433e-05, + "loss": 1.197, + "step": 4420 + }, + { + "epoch": 0.8505756801442959, + "grad_norm": 0.48725607724834413, + "learning_rate": 1.1525792499632527e-05, + "loss": 1.0276, + "step": 4421 + }, + { + "epoch": 0.8507680745528333, + "grad_norm": 0.396870438466712, + "learning_rate": 1.1496763134032363e-05, + "loss": 0.9782, + "step": 4422 + }, + { + "epoch": 0.8509604689613708, + "grad_norm": 0.30087970612157156, + "learning_rate": 1.1467768142069546e-05, + "loss": 0.9694, + "step": 4423 + }, + { + "epoch": 0.8511528633699084, + "grad_norm": 0.3325945603099869, + "learning_rate": 1.1438807535005436e-05, + "loss": 0.9962, + "step": 4424 + }, + { + "epoch": 0.8513452577784458, + "grad_norm": 0.33261011360842196, + "learning_rate": 1.1409881324088011e-05, + "loss": 1.0458, + "step": 4425 + }, + { + "epoch": 0.8515376521869833, + "grad_norm": 0.3049445624066469, + "learning_rate": 1.138098952055181e-05, + "loss": 1.0112, + "step": 4426 + }, + { + "epoch": 0.8517300465955208, + "grad_norm": 0.38766045486114176, + "learning_rate": 1.1352132135618166e-05, + "loss": 1.0492, + "step": 4427 + }, + { + "epoch": 0.8519224410040583, + "grad_norm": 0.31099305666207355, + "learning_rate": 1.1323309180494901e-05, + "loss": 0.9956, + "step": 4428 + }, + { + "epoch": 0.8521148354125958, + "grad_norm": 0.42968829303413725, + "learning_rate": 1.1294520666376518e-05, + "loss": 1.1178, + "step": 4429 + }, + { + "epoch": 0.8523072298211333, + "grad_norm": 0.36525606478135414, + "learning_rate": 1.1265766604444172e-05, + "loss": 0.9432, + "step": 4430 + }, + { + "epoch": 0.8524996242296708, + "grad_norm": 0.33189102350562616, + "learning_rate": 1.1237047005865576e-05, + "loss": 1.1322, + "step": 4431 + }, + { + "epoch": 0.8526920186382083, + "grad_norm": 0.3414862663003093, + "learning_rate": 1.1208361881795116e-05, + "loss": 0.9851, + "step": 4432 + }, + { + "epoch": 0.8528844130467458, + "grad_norm": 0.28830398683799496, + "learning_rate": 1.1179711243373736e-05, + "loss": 0.9595, + "step": 4433 + }, + { + "epoch": 0.8530768074552834, + "grad_norm": 0.3474831865506262, + "learning_rate": 1.1151095101729047e-05, + "loss": 1.0068, + "step": 4434 + }, + { + "epoch": 0.8532692018638208, + "grad_norm": 0.3107078594784587, + "learning_rate": 1.1122513467975237e-05, + "loss": 0.9611, + "step": 4435 + }, + { + "epoch": 0.8534615962723583, + "grad_norm": 0.33472913486005623, + "learning_rate": 1.1093966353213037e-05, + "loss": 1.0413, + "step": 4436 + }, + { + "epoch": 0.8536539906808959, + "grad_norm": 0.3484026202151843, + "learning_rate": 1.1065453768529843e-05, + "loss": 1.0699, + "step": 4437 + }, + { + "epoch": 0.8538463850894333, + "grad_norm": 0.32119362521413797, + "learning_rate": 1.1036975724999609e-05, + "loss": 0.9327, + "step": 4438 + }, + { + "epoch": 0.8540387794979708, + "grad_norm": 0.28734280143011365, + "learning_rate": 1.1008532233682877e-05, + "loss": 1.101, + "step": 4439 + }, + { + "epoch": 0.8542311739065084, + "grad_norm": 0.3014044660527849, + "learning_rate": 1.098012330562681e-05, + "loss": 1.0343, + "step": 4440 + }, + { + "epoch": 0.8544235683150458, + "grad_norm": 0.29342925649486995, + "learning_rate": 1.0951748951865048e-05, + "loss": 1.0573, + "step": 4441 + }, + { + "epoch": 0.8546159627235833, + "grad_norm": 0.32888223382561116, + "learning_rate": 1.0923409183417887e-05, + "loss": 1.1137, + "step": 4442 + }, + { + "epoch": 0.8548083571321209, + 
"grad_norm": 0.3158956758836071, + "learning_rate": 1.08951040112922e-05, + "loss": 1.0781, + "step": 4443 + }, + { + "epoch": 0.8550007515406584, + "grad_norm": 0.36033958326935306, + "learning_rate": 1.0866833446481317e-05, + "loss": 1.0316, + "step": 4444 + }, + { + "epoch": 0.8551931459491958, + "grad_norm": 0.3338713449986007, + "learning_rate": 1.0838597499965275e-05, + "loss": 1.0277, + "step": 4445 + }, + { + "epoch": 0.8553855403577334, + "grad_norm": 0.33614920307354396, + "learning_rate": 1.0810396182710536e-05, + "loss": 0.9098, + "step": 4446 + }, + { + "epoch": 0.8555779347662709, + "grad_norm": 0.33525744216441417, + "learning_rate": 1.0782229505670193e-05, + "loss": 1.0694, + "step": 4447 + }, + { + "epoch": 0.8557703291748083, + "grad_norm": 0.3394241324598925, + "learning_rate": 1.0754097479783876e-05, + "loss": 1.0566, + "step": 4448 + }, + { + "epoch": 0.8559627235833459, + "grad_norm": 0.3018279365084254, + "learning_rate": 1.0726000115977696e-05, + "loss": 0.9997, + "step": 4449 + }, + { + "epoch": 0.8561551179918834, + "grad_norm": 0.40704980939486435, + "learning_rate": 1.069793742516435e-05, + "loss": 1.123, + "step": 4450 + }, + { + "epoch": 0.8563475124004208, + "grad_norm": 0.2919924860493612, + "learning_rate": 1.0669909418243118e-05, + "loss": 0.9917, + "step": 4451 + }, + { + "epoch": 0.8565399068089584, + "grad_norm": 0.2790355169328536, + "learning_rate": 1.0641916106099692e-05, + "loss": 1.0248, + "step": 4452 + }, + { + "epoch": 0.8567323012174959, + "grad_norm": 0.37835937772774725, + "learning_rate": 1.0613957499606386e-05, + "loss": 1.0291, + "step": 4453 + }, + { + "epoch": 0.8569246956260334, + "grad_norm": 0.42342398844313034, + "learning_rate": 1.0586033609622004e-05, + "loss": 1.0717, + "step": 4454 + }, + { + "epoch": 0.8571170900345708, + "grad_norm": 0.317702221107483, + "learning_rate": 1.0558144446991836e-05, + "loss": 0.974, + "step": 4455 + }, + { + "epoch": 0.8573094844431084, + "grad_norm": 0.373112834970984, + "learning_rate": 1.0530290022547728e-05, + "loss": 0.9997, + "step": 4456 + }, + { + "epoch": 0.8575018788516459, + "grad_norm": 0.3300060489963877, + "learning_rate": 1.0502470347108018e-05, + "loss": 1.0404, + "step": 4457 + }, + { + "epoch": 0.8576942732601833, + "grad_norm": 0.33122842325647844, + "learning_rate": 1.0474685431477539e-05, + "loss": 1.0045, + "step": 4458 + }, + { + "epoch": 0.8578866676687209, + "grad_norm": 0.3178185048306649, + "learning_rate": 1.0446935286447656e-05, + "loss": 0.992, + "step": 4459 + }, + { + "epoch": 0.8580790620772584, + "grad_norm": 0.31261366375069094, + "learning_rate": 1.0419219922796176e-05, + "loss": 1.0118, + "step": 4460 + }, + { + "epoch": 0.8582714564857958, + "grad_norm": 0.30546684428292226, + "learning_rate": 1.0391539351287439e-05, + "loss": 1.084, + "step": 4461 + }, + { + "epoch": 0.8584638508943334, + "grad_norm": 0.2832597379638999, + "learning_rate": 1.0363893582672247e-05, + "loss": 1.0793, + "step": 4462 + }, + { + "epoch": 0.8586562453028709, + "grad_norm": 0.3228207422481123, + "learning_rate": 1.0336282627687921e-05, + "loss": 1.0698, + "step": 4463 + }, + { + "epoch": 0.8588486397114083, + "grad_norm": 0.3454503461978924, + "learning_rate": 1.030870649705825e-05, + "loss": 1.0485, + "step": 4464 + }, + { + "epoch": 0.8590410341199459, + "grad_norm": 0.30329830668170643, + "learning_rate": 1.0281165201493438e-05, + "loss": 0.962, + "step": 4465 + }, + { + "epoch": 0.8592334285284834, + "grad_norm": 0.3168778069988205, + "learning_rate": 1.025365875169023e-05, + 
"loss": 1.0196, + "step": 4466 + }, + { + "epoch": 0.8594258229370209, + "grad_norm": 0.3360349260654396, + "learning_rate": 1.0226187158331823e-05, + "loss": 1.0133, + "step": 4467 + }, + { + "epoch": 0.8596182173455584, + "grad_norm": 0.2829251034812311, + "learning_rate": 1.0198750432087855e-05, + "loss": 1.098, + "step": 4468 + }, + { + "epoch": 0.8598106117540959, + "grad_norm": 0.33258295644506225, + "learning_rate": 1.0171348583614459e-05, + "loss": 0.9965, + "step": 4469 + }, + { + "epoch": 0.8600030061626334, + "grad_norm": 0.3873773548625855, + "learning_rate": 1.0143981623554156e-05, + "loss": 1.0556, + "step": 4470 + }, + { + "epoch": 0.8601954005711709, + "grad_norm": 0.3466738624280973, + "learning_rate": 1.0116649562535985e-05, + "loss": 0.998, + "step": 4471 + }, + { + "epoch": 0.8603877949797084, + "grad_norm": 0.3646238098517531, + "learning_rate": 1.0089352411175423e-05, + "loss": 1.0928, + "step": 4472 + }, + { + "epoch": 0.8605801893882459, + "grad_norm": 0.27685121866733553, + "learning_rate": 1.0062090180074291e-05, + "loss": 1.037, + "step": 4473 + }, + { + "epoch": 0.8607725837967835, + "grad_norm": 0.34181690377305196, + "learning_rate": 1.0034862879821027e-05, + "loss": 1.0747, + "step": 4474 + }, + { + "epoch": 0.8609649782053209, + "grad_norm": 0.3083060809958894, + "learning_rate": 1.000767052099033e-05, + "loss": 1.088, + "step": 4475 + }, + { + "epoch": 0.8611573726138584, + "grad_norm": 0.3537574821480526, + "learning_rate": 9.98051311414342e-06, + "loss": 1.0503, + "step": 4476 + }, + { + "epoch": 0.861349767022396, + "grad_norm": 0.34305103563259565, + "learning_rate": 9.953390669827945e-06, + "loss": 0.9898, + "step": 4477 + }, + { + "epoch": 0.8615421614309334, + "grad_norm": 0.3032818888889282, + "learning_rate": 9.926303198577913e-06, + "loss": 1.0187, + "step": 4478 + }, + { + "epoch": 0.8617345558394709, + "grad_norm": 0.29898397552424566, + "learning_rate": 9.899250710913765e-06, + "loss": 1.0202, + "step": 4479 + }, + { + "epoch": 0.8619269502480085, + "grad_norm": 0.3177122180422633, + "learning_rate": 9.872233217342463e-06, + "loss": 1.0814, + "step": 4480 + }, + { + "epoch": 0.8621193446565459, + "grad_norm": 0.2985889291179135, + "learning_rate": 9.845250728357215e-06, + "loss": 1.0034, + "step": 4481 + }, + { + "epoch": 0.8623117390650834, + "grad_norm": 0.36401806520978847, + "learning_rate": 9.818303254437722e-06, + "loss": 0.9344, + "step": 4482 + }, + { + "epoch": 0.8625041334736209, + "grad_norm": 0.2835813223927997, + "learning_rate": 9.791390806050115e-06, + "loss": 1.0655, + "step": 4483 + }, + { + "epoch": 0.8626965278821584, + "grad_norm": 0.30858071706827567, + "learning_rate": 9.764513393646812e-06, + "loss": 1.0191, + "step": 4484 + }, + { + "epoch": 0.8628889222906959, + "grad_norm": 0.3089258645673938, + "learning_rate": 9.737671027666728e-06, + "loss": 0.9848, + "step": 4485 + }, + { + "epoch": 0.8630813166992334, + "grad_norm": 0.36369330480420137, + "learning_rate": 9.710863718535135e-06, + "loss": 0.9762, + "step": 4486 + }, + { + "epoch": 0.863273711107771, + "grad_norm": 0.3027704579909306, + "learning_rate": 9.684091476663659e-06, + "loss": 0.9363, + "step": 4487 + }, + { + "epoch": 0.8634661055163084, + "grad_norm": 0.33084617800809374, + "learning_rate": 9.657354312450362e-06, + "loss": 1.007, + "step": 4488 + }, + { + "epoch": 0.8636584999248459, + "grad_norm": 0.3315757145446283, + "learning_rate": 9.630652236279625e-06, + "loss": 1.071, + "step": 4489 + }, + { + "epoch": 0.8638508943333835, + "grad_norm": 
0.3172335488087789, + "learning_rate": 9.603985258522218e-06, + "loss": 1.0596, + "step": 4490 + }, + { + "epoch": 0.8640432887419209, + "grad_norm": 0.3501319764249724, + "learning_rate": 9.577353389535315e-06, + "loss": 1.0877, + "step": 4491 + }, + { + "epoch": 0.8642356831504584, + "grad_norm": 0.37297691990062776, + "learning_rate": 9.550756639662417e-06, + "loss": 1.0372, + "step": 4492 + }, + { + "epoch": 0.864428077558996, + "grad_norm": 0.38715522066816577, + "learning_rate": 9.524195019233407e-06, + "loss": 1.1032, + "step": 4493 + }, + { + "epoch": 0.8646204719675334, + "grad_norm": 0.349409455189122, + "learning_rate": 9.497668538564474e-06, + "loss": 1.0524, + "step": 4494 + }, + { + "epoch": 0.8648128663760709, + "grad_norm": 0.3200069302607972, + "learning_rate": 9.471177207958237e-06, + "loss": 0.9815, + "step": 4495 + }, + { + "epoch": 0.8650052607846085, + "grad_norm": 0.2894757497068607, + "learning_rate": 9.444721037703597e-06, + "loss": 1.0473, + "step": 4496 + }, + { + "epoch": 0.865197655193146, + "grad_norm": 0.25782538962289675, + "learning_rate": 9.418300038075845e-06, + "loss": 1.0448, + "step": 4497 + }, + { + "epoch": 0.8653900496016834, + "grad_norm": 0.25219221351477167, + "learning_rate": 9.391914219336606e-06, + "loss": 1.0396, + "step": 4498 + }, + { + "epoch": 0.865582444010221, + "grad_norm": 0.3577759140001364, + "learning_rate": 9.365563591733784e-06, + "loss": 1.0004, + "step": 4499 + }, + { + "epoch": 0.8657748384187585, + "grad_norm": 0.3580821477834573, + "learning_rate": 9.33924816550168e-06, + "loss": 1.0903, + "step": 4500 + }, + { + "epoch": 0.8659672328272959, + "grad_norm": 0.42143140171027066, + "learning_rate": 9.31296795086093e-06, + "loss": 1.0454, + "step": 4501 + }, + { + "epoch": 0.8661596272358335, + "grad_norm": 0.33710189601644214, + "learning_rate": 9.28672295801839e-06, + "loss": 0.9482, + "step": 4502 + }, + { + "epoch": 0.866352021644371, + "grad_norm": 0.26576983296396556, + "learning_rate": 9.260513197167397e-06, + "loss": 1.0863, + "step": 4503 + }, + { + "epoch": 0.8665444160529084, + "grad_norm": 0.3011347453154511, + "learning_rate": 9.234338678487509e-06, + "loss": 1.0969, + "step": 4504 + }, + { + "epoch": 0.866736810461446, + "grad_norm": 0.29311712535303436, + "learning_rate": 9.208199412144558e-06, + "loss": 1.0271, + "step": 4505 + }, + { + "epoch": 0.8669292048699835, + "grad_norm": 0.3459572268262598, + "learning_rate": 9.182095408290781e-06, + "loss": 1.1073, + "step": 4506 + }, + { + "epoch": 0.867121599278521, + "grad_norm": 0.2678475190803086, + "learning_rate": 9.156026677064632e-06, + "loss": 0.9845, + "step": 4507 + }, + { + "epoch": 0.8673139936870584, + "grad_norm": 0.35735510750568455, + "learning_rate": 9.129993228590916e-06, + "loss": 1.1411, + "step": 4508 + }, + { + "epoch": 0.867506388095596, + "grad_norm": 0.4133557860551575, + "learning_rate": 9.103995072980765e-06, + "loss": 1.0795, + "step": 4509 + }, + { + "epoch": 0.8676987825041335, + "grad_norm": 0.2867012334098863, + "learning_rate": 9.078032220331522e-06, + "loss": 1.0124, + "step": 4510 + }, + { + "epoch": 0.8678911769126709, + "grad_norm": 0.2612459563249222, + "learning_rate": 9.052104680726858e-06, + "loss": 1.0226, + "step": 4511 + }, + { + "epoch": 0.8680835713212085, + "grad_norm": 0.2875637050035914, + "learning_rate": 9.026212464236772e-06, + "loss": 0.9402, + "step": 4512 + }, + { + "epoch": 0.868275965729746, + "grad_norm": 0.3676081864459413, + "learning_rate": 9.000355580917463e-06, + "loss": 0.9936, + "step": 4513 + }, + { + 
"epoch": 0.8684683601382834, + "grad_norm": 0.33022578201744995, + "learning_rate": 8.974534040811445e-06, + "loss": 0.9945, + "step": 4514 + }, + { + "epoch": 0.868660754546821, + "grad_norm": 0.2643701704320734, + "learning_rate": 8.948747853947526e-06, + "loss": 0.9493, + "step": 4515 + }, + { + "epoch": 0.8688531489553585, + "grad_norm": 0.3177045285295756, + "learning_rate": 8.922997030340752e-06, + "loss": 1.0616, + "step": 4516 + }, + { + "epoch": 0.869045543363896, + "grad_norm": 0.2909658218247964, + "learning_rate": 8.897281579992467e-06, + "loss": 1.0885, + "step": 4517 + }, + { + "epoch": 0.8692379377724335, + "grad_norm": 0.3174905933862179, + "learning_rate": 8.871601512890238e-06, + "loss": 1.0604, + "step": 4518 + }, + { + "epoch": 0.869430332180971, + "grad_norm": 0.3716458273941534, + "learning_rate": 8.845956839007896e-06, + "loss": 0.9767, + "step": 4519 + }, + { + "epoch": 0.8696227265895085, + "grad_norm": 0.3361585594569247, + "learning_rate": 8.820347568305542e-06, + "loss": 1.043, + "step": 4520 + }, + { + "epoch": 0.869815120998046, + "grad_norm": 0.3424274173679851, + "learning_rate": 8.794773710729542e-06, + "loss": 0.9721, + "step": 4521 + }, + { + "epoch": 0.8700075154065835, + "grad_norm": 0.2796008606602198, + "learning_rate": 8.769235276212495e-06, + "loss": 0.9811, + "step": 4522 + }, + { + "epoch": 0.870199909815121, + "grad_norm": 0.36144247503136384, + "learning_rate": 8.74373227467319e-06, + "loss": 1.0633, + "step": 4523 + }, + { + "epoch": 0.8703923042236585, + "grad_norm": 0.3732452968777644, + "learning_rate": 8.718264716016722e-06, + "loss": 1.0302, + "step": 4524 + }, + { + "epoch": 0.870584698632196, + "grad_norm": 0.2953599648420802, + "learning_rate": 8.692832610134427e-06, + "loss": 1.105, + "step": 4525 + }, + { + "epoch": 0.8707770930407335, + "grad_norm": 0.26832474254996325, + "learning_rate": 8.66743596690377e-06, + "loss": 1.0644, + "step": 4526 + }, + { + "epoch": 0.870969487449271, + "grad_norm": 0.3027354183839997, + "learning_rate": 8.642074796188592e-06, + "loss": 1.0099, + "step": 4527 + }, + { + "epoch": 0.8711618818578085, + "grad_norm": 0.27651364483762547, + "learning_rate": 8.61674910783884e-06, + "loss": 1.0008, + "step": 4528 + }, + { + "epoch": 0.871354276266346, + "grad_norm": 0.3557336831020569, + "learning_rate": 8.59145891169072e-06, + "loss": 0.9959, + "step": 4529 + }, + { + "epoch": 0.8715466706748836, + "grad_norm": 0.34495167014227857, + "learning_rate": 8.566204217566664e-06, + "loss": 1.0266, + "step": 4530 + }, + { + "epoch": 0.871739065083421, + "grad_norm": 0.29654934009419176, + "learning_rate": 8.540985035275272e-06, + "loss": 1.085, + "step": 4531 + }, + { + "epoch": 0.8719314594919585, + "grad_norm": 0.3617659512440195, + "learning_rate": 8.515801374611432e-06, + "loss": 1.1141, + "step": 4532 + }, + { + "epoch": 0.8721238539004961, + "grad_norm": 0.30700110714809614, + "learning_rate": 8.490653245356185e-06, + "loss": 0.9967, + "step": 4533 + }, + { + "epoch": 0.8723162483090335, + "grad_norm": 0.29819207582559937, + "learning_rate": 8.465540657276727e-06, + "loss": 1.0536, + "step": 4534 + }, + { + "epoch": 0.872508642717571, + "grad_norm": 0.3569705737680106, + "learning_rate": 8.44046362012656e-06, + "loss": 0.8938, + "step": 4535 + }, + { + "epoch": 0.8727010371261085, + "grad_norm": 0.3579119333163085, + "learning_rate": 8.415422143645245e-06, + "loss": 1.0691, + "step": 4536 + }, + { + "epoch": 0.872893431534646, + "grad_norm": 0.30332627861015554, + "learning_rate": 8.39041623755864e-06, + "loss": 
0.9906, + "step": 4537 + }, + { + "epoch": 0.8730858259431835, + "grad_norm": 0.3289154092150236, + "learning_rate": 8.365445911578785e-06, + "loss": 0.9788, + "step": 4538 + }, + { + "epoch": 0.873278220351721, + "grad_norm": 0.31544088970501094, + "learning_rate": 8.340511175403809e-06, + "loss": 1.0725, + "step": 4539 + }, + { + "epoch": 0.8734706147602586, + "grad_norm": 0.3000155065610411, + "learning_rate": 8.315612038718101e-06, + "loss": 0.9786, + "step": 4540 + }, + { + "epoch": 0.873663009168796, + "grad_norm": 0.2997212359600715, + "learning_rate": 8.290748511192215e-06, + "loss": 1.0573, + "step": 4541 + }, + { + "epoch": 0.8738554035773335, + "grad_norm": 0.38680784507249677, + "learning_rate": 8.265920602482824e-06, + "loss": 1.06, + "step": 4542 + }, + { + "epoch": 0.8740477979858711, + "grad_norm": 0.3134152454131542, + "learning_rate": 8.241128322232817e-06, + "loss": 1.0345, + "step": 4543 + }, + { + "epoch": 0.8742401923944085, + "grad_norm": 0.30801958987495803, + "learning_rate": 8.216371680071244e-06, + "loss": 1.0268, + "step": 4544 + }, + { + "epoch": 0.874432586802946, + "grad_norm": 0.4017090241313671, + "learning_rate": 8.191650685613272e-06, + "loss": 1.1076, + "step": 4545 + }, + { + "epoch": 0.8746249812114836, + "grad_norm": 0.28283552919225213, + "learning_rate": 8.166965348460299e-06, + "loss": 0.992, + "step": 4546 + }, + { + "epoch": 0.874817375620021, + "grad_norm": 0.30768950704447573, + "learning_rate": 8.142315678199763e-06, + "loss": 1.1308, + "step": 4547 + }, + { + "epoch": 0.8750097700285585, + "grad_norm": 0.29105323878644324, + "learning_rate": 8.117701684405343e-06, + "loss": 1.1231, + "step": 4548 + }, + { + "epoch": 0.8752021644370961, + "grad_norm": 0.3444143380118533, + "learning_rate": 8.093123376636836e-06, + "loss": 1.0038, + "step": 4549 + }, + { + "epoch": 0.8753945588456336, + "grad_norm": 0.37826490436062077, + "learning_rate": 8.06858076444017e-06, + "loss": 1.0436, + "step": 4550 + }, + { + "epoch": 0.875586953254171, + "grad_norm": 0.31686602377310846, + "learning_rate": 8.044073857347423e-06, + "loss": 1.0862, + "step": 4551 + }, + { + "epoch": 0.8757793476627086, + "grad_norm": 0.3167485328632148, + "learning_rate": 8.019602664876758e-06, + "loss": 0.9895, + "step": 4552 + }, + { + "epoch": 0.8759717420712461, + "grad_norm": 0.3726995070754788, + "learning_rate": 7.995167196532527e-06, + "loss": 0.9494, + "step": 4553 + }, + { + "epoch": 0.8761641364797835, + "grad_norm": 0.2971354953411799, + "learning_rate": 7.970767461805217e-06, + "loss": 0.9925, + "step": 4554 + }, + { + "epoch": 0.8763565308883211, + "grad_norm": 0.2672533224321114, + "learning_rate": 7.946403470171327e-06, + "loss": 1.0142, + "step": 4555 + }, + { + "epoch": 0.8765489252968586, + "grad_norm": 0.30759314843219776, + "learning_rate": 7.922075231093629e-06, + "loss": 1.1331, + "step": 4556 + }, + { + "epoch": 0.876741319705396, + "grad_norm": 0.26725015363847765, + "learning_rate": 7.897782754020888e-06, + "loss": 1.0258, + "step": 4557 + }, + { + "epoch": 0.8769337141139336, + "grad_norm": 0.3614669287403115, + "learning_rate": 7.873526048388026e-06, + "loss": 1.0257, + "step": 4558 + }, + { + "epoch": 0.8771261085224711, + "grad_norm": 0.2792742565674579, + "learning_rate": 7.849305123616092e-06, + "loss": 0.9808, + "step": 4559 + }, + { + "epoch": 0.8773185029310085, + "grad_norm": 0.34917237286106484, + "learning_rate": 7.825119989112173e-06, + "loss": 1.1074, + "step": 4560 + }, + { + "epoch": 0.877510897339546, + "grad_norm": 0.36783188142796946, + 
"learning_rate": 7.800970654269512e-06, + "loss": 1.0171, + "step": 4561 + }, + { + "epoch": 0.8777032917480836, + "grad_norm": 0.3004651117437256, + "learning_rate": 7.776857128467463e-06, + "loss": 1.0281, + "step": 4562 + }, + { + "epoch": 0.8778956861566211, + "grad_norm": 0.3177625927176387, + "learning_rate": 7.75277942107141e-06, + "loss": 1.0023, + "step": 4563 + }, + { + "epoch": 0.8780880805651585, + "grad_norm": 0.32300275833253295, + "learning_rate": 7.728737541432863e-06, + "loss": 1.0401, + "step": 4564 + }, + { + "epoch": 0.8782804749736961, + "grad_norm": 0.3425531359765448, + "learning_rate": 7.704731498889428e-06, + "loss": 1.0772, + "step": 4565 + }, + { + "epoch": 0.8784728693822336, + "grad_norm": 0.29130708313697373, + "learning_rate": 7.680761302764727e-06, + "loss": 0.9022, + "step": 4566 + }, + { + "epoch": 0.878665263790771, + "grad_norm": 0.2933954423000946, + "learning_rate": 7.656826962368579e-06, + "loss": 1.0647, + "step": 4567 + }, + { + "epoch": 0.8788576581993086, + "grad_norm": 0.3998976598753328, + "learning_rate": 7.632928486996749e-06, + "loss": 1.0291, + "step": 4568 + }, + { + "epoch": 0.8790500526078461, + "grad_norm": 0.29213728572412817, + "learning_rate": 7.609065885931155e-06, + "loss": 1.0231, + "step": 4569 + }, + { + "epoch": 0.8792424470163835, + "grad_norm": 0.298579230197274, + "learning_rate": 7.585239168439761e-06, + "loss": 1.0777, + "step": 4570 + }, + { + "epoch": 0.8794348414249211, + "grad_norm": 0.3758451651902908, + "learning_rate": 7.561448343776567e-06, + "loss": 1.1126, + "step": 4571 + }, + { + "epoch": 0.8796272358334586, + "grad_norm": 0.3424006288082153, + "learning_rate": 7.537693421181658e-06, + "loss": 1.0157, + "step": 4572 + }, + { + "epoch": 0.879819630241996, + "grad_norm": 0.33944984462898, + "learning_rate": 7.513974409881186e-06, + "loss": 1.0515, + "step": 4573 + }, + { + "epoch": 0.8800120246505336, + "grad_norm": 0.2985959864275465, + "learning_rate": 7.490291319087339e-06, + "loss": 1.0288, + "step": 4574 + }, + { + "epoch": 0.8802044190590711, + "grad_norm": 0.41024714339703505, + "learning_rate": 7.46664415799837e-06, + "loss": 1.0506, + "step": 4575 + }, + { + "epoch": 0.8803968134676086, + "grad_norm": 0.36109715019961514, + "learning_rate": 7.443032935798533e-06, + "loss": 1.016, + "step": 4576 + }, + { + "epoch": 0.8805892078761461, + "grad_norm": 0.374191845666455, + "learning_rate": 7.419457661658169e-06, + "loss": 0.9572, + "step": 4577 + }, + { + "epoch": 0.8807816022846836, + "grad_norm": 0.32999386144732745, + "learning_rate": 7.3959183447336434e-06, + "loss": 1.0133, + "step": 4578 + }, + { + "epoch": 0.8809739966932211, + "grad_norm": 0.32854606115096013, + "learning_rate": 7.372414994167354e-06, + "loss": 1.0821, + "step": 4579 + }, + { + "epoch": 0.8811663911017587, + "grad_norm": 0.2847110915474937, + "learning_rate": 7.3489476190877536e-06, + "loss": 1.0574, + "step": 4580 + }, + { + "epoch": 0.8813587855102961, + "grad_norm": 0.27303589687485225, + "learning_rate": 7.325516228609264e-06, + "loss": 1.0641, + "step": 4581 + }, + { + "epoch": 0.8815511799188336, + "grad_norm": 0.33401077357174613, + "learning_rate": 7.302120831832382e-06, + "loss": 1.0438, + "step": 4582 + }, + { + "epoch": 0.8817435743273712, + "grad_norm": 0.3630441409035198, + "learning_rate": 7.278761437843629e-06, + "loss": 1.0187, + "step": 4583 + }, + { + "epoch": 0.8819359687359086, + "grad_norm": 0.30384362015195976, + "learning_rate": 7.255438055715469e-06, + "loss": 1.0756, + "step": 4584 + }, + { + "epoch": 
0.8821283631444461, + "grad_norm": 0.31486811664333225, + "learning_rate": 7.232150694506512e-06, + "loss": 0.9881, + "step": 4585 + }, + { + "epoch": 0.8823207575529837, + "grad_norm": 0.36614796103834135, + "learning_rate": 7.208899363261234e-06, + "loss": 0.9636, + "step": 4586 + }, + { + "epoch": 0.8825131519615211, + "grad_norm": 0.29384081111177496, + "learning_rate": 7.185684071010224e-06, + "loss": 1.0296, + "step": 4587 + }, + { + "epoch": 0.8827055463700586, + "grad_norm": 0.3427269356235219, + "learning_rate": 7.162504826770033e-06, + "loss": 1.0921, + "step": 4588 + }, + { + "epoch": 0.8828979407785961, + "grad_norm": 0.29503300125278814, + "learning_rate": 7.139361639543185e-06, + "loss": 1.0112, + "step": 4589 + }, + { + "epoch": 0.8830903351871336, + "grad_norm": 0.3988848634130886, + "learning_rate": 7.116254518318221e-06, + "loss": 0.8527, + "step": 4590 + }, + { + "epoch": 0.8832827295956711, + "grad_norm": 0.3362618641133164, + "learning_rate": 7.0931834720697535e-06, + "loss": 0.9526, + "step": 4591 + }, + { + "epoch": 0.8834751240042086, + "grad_norm": 0.3171940337183918, + "learning_rate": 7.070148509758223e-06, + "loss": 1.0747, + "step": 4592 + }, + { + "epoch": 0.8836675184127462, + "grad_norm": 0.31391739649335915, + "learning_rate": 7.0471496403301975e-06, + "loss": 1.0683, + "step": 4593 + }, + { + "epoch": 0.8838599128212836, + "grad_norm": 0.32567208559468985, + "learning_rate": 7.024186872718164e-06, + "loss": 1.1047, + "step": 4594 + }, + { + "epoch": 0.8840523072298211, + "grad_norm": 0.288861639589572, + "learning_rate": 7.001260215840566e-06, + "loss": 1.098, + "step": 4595 + }, + { + "epoch": 0.8842447016383587, + "grad_norm": 0.3427373607584587, + "learning_rate": 6.978369678601893e-06, + "loss": 1.0368, + "step": 4596 + }, + { + "epoch": 0.8844370960468961, + "grad_norm": 0.34831122126737557, + "learning_rate": 6.955515269892532e-06, + "loss": 1.0635, + "step": 4597 + }, + { + "epoch": 0.8846294904554336, + "grad_norm": 0.30863953906433567, + "learning_rate": 6.932696998588895e-06, + "loss": 1.0007, + "step": 4598 + }, + { + "epoch": 0.8848218848639712, + "grad_norm": 0.382562898683625, + "learning_rate": 6.909914873553347e-06, + "loss": 1.0074, + "step": 4599 + }, + { + "epoch": 0.8850142792725086, + "grad_norm": 0.3511536472059468, + "learning_rate": 6.887168903634178e-06, + "loss": 1.1182, + "step": 4600 + }, + { + "epoch": 0.8852066736810461, + "grad_norm": 0.3796847833354549, + "learning_rate": 6.864459097665654e-06, + "loss": 1.059, + "step": 4601 + }, + { + "epoch": 0.8853990680895837, + "grad_norm": 0.3153127901217798, + "learning_rate": 6.8417854644680205e-06, + "loss": 1.0374, + "step": 4602 + }, + { + "epoch": 0.8855914624981212, + "grad_norm": 0.2685441588513206, + "learning_rate": 6.819148012847454e-06, + "loss": 1.0042, + "step": 4603 + }, + { + "epoch": 0.8857838569066586, + "grad_norm": 0.3201018442721621, + "learning_rate": 6.796546751596089e-06, + "loss": 1.1054, + "step": 4604 + }, + { + "epoch": 0.8859762513151962, + "grad_norm": 0.3488306027320683, + "learning_rate": 6.773981689491959e-06, + "loss": 1.0534, + "step": 4605 + }, + { + "epoch": 0.8861686457237337, + "grad_norm": 0.3036267090649368, + "learning_rate": 6.75145283529911e-06, + "loss": 0.9497, + "step": 4606 + }, + { + "epoch": 0.8863610401322711, + "grad_norm": 0.44302319637642057, + "learning_rate": 6.728960197767475e-06, + "loss": 1.0181, + "step": 4607 + }, + { + "epoch": 0.8865534345408087, + "grad_norm": 0.4058703377346576, + "learning_rate": 6.706503785632934e-06, 
+ "loss": 1.0813, + "step": 4608 + }, + { + "epoch": 0.8867458289493462, + "grad_norm": 0.33406822452214446, + "learning_rate": 6.68408360761732e-06, + "loss": 1.0352, + "step": 4609 + }, + { + "epoch": 0.8869382233578836, + "grad_norm": 0.27904225537842964, + "learning_rate": 6.661699672428334e-06, + "loss": 1.0031, + "step": 4610 + }, + { + "epoch": 0.8871306177664212, + "grad_norm": 0.41040572374800766, + "learning_rate": 6.639351988759657e-06, + "loss": 1.0692, + "step": 4611 + }, + { + "epoch": 0.8873230121749587, + "grad_norm": 0.32900727877422314, + "learning_rate": 6.6170405652909e-06, + "loss": 0.9534, + "step": 4612 + }, + { + "epoch": 0.8875154065834961, + "grad_norm": 0.3737738495513355, + "learning_rate": 6.594765410687487e-06, + "loss": 1.0797, + "step": 4613 + }, + { + "epoch": 0.8877078009920337, + "grad_norm": 0.30707814793928245, + "learning_rate": 6.57252653360092e-06, + "loss": 0.9994, + "step": 4614 + }, + { + "epoch": 0.8879001954005712, + "grad_norm": 0.33435425358324994, + "learning_rate": 6.550323942668468e-06, + "loss": 0.9721, + "step": 4615 + }, + { + "epoch": 0.8880925898091087, + "grad_norm": 0.3251041998289954, + "learning_rate": 6.528157646513378e-06, + "loss": 1.1378, + "step": 4616 + }, + { + "epoch": 0.8882849842176461, + "grad_norm": 0.3913190590863455, + "learning_rate": 6.506027653744795e-06, + "loss": 1.0791, + "step": 4617 + }, + { + "epoch": 0.8884773786261837, + "grad_norm": 0.32990941547926167, + "learning_rate": 6.483933972957734e-06, + "loss": 1.1513, + "step": 4618 + }, + { + "epoch": 0.8886697730347212, + "grad_norm": 0.4322966205047125, + "learning_rate": 6.461876612733109e-06, + "loss": 1.0151, + "step": 4619 + }, + { + "epoch": 0.8888621674432586, + "grad_norm": 0.2921533053054054, + "learning_rate": 6.439855581637799e-06, + "loss": 1.0156, + "step": 4620 + }, + { + "epoch": 0.8890545618517962, + "grad_norm": 0.33251370626902865, + "learning_rate": 6.417870888224475e-06, + "loss": 1.0399, + "step": 4621 + }, + { + "epoch": 0.8892469562603337, + "grad_norm": 0.2952245531857302, + "learning_rate": 6.39592254103174e-06, + "loss": 0.9662, + "step": 4622 + }, + { + "epoch": 0.8894393506688711, + "grad_norm": 0.32120802741526594, + "learning_rate": 6.37401054858412e-06, + "loss": 1.0353, + "step": 4623 + }, + { + "epoch": 0.8896317450774087, + "grad_norm": 0.3218940622511122, + "learning_rate": 6.352134919391928e-06, + "loss": 1.0695, + "step": 4624 + }, + { + "epoch": 0.8898241394859462, + "grad_norm": 0.31885607353444745, + "learning_rate": 6.330295661951435e-06, + "loss": 1.0587, + "step": 4625 + }, + { + "epoch": 0.8900165338944837, + "grad_norm": 0.34241441708118187, + "learning_rate": 6.308492784744746e-06, + "loss": 0.9965, + "step": 4626 + }, + { + "epoch": 0.8902089283030212, + "grad_norm": 0.31364800543367083, + "learning_rate": 6.2867262962398535e-06, + "loss": 1.0373, + "step": 4627 + }, + { + "epoch": 0.8904013227115587, + "grad_norm": 0.34227924847973723, + "learning_rate": 6.2649962048906276e-06, + "loss": 0.9645, + "step": 4628 + }, + { + "epoch": 0.8905937171200962, + "grad_norm": 0.3110099557536748, + "learning_rate": 6.24330251913674e-06, + "loss": 0.9479, + "step": 4629 + }, + { + "epoch": 0.8907861115286337, + "grad_norm": 0.36315800752798055, + "learning_rate": 6.221645247403807e-06, + "loss": 0.9701, + "step": 4630 + }, + { + "epoch": 0.8909785059371712, + "grad_norm": 0.36357466131651023, + "learning_rate": 6.200024398103255e-06, + "loss": 1.0991, + "step": 4631 + }, + { + "epoch": 0.8911709003457087, + "grad_norm": 
0.3419904365021918, + "learning_rate": 6.178439979632366e-06, + "loss": 0.9602, + "step": 4632 + }, + { + "epoch": 0.8913632947542462, + "grad_norm": 0.29102130463619397, + "learning_rate": 6.156892000374292e-06, + "loss": 1.0854, + "step": 4633 + }, + { + "epoch": 0.8915556891627837, + "grad_norm": 0.3310144371550895, + "learning_rate": 6.135380468698005e-06, + "loss": 1.0653, + "step": 4634 + }, + { + "epoch": 0.8917480835713212, + "grad_norm": 0.2821726495862142, + "learning_rate": 6.113905392958341e-06, + "loss": 1.0389, + "step": 4635 + }, + { + "epoch": 0.8919404779798588, + "grad_norm": 0.38318683412149257, + "learning_rate": 6.092466781495976e-06, + "loss": 1.059, + "step": 4636 + }, + { + "epoch": 0.8921328723883962, + "grad_norm": 0.32372680121821257, + "learning_rate": 6.071064642637403e-06, + "loss": 0.9398, + "step": 4637 + }, + { + "epoch": 0.8923252667969337, + "grad_norm": 0.31883471511113454, + "learning_rate": 6.049698984695007e-06, + "loss": 1.0942, + "step": 4638 + }, + { + "epoch": 0.8925176612054713, + "grad_norm": 0.31037935158238317, + "learning_rate": 6.028369815966917e-06, + "loss": 1.0672, + "step": 4639 + }, + { + "epoch": 0.8927100556140087, + "grad_norm": 0.4310545184398492, + "learning_rate": 6.007077144737161e-06, + "loss": 1.0144, + "step": 4640 + }, + { + "epoch": 0.8929024500225462, + "grad_norm": 0.3275514347235563, + "learning_rate": 5.985820979275569e-06, + "loss": 1.0361, + "step": 4641 + }, + { + "epoch": 0.8930948444310837, + "grad_norm": 0.3120789409608759, + "learning_rate": 5.96460132783776e-06, + "loss": 1.0356, + "step": 4642 + }, + { + "epoch": 0.8932872388396212, + "grad_norm": 0.2993952674923048, + "learning_rate": 5.943418198665252e-06, + "loss": 1.0275, + "step": 4643 + }, + { + "epoch": 0.8934796332481587, + "grad_norm": 0.35402351847110725, + "learning_rate": 5.922271599985285e-06, + "loss": 1.0626, + "step": 4644 + }, + { + "epoch": 0.8936720276566962, + "grad_norm": 0.38675466255553903, + "learning_rate": 5.901161540010968e-06, + "loss": 0.9366, + "step": 4645 + }, + { + "epoch": 0.8938644220652338, + "grad_norm": 0.35253814137892703, + "learning_rate": 5.8800880269412326e-06, + "loss": 1.0693, + "step": 4646 + }, + { + "epoch": 0.8940568164737712, + "grad_norm": 0.34140731566655314, + "learning_rate": 5.8590510689607415e-06, + "loss": 0.9287, + "step": 4647 + }, + { + "epoch": 0.8942492108823087, + "grad_norm": 0.3205469940104918, + "learning_rate": 5.838050674240025e-06, + "loss": 1.053, + "step": 4648 + }, + { + "epoch": 0.8944416052908463, + "grad_norm": 0.32893328930589, + "learning_rate": 5.817086850935416e-06, + "loss": 0.9709, + "step": 4649 + }, + { + "epoch": 0.8946339996993837, + "grad_norm": 0.2938137267581275, + "learning_rate": 5.796159607189e-06, + "loss": 1.0779, + "step": 4650 + }, + { + "epoch": 0.8948263941079212, + "grad_norm": 0.3436050832639803, + "learning_rate": 5.775268951128676e-06, + "loss": 1.0848, + "step": 4651 + }, + { + "epoch": 0.8950187885164588, + "grad_norm": 0.328391758232989, + "learning_rate": 5.754414890868154e-06, + "loss": 1.0354, + "step": 4652 + }, + { + "epoch": 0.8952111829249962, + "grad_norm": 0.28888688147049585, + "learning_rate": 5.733597434506877e-06, + "loss": 1.0025, + "step": 4653 + }, + { + "epoch": 0.8954035773335337, + "grad_norm": 0.28007441025842733, + "learning_rate": 5.712816590130132e-06, + "loss": 1.0741, + "step": 4654 + }, + { + "epoch": 0.8955959717420713, + "grad_norm": 0.26421831273018, + "learning_rate": 5.6920723658089534e-06, + "loss": 0.9876, + "step": 4655 + }, + 
{ + "epoch": 0.8957883661506088, + "grad_norm": 0.27173567424146877, + "learning_rate": 5.671364769600163e-06, + "loss": 1.0714, + "step": 4656 + }, + { + "epoch": 0.8959807605591462, + "grad_norm": 0.26447474562109996, + "learning_rate": 5.650693809546348e-06, + "loss": 1.0254, + "step": 4657 + }, + { + "epoch": 0.8961731549676838, + "grad_norm": 0.2975042806371661, + "learning_rate": 5.630059493675865e-06, + "loss": 1.0022, + "step": 4658 + }, + { + "epoch": 0.8963655493762213, + "grad_norm": 0.48556404937171793, + "learning_rate": 5.609461830002849e-06, + "loss": 1.0284, + "step": 4659 + }, + { + "epoch": 0.8965579437847587, + "grad_norm": 0.286139606108514, + "learning_rate": 5.5889008265271995e-06, + "loss": 1.0405, + "step": 4660 + }, + { + "epoch": 0.8967503381932963, + "grad_norm": 0.3164533873164714, + "learning_rate": 5.5683764912345615e-06, + "loss": 1.1313, + "step": 4661 + }, + { + "epoch": 0.8969427326018338, + "grad_norm": 0.282476107216376, + "learning_rate": 5.547888832096382e-06, + "loss": 1.042, + "step": 4662 + }, + { + "epoch": 0.8971351270103712, + "grad_norm": 0.307869337456247, + "learning_rate": 5.527437857069784e-06, + "loss": 1.085, + "step": 4663 + }, + { + "epoch": 0.8973275214189088, + "grad_norm": 0.3145844460023375, + "learning_rate": 5.507023574097725e-06, + "loss": 1.1131, + "step": 4664 + }, + { + "epoch": 0.8975199158274463, + "grad_norm": 0.4021742817787599, + "learning_rate": 5.486645991108874e-06, + "loss": 1.0051, + "step": 4665 + }, + { + "epoch": 0.8977123102359837, + "grad_norm": 0.3137743874729658, + "learning_rate": 5.4663051160176225e-06, + "loss": 1.035, + "step": 4666 + }, + { + "epoch": 0.8979047046445213, + "grad_norm": 0.28991803622695916, + "learning_rate": 5.446000956724173e-06, + "loss": 0.9653, + "step": 4667 + }, + { + "epoch": 0.8980970990530588, + "grad_norm": 0.28534420867723764, + "learning_rate": 5.4257335211143955e-06, + "loss": 0.9961, + "step": 4668 + }, + { + "epoch": 0.8982894934615963, + "grad_norm": 0.32819704564998353, + "learning_rate": 5.4055028170599374e-06, + "loss": 1.0489, + "step": 4669 + }, + { + "epoch": 0.8984818878701337, + "grad_norm": 0.32864091963419684, + "learning_rate": 5.3853088524181914e-06, + "loss": 1.0522, + "step": 4670 + }, + { + "epoch": 0.8986742822786713, + "grad_norm": 0.3361236809262748, + "learning_rate": 5.365151635032218e-06, + "loss": 1.0878, + "step": 4671 + }, + { + "epoch": 0.8988666766872088, + "grad_norm": 0.32311371417787205, + "learning_rate": 5.345031172730875e-06, + "loss": 1.028, + "step": 4672 + }, + { + "epoch": 0.8990590710957462, + "grad_norm": 0.29418311661565816, + "learning_rate": 5.3249474733287345e-06, + "loss": 0.9015, + "step": 4673 + }, + { + "epoch": 0.8992514655042838, + "grad_norm": 0.344500548694273, + "learning_rate": 5.3049005446260455e-06, + "loss": 1.0007, + "step": 4674 + }, + { + "epoch": 0.8994438599128213, + "grad_norm": 0.36044099566013865, + "learning_rate": 5.284890394408826e-06, + "loss": 0.9919, + "step": 4675 + }, + { + "epoch": 0.8996362543213587, + "grad_norm": 0.31737076944270715, + "learning_rate": 5.2649170304487576e-06, + "loss": 1.0594, + "step": 4676 + }, + { + "epoch": 0.8998286487298963, + "grad_norm": 0.299635053084897, + "learning_rate": 5.2449804605032685e-06, + "loss": 1.0815, + "step": 4677 + }, + { + "epoch": 0.9000210431384338, + "grad_norm": 0.28145817307932297, + "learning_rate": 5.225080692315532e-06, + "loss": 1.0752, + "step": 4678 + }, + { + "epoch": 0.9002134375469713, + "grad_norm": 0.4469966026197235, + "learning_rate": 
5.205217733614353e-06, + "loss": 1.042, + "step": 4679 + }, + { + "epoch": 0.9004058319555088, + "grad_norm": 0.30239234010479293, + "learning_rate": 5.185391592114286e-06, + "loss": 1.0656, + "step": 4680 + }, + { + "epoch": 0.9005982263640463, + "grad_norm": 0.35043673045070767, + "learning_rate": 5.165602275515591e-06, + "loss": 1.0589, + "step": 4681 + }, + { + "epoch": 0.9007906207725838, + "grad_norm": 0.36906667940377264, + "learning_rate": 5.145849791504187e-06, + "loss": 1.0616, + "step": 4682 + }, + { + "epoch": 0.9009830151811213, + "grad_norm": 0.31964155258238713, + "learning_rate": 5.126134147751716e-06, + "loss": 1.0624, + "step": 4683 + }, + { + "epoch": 0.9011754095896588, + "grad_norm": 0.30297233254917755, + "learning_rate": 5.106455351915507e-06, + "loss": 1.0991, + "step": 4684 + }, + { + "epoch": 0.9013678039981963, + "grad_norm": 0.25680122431409214, + "learning_rate": 5.0868134116385805e-06, + "loss": 1.0324, + "step": 4685 + }, + { + "epoch": 0.9015601984067338, + "grad_norm": 0.3376287318935145, + "learning_rate": 5.067208334549656e-06, + "loss": 1.1278, + "step": 4686 + }, + { + "epoch": 0.9017525928152713, + "grad_norm": 0.3834491733753379, + "learning_rate": 5.047640128263087e-06, + "loss": 1.0904, + "step": 4687 + }, + { + "epoch": 0.9019449872238088, + "grad_norm": 0.33098987845620953, + "learning_rate": 5.028108800378961e-06, + "loss": 1.0127, + "step": 4688 + }, + { + "epoch": 0.9021373816323464, + "grad_norm": 0.3162574053758955, + "learning_rate": 5.008614358483022e-06, + "loss": 1.1005, + "step": 4689 + }, + { + "epoch": 0.9023297760408838, + "grad_norm": 0.3370414185244734, + "learning_rate": 4.9891568101466666e-06, + "loss": 0.9485, + "step": 4690 + }, + { + "epoch": 0.9025221704494213, + "grad_norm": 0.2859260081361284, + "learning_rate": 4.969736162927019e-06, + "loss": 0.9994, + "step": 4691 + }, + { + "epoch": 0.9027145648579589, + "grad_norm": 0.31877757567776244, + "learning_rate": 4.9503524243667994e-06, + "loss": 1.0455, + "step": 4692 + }, + { + "epoch": 0.9029069592664963, + "grad_norm": 0.37209508640453964, + "learning_rate": 4.931005601994432e-06, + "loss": 0.9753, + "step": 4693 + }, + { + "epoch": 0.9030993536750338, + "grad_norm": 0.344925861162728, + "learning_rate": 4.911695703324038e-06, + "loss": 1.1069, + "step": 4694 + }, + { + "epoch": 0.9032917480835713, + "grad_norm": 0.3407600719310235, + "learning_rate": 4.892422735855285e-06, + "loss": 1.0365, + "step": 4695 + }, + { + "epoch": 0.9034841424921088, + "grad_norm": 0.3288644993736094, + "learning_rate": 4.873186707073663e-06, + "loss": 1.0268, + "step": 4696 + }, + { + "epoch": 0.9036765369006463, + "grad_norm": 0.4030587255941124, + "learning_rate": 4.853987624450151e-06, + "loss": 0.9906, + "step": 4697 + }, + { + "epoch": 0.9038689313091838, + "grad_norm": 0.3603505485429001, + "learning_rate": 4.834825495441475e-06, + "loss": 1.0784, + "step": 4698 + }, + { + "epoch": 0.9040613257177214, + "grad_norm": 0.3685601711605079, + "learning_rate": 4.815700327490014e-06, + "loss": 0.9907, + "step": 4699 + }, + { + "epoch": 0.9042537201262588, + "grad_norm": 0.29755955115289273, + "learning_rate": 4.796612128023725e-06, + "loss": 1.0372, + "step": 4700 + }, + { + "epoch": 0.9044461145347963, + "grad_norm": 0.3702446507454852, + "learning_rate": 4.777560904456235e-06, + "loss": 0.9751, + "step": 4701 + }, + { + "epoch": 0.9046385089433339, + "grad_norm": 0.33105706567839577, + "learning_rate": 4.758546664186869e-06, + "loss": 1.1285, + "step": 4702 + }, + { + "epoch": 
0.9048309033518713, + "grad_norm": 0.3012697441491406, + "learning_rate": 4.739569414600498e-06, + "loss": 1.1422, + "step": 4703 + }, + { + "epoch": 0.9050232977604088, + "grad_norm": 0.48643189319635904, + "learning_rate": 4.720629163067703e-06, + "loss": 0.9624, + "step": 4704 + }, + { + "epoch": 0.9052156921689464, + "grad_norm": 0.27923402852355445, + "learning_rate": 4.70172591694461e-06, + "loss": 1.0395, + "step": 4705 + }, + { + "epoch": 0.9054080865774838, + "grad_norm": 0.37346233164118836, + "learning_rate": 4.682859683573048e-06, + "loss": 1.0817, + "step": 4706 + }, + { + "epoch": 0.9056004809860213, + "grad_norm": 0.294511129772034, + "learning_rate": 4.664030470280467e-06, + "loss": 0.9809, + "step": 4707 + }, + { + "epoch": 0.9057928753945589, + "grad_norm": 0.33917260682397626, + "learning_rate": 4.645238284379883e-06, + "loss": 1.0332, + "step": 4708 + }, + { + "epoch": 0.9059852698030963, + "grad_norm": 0.3418696516815014, + "learning_rate": 4.6264831331699675e-06, + "loss": 1.0228, + "step": 4709 + }, + { + "epoch": 0.9061776642116338, + "grad_norm": 0.2785906410099841, + "learning_rate": 4.60776502393504e-06, + "loss": 1.0427, + "step": 4710 + }, + { + "epoch": 0.9063700586201714, + "grad_norm": 0.2751496166182031, + "learning_rate": 4.589083963944951e-06, + "loss": 1.1048, + "step": 4711 + }, + { + "epoch": 0.9065624530287089, + "grad_norm": 0.3469497046864914, + "learning_rate": 4.570439960455242e-06, + "loss": 1.1205, + "step": 4712 + }, + { + "epoch": 0.9067548474372463, + "grad_norm": 0.32243734446344646, + "learning_rate": 4.551833020707008e-06, + "loss": 1.0166, + "step": 4713 + }, + { + "epoch": 0.9069472418457839, + "grad_norm": 0.2835201392756422, + "learning_rate": 4.533263151926981e-06, + "loss": 0.9664, + "step": 4714 + }, + { + "epoch": 0.9071396362543214, + "grad_norm": 0.3344748355979359, + "learning_rate": 4.5147303613275016e-06, + "loss": 1.045, + "step": 4715 + }, + { + "epoch": 0.9073320306628588, + "grad_norm": 0.3730153351141198, + "learning_rate": 4.496234656106457e-06, + "loss": 1.0579, + "step": 4716 + }, + { + "epoch": 0.9075244250713964, + "grad_norm": 0.390287045584428, + "learning_rate": 4.477776043447379e-06, + "loss": 1.0027, + "step": 4717 + }, + { + "epoch": 0.9077168194799339, + "grad_norm": 0.3310936579781912, + "learning_rate": 4.459354530519377e-06, + "loss": 1.0485, + "step": 4718 + }, + { + "epoch": 0.9079092138884713, + "grad_norm": 0.3354224436322591, + "learning_rate": 4.440970124477173e-06, + "loss": 0.9852, + "step": 4719 + }, + { + "epoch": 0.9081016082970089, + "grad_norm": 0.2848795605285452, + "learning_rate": 4.422622832461054e-06, + "loss": 1.0174, + "step": 4720 + }, + { + "epoch": 0.9082940027055464, + "grad_norm": 0.3156990972632789, + "learning_rate": 4.404312661596877e-06, + "loss": 1.0204, + "step": 4721 + }, + { + "epoch": 0.9084863971140839, + "grad_norm": 0.29160329378948585, + "learning_rate": 4.386039618996119e-06, + "loss": 1.1066, + "step": 4722 + }, + { + "epoch": 0.9086787915226213, + "grad_norm": 0.27796622696943685, + "learning_rate": 4.367803711755814e-06, + "loss": 1.0305, + "step": 4723 + }, + { + "epoch": 0.9088711859311589, + "grad_norm": 0.317004259576644, + "learning_rate": 4.349604946958563e-06, + "loss": 1.1259, + "step": 4724 + }, + { + "epoch": 0.9090635803396964, + "grad_norm": 0.29366726007516236, + "learning_rate": 4.331443331672591e-06, + "loss": 1.0927, + "step": 4725 + }, + { + "epoch": 0.9092559747482338, + "grad_norm": 0.3504554452641467, + "learning_rate": 4.313318872951633e-06, + 
"loss": 1.0233, + "step": 4726 + }, + { + "epoch": 0.9094483691567714, + "grad_norm": 0.39513607869209655, + "learning_rate": 4.295231577835024e-06, + "loss": 1.0777, + "step": 4727 + }, + { + "epoch": 0.9096407635653089, + "grad_norm": 0.36810292242258597, + "learning_rate": 4.277181453347678e-06, + "loss": 1.0282, + "step": 4728 + }, + { + "epoch": 0.9098331579738463, + "grad_norm": 0.36434477303207796, + "learning_rate": 4.259168506500022e-06, + "loss": 1.0809, + "step": 4729 + }, + { + "epoch": 0.9100255523823839, + "grad_norm": 0.35647505279118624, + "learning_rate": 4.241192744288091e-06, + "loss": 1.0299, + "step": 4730 + }, + { + "epoch": 0.9102179467909214, + "grad_norm": 0.401373897766685, + "learning_rate": 4.223254173693492e-06, + "loss": 1.0311, + "step": 4731 + }, + { + "epoch": 0.9104103411994589, + "grad_norm": 0.3423815390074625, + "learning_rate": 4.205352801683327e-06, + "loss": 0.9968, + "step": 4732 + }, + { + "epoch": 0.9106027356079964, + "grad_norm": 0.34650929862977964, + "learning_rate": 4.187488635210302e-06, + "loss": 1.0492, + "step": 4733 + }, + { + "epoch": 0.9107951300165339, + "grad_norm": 0.3320047163071482, + "learning_rate": 4.1696616812126335e-06, + "loss": 1.0063, + "step": 4734 + }, + { + "epoch": 0.9109875244250714, + "grad_norm": 0.2984260035123869, + "learning_rate": 4.151871946614116e-06, + "loss": 1.0452, + "step": 4735 + }, + { + "epoch": 0.9111799188336089, + "grad_norm": 0.3287456793464544, + "learning_rate": 4.13411943832408e-06, + "loss": 1.0614, + "step": 4736 + }, + { + "epoch": 0.9113723132421464, + "grad_norm": 0.3538509811310011, + "learning_rate": 4.116404163237386e-06, + "loss": 0.9903, + "step": 4737 + }, + { + "epoch": 0.9115647076506839, + "grad_norm": 0.311754307606841, + "learning_rate": 4.098726128234443e-06, + "loss": 1.0264, + "step": 4738 + }, + { + "epoch": 0.9117571020592214, + "grad_norm": 0.31461694776761406, + "learning_rate": 4.081085340181223e-06, + "loss": 1.0102, + "step": 4739 + }, + { + "epoch": 0.9119494964677589, + "grad_norm": 0.3428107142670629, + "learning_rate": 4.06348180592917e-06, + "loss": 1.0326, + "step": 4740 + }, + { + "epoch": 0.9121418908762964, + "grad_norm": 0.3130639226250299, + "learning_rate": 4.0459155323153035e-06, + "loss": 1.0298, + "step": 4741 + }, + { + "epoch": 0.912334285284834, + "grad_norm": 0.31660763786422674, + "learning_rate": 4.028386526162175e-06, + "loss": 1.1499, + "step": 4742 + }, + { + "epoch": 0.9125266796933714, + "grad_norm": 0.35070366408568776, + "learning_rate": 4.010894794277831e-06, + "loss": 1.0468, + "step": 4743 + }, + { + "epoch": 0.9127190741019089, + "grad_norm": 0.32266840253917106, + "learning_rate": 3.993440343455878e-06, + "loss": 0.9768, + "step": 4744 + }, + { + "epoch": 0.9129114685104465, + "grad_norm": 0.30485752247821407, + "learning_rate": 3.976023180475397e-06, + "loss": 0.9941, + "step": 4745 + }, + { + "epoch": 0.9131038629189839, + "grad_norm": 0.349700848622084, + "learning_rate": 3.95864331210104e-06, + "loss": 1.1024, + "step": 4746 + }, + { + "epoch": 0.9132962573275214, + "grad_norm": 0.35768207410177166, + "learning_rate": 3.941300745082932e-06, + "loss": 1.0066, + "step": 4747 + }, + { + "epoch": 0.9134886517360589, + "grad_norm": 0.3486707573125071, + "learning_rate": 3.923995486156717e-06, + "loss": 0.9458, + "step": 4748 + }, + { + "epoch": 0.9136810461445964, + "grad_norm": 0.29821996715749854, + "learning_rate": 3.906727542043598e-06, + "loss": 1.0813, + "step": 4749 + }, + { + "epoch": 0.9138734405531339, + "grad_norm": 
0.34515012431476105, + "learning_rate": 3.889496919450208e-06, + "loss": 1.0102, + "step": 4750 + }, + { + "epoch": 0.9140658349616714, + "grad_norm": 0.25496242531786867, + "learning_rate": 3.872303625068729e-06, + "loss": 1.0312, + "step": 4751 + }, + { + "epoch": 0.914258229370209, + "grad_norm": 0.34750426348871405, + "learning_rate": 3.8551476655768525e-06, + "loss": 0.9874, + "step": 4752 + }, + { + "epoch": 0.9144506237787464, + "grad_norm": 0.2744330157187026, + "learning_rate": 3.8380290476377255e-06, + "loss": 0.9901, + "step": 4753 + }, + { + "epoch": 0.9146430181872839, + "grad_norm": 0.3876490329659078, + "learning_rate": 3.820947777900063e-06, + "loss": 0.9687, + "step": 4754 + }, + { + "epoch": 0.9148354125958215, + "grad_norm": 0.39139861433816614, + "learning_rate": 3.803903862998004e-06, + "loss": 1.0299, + "step": 4755 + }, + { + "epoch": 0.9150278070043589, + "grad_norm": 0.3063743765811644, + "learning_rate": 3.7868973095512185e-06, + "loss": 1.0577, + "step": 4756 + }, + { + "epoch": 0.9152202014128964, + "grad_norm": 0.36828417153808, + "learning_rate": 3.7699281241648564e-06, + "loss": 0.9977, + "step": 4757 + }, + { + "epoch": 0.915412595821434, + "grad_norm": 0.28780300258749514, + "learning_rate": 3.7529963134295465e-06, + "loss": 1.0596, + "step": 4758 + }, + { + "epoch": 0.9156049902299714, + "grad_norm": 0.3461580433326025, + "learning_rate": 3.7361018839214057e-06, + "loss": 1.0413, + "step": 4759 + }, + { + "epoch": 0.9157973846385089, + "grad_norm": 0.2850723602444769, + "learning_rate": 3.7192448422020743e-06, + "loss": 1.0108, + "step": 4760 + }, + { + "epoch": 0.9159897790470465, + "grad_norm": 0.2810026001645126, + "learning_rate": 3.7024251948185816e-06, + "loss": 1.0281, + "step": 4761 + }, + { + "epoch": 0.916182173455584, + "grad_norm": 0.3203215907471018, + "learning_rate": 3.6856429483035028e-06, + "loss": 0.9606, + "step": 4762 + }, + { + "epoch": 0.9163745678641214, + "grad_norm": 0.3064091568721602, + "learning_rate": 3.66889810917489e-06, + "loss": 0.95, + "step": 4763 + }, + { + "epoch": 0.916566962272659, + "grad_norm": 0.31491530367666204, + "learning_rate": 3.6521906839362185e-06, + "loss": 1.1035, + "step": 4764 + }, + { + "epoch": 0.9167593566811965, + "grad_norm": 0.32663413046198564, + "learning_rate": 3.635520679076465e-06, + "loss": 1.0336, + "step": 4765 + }, + { + "epoch": 0.9169517510897339, + "grad_norm": 0.335316225236997, + "learning_rate": 3.6188881010700726e-06, + "loss": 0.9827, + "step": 4766 + }, + { + "epoch": 0.9171441454982715, + "grad_norm": 0.2799735952248801, + "learning_rate": 3.6022929563769513e-06, + "loss": 1.0344, + "step": 4767 + }, + { + "epoch": 0.917336539906809, + "grad_norm": 0.30110546928367454, + "learning_rate": 3.585735251442457e-06, + "loss": 1.0451, + "step": 4768 + }, + { + "epoch": 0.9175289343153464, + "grad_norm": 0.30754811420297273, + "learning_rate": 3.5692149926974006e-06, + "loss": 1.0564, + "step": 4769 + }, + { + "epoch": 0.917721328723884, + "grad_norm": 0.4117794890640593, + "learning_rate": 3.552732186558072e-06, + "loss": 1.0439, + "step": 4770 + }, + { + "epoch": 0.9179137231324215, + "grad_norm": 0.2867302225609774, + "learning_rate": 3.536286839426195e-06, + "loss": 1.0288, + "step": 4771 + }, + { + "epoch": 0.9181061175409589, + "grad_norm": 0.3223331881370476, + "learning_rate": 3.5198789576889603e-06, + "loss": 1.0712, + "step": 4772 + }, + { + "epoch": 0.9182985119494965, + "grad_norm": 0.299777090994648, + "learning_rate": 3.5035085477190143e-06, + "loss": 0.9606, + "step": 
4773 + }, + { + "epoch": 0.918490906358034, + "grad_norm": 0.2979044891398794, + "learning_rate": 3.4871756158744052e-06, + "loss": 1.1323, + "step": 4774 + }, + { + "epoch": 0.9186833007665715, + "grad_norm": 0.3504658917550687, + "learning_rate": 3.470880168498669e-06, + "loss": 0.9905, + "step": 4775 + }, + { + "epoch": 0.9188756951751089, + "grad_norm": 0.28519204077072113, + "learning_rate": 3.454622211920766e-06, + "loss": 1.0676, + "step": 4776 + }, + { + "epoch": 0.9190680895836465, + "grad_norm": 0.3749364264858349, + "learning_rate": 3.4384017524551116e-06, + "loss": 1.0138, + "step": 4777 + }, + { + "epoch": 0.919260483992184, + "grad_norm": 0.29620547010939036, + "learning_rate": 3.422218796401544e-06, + "loss": 1.0629, + "step": 4778 + }, + { + "epoch": 0.9194528784007214, + "grad_norm": 0.3448874604740178, + "learning_rate": 3.4060733500453247e-06, + "loss": 1.0431, + "step": 4779 + }, + { + "epoch": 0.919645272809259, + "grad_norm": 0.4293918180477315, + "learning_rate": 3.3899654196571597e-06, + "loss": 0.9424, + "step": 4780 + }, + { + "epoch": 0.9198376672177965, + "grad_norm": 0.2685364751795157, + "learning_rate": 3.3738950114932e-06, + "loss": 1.1123, + "step": 4781 + }, + { + "epoch": 0.9200300616263339, + "grad_norm": 0.3094663767757603, + "learning_rate": 3.3578621317949756e-06, + "loss": 0.9291, + "step": 4782 + }, + { + "epoch": 0.9202224560348715, + "grad_norm": 0.31426169410071264, + "learning_rate": 3.3418667867895047e-06, + "loss": 1.0479, + "step": 4783 + }, + { + "epoch": 0.920414850443409, + "grad_norm": 0.3005711694545352, + "learning_rate": 3.3259089826891854e-06, + "loss": 1.0308, + "step": 4784 + }, + { + "epoch": 0.9206072448519464, + "grad_norm": 0.32478489128523025, + "learning_rate": 3.309988725691837e-06, + "loss": 1.0518, + "step": 4785 + }, + { + "epoch": 0.920799639260484, + "grad_norm": 0.378754173468823, + "learning_rate": 3.294106021980714e-06, + "loss": 0.9836, + "step": 4786 + }, + { + "epoch": 0.9209920336690215, + "grad_norm": 0.2864945578879739, + "learning_rate": 3.2782608777244705e-06, + "loss": 1.097, + "step": 4787 + }, + { + "epoch": 0.921184428077559, + "grad_norm": 0.3110923092227332, + "learning_rate": 3.2624532990771505e-06, + "loss": 1.1217, + "step": 4788 + }, + { + "epoch": 0.9213768224860965, + "grad_norm": 0.3725423853538233, + "learning_rate": 3.2466832921782985e-06, + "loss": 1.0299, + "step": 4789 + }, + { + "epoch": 0.921569216894634, + "grad_norm": 0.3205879519436053, + "learning_rate": 3.2309508631527486e-06, + "loss": 1.0659, + "step": 4790 + }, + { + "epoch": 0.9217616113031715, + "grad_norm": 0.33535870039183635, + "learning_rate": 3.215256018110824e-06, + "loss": 1.0597, + "step": 4791 + }, + { + "epoch": 0.921954005711709, + "grad_norm": 0.31841686704841865, + "learning_rate": 3.1995987631482148e-06, + "loss": 1.0244, + "step": 4792 + }, + { + "epoch": 0.9221464001202465, + "grad_norm": 0.2891392043589409, + "learning_rate": 3.1839791043460023e-06, + "loss": 1.0667, + "step": 4793 + }, + { + "epoch": 0.922338794528784, + "grad_norm": 0.350886995142571, + "learning_rate": 3.168397047770699e-06, + "loss": 1.0267, + "step": 4794 + }, + { + "epoch": 0.9225311889373216, + "grad_norm": 0.33852892217693087, + "learning_rate": 3.1528525994741877e-06, + "loss": 0.9916, + "step": 4795 + }, + { + "epoch": 0.922723583345859, + "grad_norm": 0.44013632400182945, + "learning_rate": 3.13734576549376e-06, + "loss": 1.0408, + "step": 4796 + }, + { + "epoch": 0.9229159777543965, + "grad_norm": 0.32845052873146846, + 
"learning_rate": 3.1218765518520985e-06, + "loss": 0.9718, + "step": 4797 + }, + { + "epoch": 0.9231083721629341, + "grad_norm": 0.32663070994418064, + "learning_rate": 3.1064449645572537e-06, + "loss": 1.0918, + "step": 4798 + }, + { + "epoch": 0.9233007665714715, + "grad_norm": 0.26077891738687076, + "learning_rate": 3.0910510096026745e-06, + "loss": 0.991, + "step": 4799 + }, + { + "epoch": 0.923493160980009, + "grad_norm": 0.34353592786377535, + "learning_rate": 3.0756946929672014e-06, + "loss": 0.9836, + "step": 4800 + }, + { + "epoch": 0.9236855553885466, + "grad_norm": 0.32664461616404566, + "learning_rate": 3.0603760206150524e-06, + "loss": 0.9964, + "step": 4801 + }, + { + "epoch": 0.923877949797084, + "grad_norm": 0.3206455614642741, + "learning_rate": 3.045094998495834e-06, + "loss": 1.001, + "step": 4802 + }, + { + "epoch": 0.9240703442056215, + "grad_norm": 0.2951839621408488, + "learning_rate": 3.029851632544489e-06, + "loss": 0.9973, + "step": 4803 + }, + { + "epoch": 0.924262738614159, + "grad_norm": 0.37290732523734266, + "learning_rate": 3.0146459286813923e-06, + "loss": 1.1221, + "step": 4804 + }, + { + "epoch": 0.9244551330226966, + "grad_norm": 0.29236760879957674, + "learning_rate": 2.9994778928122636e-06, + "loss": 1.0356, + "step": 4805 + }, + { + "epoch": 0.924647527431234, + "grad_norm": 0.2931870899031517, + "learning_rate": 2.9843475308281575e-06, + "loss": 0.9466, + "step": 4806 + }, + { + "epoch": 0.9248399218397715, + "grad_norm": 0.31973296672323614, + "learning_rate": 2.9692548486055847e-06, + "loss": 1.0945, + "step": 4807 + }, + { + "epoch": 0.9250323162483091, + "grad_norm": 0.3661219127547001, + "learning_rate": 2.954199852006334e-06, + "loss": 1.0427, + "step": 4808 + }, + { + "epoch": 0.9252247106568465, + "grad_norm": 0.4175494631158559, + "learning_rate": 2.9391825468775945e-06, + "loss": 1.0891, + "step": 4809 + }, + { + "epoch": 0.925417105065384, + "grad_norm": 0.2788125545134578, + "learning_rate": 2.9242029390519456e-06, + "loss": 1.0199, + "step": 4810 + }, + { + "epoch": 0.9256094994739216, + "grad_norm": 0.35030552760024536, + "learning_rate": 2.909261034347255e-06, + "loss": 0.9827, + "step": 4811 + }, + { + "epoch": 0.925801893882459, + "grad_norm": 0.27581407804012614, + "learning_rate": 2.8943568385667918e-06, + "loss": 0.9738, + "step": 4812 + }, + { + "epoch": 0.9259942882909965, + "grad_norm": 0.44156556297116145, + "learning_rate": 2.8794903574992037e-06, + "loss": 1.0266, + "step": 4813 + }, + { + "epoch": 0.9261866826995341, + "grad_norm": 0.29859310101493625, + "learning_rate": 2.8646615969184276e-06, + "loss": 1.0063, + "step": 4814 + }, + { + "epoch": 0.9263790771080715, + "grad_norm": 0.36351924132348623, + "learning_rate": 2.8498705625838117e-06, + "loss": 0.9289, + "step": 4815 + }, + { + "epoch": 0.926571471516609, + "grad_norm": 0.30891981525308526, + "learning_rate": 2.8351172602399946e-06, + "loss": 1.032, + "step": 4816 + }, + { + "epoch": 0.9267638659251466, + "grad_norm": 0.31948945035477055, + "learning_rate": 2.820401695616992e-06, + "loss": 1.069, + "step": 4817 + }, + { + "epoch": 0.9269562603336841, + "grad_norm": 0.2909736294090731, + "learning_rate": 2.8057238744301996e-06, + "loss": 1.0696, + "step": 4818 + }, + { + "epoch": 0.9271486547422215, + "grad_norm": 0.2860334733992794, + "learning_rate": 2.7910838023802676e-06, + "loss": 1.0031, + "step": 4819 + }, + { + "epoch": 0.9273410491507591, + "grad_norm": 0.3069103045701495, + "learning_rate": 2.7764814851532482e-06, + "loss": 1.1075, + "step": 4820 + }, + { 
+ "epoch": 0.9275334435592966, + "grad_norm": 0.27667505843985585, + "learning_rate": 2.761916928420527e-06, + "loss": 1.0639, + "step": 4821 + }, + { + "epoch": 0.927725837967834, + "grad_norm": 0.3386318817559325, + "learning_rate": 2.74739013783879e-06, + "loss": 0.8886, + "step": 4822 + }, + { + "epoch": 0.9279182323763716, + "grad_norm": 0.3547979920898592, + "learning_rate": 2.7329011190500796e-06, + "loss": 1.028, + "step": 4823 + }, + { + "epoch": 0.9281106267849091, + "grad_norm": 0.35650567572881114, + "learning_rate": 2.7184498776817612e-06, + "loss": 1.0684, + "step": 4824 + }, + { + "epoch": 0.9283030211934465, + "grad_norm": 0.32744332231790724, + "learning_rate": 2.704036419346534e-06, + "loss": 1.1264, + "step": 4825 + }, + { + "epoch": 0.9284954156019841, + "grad_norm": 0.3674114391060293, + "learning_rate": 2.689660749642442e-06, + "loss": 1.0133, + "step": 4826 + }, + { + "epoch": 0.9286878100105216, + "grad_norm": 0.37083950050765935, + "learning_rate": 2.675322874152786e-06, + "loss": 1.0182, + "step": 4827 + }, + { + "epoch": 0.928880204419059, + "grad_norm": 0.3209775086730098, + "learning_rate": 2.6610227984462554e-06, + "loss": 1.0543, + "step": 4828 + }, + { + "epoch": 0.9290725988275965, + "grad_norm": 0.2825439751894937, + "learning_rate": 2.6467605280768413e-06, + "loss": 0.9806, + "step": 4829 + }, + { + "epoch": 0.9292649932361341, + "grad_norm": 0.2878430630772789, + "learning_rate": 2.632536068583824e-06, + "loss": 1.0246, + "step": 4830 + }, + { + "epoch": 0.9294573876446716, + "grad_norm": 0.2675421152066387, + "learning_rate": 2.6183494254918505e-06, + "loss": 1.1268, + "step": 4831 + }, + { + "epoch": 0.929649782053209, + "grad_norm": 0.3494602575053354, + "learning_rate": 2.6042006043108247e-06, + "loss": 0.9946, + "step": 4832 + }, + { + "epoch": 0.9298421764617466, + "grad_norm": 0.2922246085136682, + "learning_rate": 2.590089610535984e-06, + "loss": 1.0399, + "step": 4833 + }, + { + "epoch": 0.9300345708702841, + "grad_norm": 0.3528907431025616, + "learning_rate": 2.5760164496479e-06, + "loss": 1.0514, + "step": 4834 + }, + { + "epoch": 0.9302269652788215, + "grad_norm": 0.3598896569766778, + "learning_rate": 2.5619811271123894e-06, + "loss": 1.0533, + "step": 4835 + }, + { + "epoch": 0.9304193596873591, + "grad_norm": 0.3560654831586113, + "learning_rate": 2.5479836483806587e-06, + "loss": 0.9881, + "step": 4836 + }, + { + "epoch": 0.9306117540958966, + "grad_norm": 0.3913011561841819, + "learning_rate": 2.534024018889114e-06, + "loss": 1.0933, + "step": 4837 + }, + { + "epoch": 0.930804148504434, + "grad_norm": 0.339144569976722, + "learning_rate": 2.5201022440595522e-06, + "loss": 1.0482, + "step": 4838 + }, + { + "epoch": 0.9309965429129716, + "grad_norm": 0.32288385554851634, + "learning_rate": 2.506218329299026e-06, + "loss": 1.0367, + "step": 4839 + }, + { + "epoch": 0.9311889373215091, + "grad_norm": 0.32793579252765287, + "learning_rate": 2.492372279999866e-06, + "loss": 1.0376, + "step": 4840 + }, + { + "epoch": 0.9313813317300466, + "grad_norm": 0.3498260259802165, + "learning_rate": 2.4785641015397374e-06, + "loss": 1.031, + "step": 4841 + }, + { + "epoch": 0.9315737261385841, + "grad_norm": 0.41037160210456186, + "learning_rate": 2.464793799281573e-06, + "loss": 0.9956, + "step": 4842 + }, + { + "epoch": 0.9317661205471216, + "grad_norm": 0.3285375397793912, + "learning_rate": 2.4510613785735938e-06, + "loss": 1.025, + "step": 4843 + }, + { + "epoch": 0.9319585149556591, + "grad_norm": 0.35561665183650015, + "learning_rate": 
2.4373668447493224e-06, + "loss": 1.0575, + "step": 4844 + }, + { + "epoch": 0.9321509093641966, + "grad_norm": 0.25749452794270217, + "learning_rate": 2.4237102031275607e-06, + "loss": 1.0743, + "step": 4845 + }, + { + "epoch": 0.9323433037727341, + "grad_norm": 0.33505290025284906, + "learning_rate": 2.4100914590123756e-06, + "loss": 1.0315, + "step": 4846 + }, + { + "epoch": 0.9325356981812716, + "grad_norm": 0.28728856362608546, + "learning_rate": 2.3965106176931374e-06, + "loss": 0.9858, + "step": 4847 + }, + { + "epoch": 0.9327280925898092, + "grad_norm": 0.4902951527240888, + "learning_rate": 2.382967684444493e-06, + "loss": 1.0443, + "step": 4848 + }, + { + "epoch": 0.9329204869983466, + "grad_norm": 0.2917632548051919, + "learning_rate": 2.3694626645263675e-06, + "loss": 1.0427, + "step": 4849 + }, + { + "epoch": 0.9331128814068841, + "grad_norm": 0.33975483972911147, + "learning_rate": 2.3559955631839434e-06, + "loss": 1.0502, + "step": 4850 + }, + { + "epoch": 0.9333052758154217, + "grad_norm": 0.40146935445843523, + "learning_rate": 2.3425663856476932e-06, + "loss": 0.9184, + "step": 4851 + }, + { + "epoch": 0.9334976702239591, + "grad_norm": 0.31735660398853427, + "learning_rate": 2.3291751371333437e-06, + "loss": 0.9794, + "step": 4852 + }, + { + "epoch": 0.9336900646324966, + "grad_norm": 0.310236063303638, + "learning_rate": 2.3158218228419127e-06, + "loss": 1.0753, + "step": 4853 + }, + { + "epoch": 0.9338824590410342, + "grad_norm": 0.3425510296524538, + "learning_rate": 2.3025064479596627e-06, + "loss": 1.0535, + "step": 4854 + }, + { + "epoch": 0.9340748534495716, + "grad_norm": 0.3584980665652243, + "learning_rate": 2.2892290176581676e-06, + "loss": 1.0197, + "step": 4855 + }, + { + "epoch": 0.9342672478581091, + "grad_norm": 0.28912489809723535, + "learning_rate": 2.2759895370941807e-06, + "loss": 1.0575, + "step": 4856 + }, + { + "epoch": 0.9344596422666466, + "grad_norm": 0.3486576476561234, + "learning_rate": 2.2627880114097776e-06, + "loss": 0.9741, + "step": 4857 + }, + { + "epoch": 0.9346520366751842, + "grad_norm": 0.29947452971608934, + "learning_rate": 2.249624445732301e-06, + "loss": 1.033, + "step": 4858 + }, + { + "epoch": 0.9348444310837216, + "grad_norm": 0.33413340824935367, + "learning_rate": 2.2364988451742953e-06, + "loss": 0.9967, + "step": 4859 + }, + { + "epoch": 0.9350368254922591, + "grad_norm": 0.33405221722967166, + "learning_rate": 2.223411214833637e-06, + "loss": 1.0954, + "step": 4860 + }, + { + "epoch": 0.9352292199007967, + "grad_norm": 0.4607464547692085, + "learning_rate": 2.210361559793361e-06, + "loss": 0.9939, + "step": 4861 + }, + { + "epoch": 0.9354216143093341, + "grad_norm": 0.25654193414478754, + "learning_rate": 2.1973498851218242e-06, + "loss": 1.048, + "step": 4862 + }, + { + "epoch": 0.9356140087178716, + "grad_norm": 0.3289380619374026, + "learning_rate": 2.1843761958726285e-06, + "loss": 1.122, + "step": 4863 + }, + { + "epoch": 0.9358064031264092, + "grad_norm": 0.3376021402986719, + "learning_rate": 2.1714404970845647e-06, + "loss": 1.0825, + "step": 4864 + }, + { + "epoch": 0.9359987975349466, + "grad_norm": 0.33197297073054194, + "learning_rate": 2.158542793781759e-06, + "loss": 1.1005, + "step": 4865 + }, + { + "epoch": 0.9361911919434841, + "grad_norm": 0.3715262749437805, + "learning_rate": 2.1456830909734936e-06, + "loss": 1.0331, + "step": 4866 + }, + { + "epoch": 0.9363835863520217, + "grad_norm": 0.31017615300071755, + "learning_rate": 2.1328613936543397e-06, + "loss": 0.986, + "step": 4867 + }, + { + "epoch": 
0.9365759807605591, + "grad_norm": 0.4285549502927926, + "learning_rate": 2.1200777068041134e-06, + "loss": 1.0437, + "step": 4868 + }, + { + "epoch": 0.9367683751690966, + "grad_norm": 0.28864745103907863, + "learning_rate": 2.10733203538781e-06, + "loss": 1.0907, + "step": 4869 + }, + { + "epoch": 0.9369607695776342, + "grad_norm": 0.36904236344609426, + "learning_rate": 2.0946243843557367e-06, + "loss": 1.0278, + "step": 4870 + }, + { + "epoch": 0.9371531639861717, + "grad_norm": 0.3102728966580874, + "learning_rate": 2.081954758643401e-06, + "loss": 1.029, + "step": 4871 + }, + { + "epoch": 0.9373455583947091, + "grad_norm": 0.3123550210871251, + "learning_rate": 2.0693231631715103e-06, + "loss": 0.9711, + "step": 4872 + }, + { + "epoch": 0.9375379528032467, + "grad_norm": 0.26796339723537277, + "learning_rate": 2.0567296028460637e-06, + "loss": 1.0617, + "step": 4873 + }, + { + "epoch": 0.9377303472117842, + "grad_norm": 0.3169742849441562, + "learning_rate": 2.0441740825582257e-06, + "loss": 1.0811, + "step": 4874 + }, + { + "epoch": 0.9379227416203216, + "grad_norm": 0.3962702167905515, + "learning_rate": 2.03165660718444e-06, + "loss": 1.1389, + "step": 4875 + }, + { + "epoch": 0.9381151360288592, + "grad_norm": 0.38869223617079507, + "learning_rate": 2.019177181586329e-06, + "loss": 1.0143, + "step": 4876 + }, + { + "epoch": 0.9383075304373967, + "grad_norm": 0.43113120042528336, + "learning_rate": 2.0067358106107716e-06, + "loss": 1.0278, + "step": 4877 + }, + { + "epoch": 0.9384999248459341, + "grad_norm": 0.4273010434622452, + "learning_rate": 1.994332499089846e-06, + "loss": 1.0516, + "step": 4878 + }, + { + "epoch": 0.9386923192544717, + "grad_norm": 0.33401338414813536, + "learning_rate": 1.9819672518408663e-06, + "loss": 0.9897, + "step": 4879 + }, + { + "epoch": 0.9388847136630092, + "grad_norm": 0.2903052212541772, + "learning_rate": 1.9696400736663457e-06, + "loss": 1.0415, + "step": 4880 + }, + { + "epoch": 0.9390771080715467, + "grad_norm": 0.2885973544109154, + "learning_rate": 1.9573509693540102e-06, + "loss": 1.0472, + "step": 4881 + }, + { + "epoch": 0.9392695024800841, + "grad_norm": 0.4055233808292139, + "learning_rate": 1.945099943676809e-06, + "loss": 1.0238, + "step": 4882 + }, + { + "epoch": 0.9394618968886217, + "grad_norm": 0.3043600360792589, + "learning_rate": 1.9328870013929135e-06, + "loss": 0.9858, + "step": 4883 + }, + { + "epoch": 0.9396542912971592, + "grad_norm": 0.3288107196283699, + "learning_rate": 1.9207121472456847e-06, + "loss": 1.0178, + "step": 4884 + }, + { + "epoch": 0.9398466857056966, + "grad_norm": 0.34658482573316357, + "learning_rate": 1.9085753859636734e-06, + "loss": 1.0898, + "step": 4885 + }, + { + "epoch": 0.9400390801142342, + "grad_norm": 0.3172365309727629, + "learning_rate": 1.8964767222606871e-06, + "loss": 1.025, + "step": 4886 + }, + { + "epoch": 0.9402314745227717, + "grad_norm": 0.34031796787447277, + "learning_rate": 1.8844161608356781e-06, + "loss": 1.0427, + "step": 4887 + }, + { + "epoch": 0.9404238689313091, + "grad_norm": 0.3291207418764745, + "learning_rate": 1.872393706372866e-06, + "loss": 1.0009, + "step": 4888 + }, + { + "epoch": 0.9406162633398467, + "grad_norm": 0.29478539067629855, + "learning_rate": 1.8604093635416153e-06, + "loss": 1.0464, + "step": 4889 + }, + { + "epoch": 0.9408086577483842, + "grad_norm": 0.3623751718748766, + "learning_rate": 1.8484631369964922e-06, + "loss": 1.0602, + "step": 4890 + }, + { + "epoch": 0.9410010521569216, + "grad_norm": 0.30845842469050583, + "learning_rate": 
1.836555031377285e-06, + "loss": 1.0668, + "step": 4891 + }, + { + "epoch": 0.9411934465654592, + "grad_norm": 0.32249605054941965, + "learning_rate": 1.824685051308983e-06, + "loss": 1.091, + "step": 4892 + }, + { + "epoch": 0.9413858409739967, + "grad_norm": 0.3138256728496758, + "learning_rate": 1.8128532014017096e-06, + "loss": 1.0716, + "step": 4893 + }, + { + "epoch": 0.9415782353825342, + "grad_norm": 0.3119478462358224, + "learning_rate": 1.8010594862508668e-06, + "loss": 0.942, + "step": 4894 + }, + { + "epoch": 0.9417706297910717, + "grad_norm": 0.3488029324246239, + "learning_rate": 1.789303910436968e-06, + "loss": 1.0603, + "step": 4895 + }, + { + "epoch": 0.9419630241996092, + "grad_norm": 0.3787800306507009, + "learning_rate": 1.777586478525739e-06, + "loss": 1.046, + "step": 4896 + }, + { + "epoch": 0.9421554186081467, + "grad_norm": 0.3135274644887523, + "learning_rate": 1.7659071950681172e-06, + "loss": 0.9332, + "step": 4897 + }, + { + "epoch": 0.9423478130166842, + "grad_norm": 0.33137046188757624, + "learning_rate": 1.754266064600174e-06, + "loss": 1.0091, + "step": 4898 + }, + { + "epoch": 0.9425402074252217, + "grad_norm": 0.3065716222440659, + "learning_rate": 1.7426630916432041e-06, + "loss": 1.0374, + "step": 4899 + }, + { + "epoch": 0.9427326018337592, + "grad_norm": 0.31380857607841134, + "learning_rate": 1.7310982807036912e-06, + "loss": 1.0093, + "step": 4900 + }, + { + "epoch": 0.9429249962422968, + "grad_norm": 0.3762180087028263, + "learning_rate": 1.7195716362732427e-06, + "loss": 1.0377, + "step": 4901 + }, + { + "epoch": 0.9431173906508342, + "grad_norm": 0.3295357694390801, + "learning_rate": 1.7080831628286887e-06, + "loss": 1.0466, + "step": 4902 + }, + { + "epoch": 0.9433097850593717, + "grad_norm": 0.38756594399970046, + "learning_rate": 1.696632864832015e-06, + "loss": 1.03, + "step": 4903 + }, + { + "epoch": 0.9435021794679093, + "grad_norm": 0.29547189408230545, + "learning_rate": 1.685220746730387e-06, + "loss": 1.0756, + "step": 4904 + }, + { + "epoch": 0.9436945738764467, + "grad_norm": 0.36152742957456924, + "learning_rate": 1.6738468129561369e-06, + "loss": 0.9748, + "step": 4905 + }, + { + "epoch": 0.9438869682849842, + "grad_norm": 0.3394094491262785, + "learning_rate": 1.6625110679267641e-06, + "loss": 0.9952, + "step": 4906 + }, + { + "epoch": 0.9440793626935218, + "grad_norm": 0.32356067482846507, + "learning_rate": 1.6512135160449583e-06, + "loss": 1.0275, + "step": 4907 + }, + { + "epoch": 0.9442717571020592, + "grad_norm": 0.2934165285307212, + "learning_rate": 1.6399541616985648e-06, + "loss": 1.0749, + "step": 4908 + }, + { + "epoch": 0.9444641515105967, + "grad_norm": 0.3739368505174029, + "learning_rate": 1.6287330092605524e-06, + "loss": 1.0416, + "step": 4909 + }, + { + "epoch": 0.9446565459191342, + "grad_norm": 0.31729146999040525, + "learning_rate": 1.6175500630891127e-06, + "loss": 1.1309, + "step": 4910 + }, + { + "epoch": 0.9448489403276717, + "grad_norm": 0.27925966909469846, + "learning_rate": 1.6064053275275714e-06, + "loss": 1.0902, + "step": 4911 + }, + { + "epoch": 0.9450413347362092, + "grad_norm": 0.3534517232238773, + "learning_rate": 1.5952988069044106e-06, + "loss": 1.0863, + "step": 4912 + }, + { + "epoch": 0.9452337291447467, + "grad_norm": 0.2966449356315687, + "learning_rate": 1.5842305055332795e-06, + "loss": 0.9766, + "step": 4913 + }, + { + "epoch": 0.9454261235532843, + "grad_norm": 0.2932166784805485, + "learning_rate": 1.573200427712973e-06, + "loss": 1.0655, + "step": 4914 + }, + { + "epoch": 
0.9456185179618217, + "grad_norm": 0.35760784565349485, + "learning_rate": 1.562208577727442e-06, + "loss": 1.0548, + "step": 4915 + }, + { + "epoch": 0.9458109123703592, + "grad_norm": 0.3592867540870145, + "learning_rate": 1.5512549598458048e-06, + "loss": 1.0426, + "step": 4916 + }, + { + "epoch": 0.9460033067788968, + "grad_norm": 0.276766046699416, + "learning_rate": 1.540339578322314e-06, + "loss": 1.0428, + "step": 4917 + }, + { + "epoch": 0.9461957011874342, + "grad_norm": 0.39779999778007813, + "learning_rate": 1.5294624373963895e-06, + "loss": 0.9154, + "step": 4918 + }, + { + "epoch": 0.9463880955959717, + "grad_norm": 0.31654156787619875, + "learning_rate": 1.5186235412925742e-06, + "loss": 1.0365, + "step": 4919 + }, + { + "epoch": 0.9465804900045093, + "grad_norm": 0.3234701154471859, + "learning_rate": 1.5078228942205674e-06, + "loss": 0.9417, + "step": 4920 + }, + { + "epoch": 0.9467728844130467, + "grad_norm": 0.2823261131733285, + "learning_rate": 1.497060500375236e-06, + "loss": 1.0225, + "step": 4921 + }, + { + "epoch": 0.9469652788215842, + "grad_norm": 0.325333468865082, + "learning_rate": 1.4863363639365357e-06, + "loss": 1.0717, + "step": 4922 + }, + { + "epoch": 0.9471576732301218, + "grad_norm": 0.2910093377836334, + "learning_rate": 1.4756504890696466e-06, + "loss": 1.0304, + "step": 4923 + }, + { + "epoch": 0.9473500676386593, + "grad_norm": 0.3535415888828865, + "learning_rate": 1.4650028799247928e-06, + "loss": 1.0075, + "step": 4924 + }, + { + "epoch": 0.9475424620471967, + "grad_norm": 0.36128552754942406, + "learning_rate": 1.454393540637411e-06, + "loss": 1.0177, + "step": 4925 + }, + { + "epoch": 0.9477348564557343, + "grad_norm": 0.336278605392095, + "learning_rate": 1.4438224753280382e-06, + "loss": 0.9692, + "step": 4926 + }, + { + "epoch": 0.9479272508642718, + "grad_norm": 0.36851277106489816, + "learning_rate": 1.4332896881023461e-06, + "loss": 1.0513, + "step": 4927 + }, + { + "epoch": 0.9481196452728092, + "grad_norm": 0.34292967990781215, + "learning_rate": 1.422795183051151e-06, + "loss": 1.1092, + "step": 4928 + }, + { + "epoch": 0.9483120396813468, + "grad_norm": 0.3954331229329331, + "learning_rate": 1.4123389642504148e-06, + "loss": 1.0335, + "step": 4929 + }, + { + "epoch": 0.9485044340898843, + "grad_norm": 0.34196522169793797, + "learning_rate": 1.401921035761189e-06, + "loss": 1.054, + "step": 4930 + }, + { + "epoch": 0.9486968284984217, + "grad_norm": 0.3558813609551621, + "learning_rate": 1.3915414016296924e-06, + "loss": 0.9845, + "step": 4931 + }, + { + "epoch": 0.9488892229069593, + "grad_norm": 0.3317569179637117, + "learning_rate": 1.381200065887256e-06, + "loss": 1.0452, + "step": 4932 + }, + { + "epoch": 0.9490816173154968, + "grad_norm": 0.3213494368941408, + "learning_rate": 1.3708970325503223e-06, + "loss": 1.0054, + "step": 4933 + }, + { + "epoch": 0.9492740117240343, + "grad_norm": 0.2872465885520479, + "learning_rate": 1.3606323056204795e-06, + "loss": 1.0079, + "step": 4934 + }, + { + "epoch": 0.9494664061325718, + "grad_norm": 0.29249274691955834, + "learning_rate": 1.3504058890844273e-06, + "loss": 1.0959, + "step": 4935 + }, + { + "epoch": 0.9496588005411093, + "grad_norm": 0.28275085751362944, + "learning_rate": 1.3402177869139886e-06, + "loss": 1.0508, + "step": 4936 + }, + { + "epoch": 0.9498511949496468, + "grad_norm": 0.3054640897212883, + "learning_rate": 1.3300680030661095e-06, + "loss": 1.0043, + "step": 4937 + }, + { + "epoch": 0.9500435893581842, + "grad_norm": 0.30570198243457886, + "learning_rate": 
1.3199565414828364e-06, + "loss": 1.0938, + "step": 4938 + }, + { + "epoch": 0.9502359837667218, + "grad_norm": 0.37211481598858215, + "learning_rate": 1.309883406091361e-06, + "loss": 0.9454, + "step": 4939 + }, + { + "epoch": 0.9504283781752593, + "grad_norm": 0.3242052176726218, + "learning_rate": 1.2998486008039545e-06, + "loss": 1.0366, + "step": 4940 + }, + { + "epoch": 0.9506207725837967, + "grad_norm": 0.2985978591366246, + "learning_rate": 1.2898521295180322e-06, + "loss": 1.0992, + "step": 4941 + }, + { + "epoch": 0.9508131669923343, + "grad_norm": 0.29500611801476373, + "learning_rate": 1.2798939961161215e-06, + "loss": 1.0179, + "step": 4942 + }, + { + "epoch": 0.9510055614008718, + "grad_norm": 0.2729088391853405, + "learning_rate": 1.269974204465818e-06, + "loss": 0.9948, + "step": 4943 + }, + { + "epoch": 0.9511979558094092, + "grad_norm": 0.3050495645978466, + "learning_rate": 1.260092758419862e-06, + "loss": 1.0503, + "step": 4944 + }, + { + "epoch": 0.9513903502179468, + "grad_norm": 0.3945562672310988, + "learning_rate": 1.2502496618161164e-06, + "loss": 0.9892, + "step": 4945 + }, + { + "epoch": 0.9515827446264843, + "grad_norm": 0.34853183191628206, + "learning_rate": 1.2404449184774902e-06, + "loss": 1.1257, + "step": 4946 + }, + { + "epoch": 0.9517751390350218, + "grad_norm": 0.3467851955053795, + "learning_rate": 1.2306785322120595e-06, + "loss": 1.0521, + "step": 4947 + }, + { + "epoch": 0.9519675334435593, + "grad_norm": 0.3058773332816911, + "learning_rate": 1.220950506812968e-06, + "loss": 1.0327, + "step": 4948 + }, + { + "epoch": 0.9521599278520968, + "grad_norm": 0.30765152112549815, + "learning_rate": 1.2112608460584707e-06, + "loss": 1.0455, + "step": 4949 + }, + { + "epoch": 0.9523523222606343, + "grad_norm": 0.5386394603532704, + "learning_rate": 1.2016095537119243e-06, + "loss": 1.0611, + "step": 4950 + }, + { + "epoch": 0.9525447166691718, + "grad_norm": 0.366068053972542, + "learning_rate": 1.1919966335217636e-06, + "loss": 0.8987, + "step": 4951 + }, + { + "epoch": 0.9527371110777093, + "grad_norm": 0.3007287526569373, + "learning_rate": 1.1824220892215464e-06, + "loss": 1.061, + "step": 4952 + }, + { + "epoch": 0.9529295054862468, + "grad_norm": 0.38681754054034423, + "learning_rate": 1.1728859245299205e-06, + "loss": 0.9447, + "step": 4953 + }, + { + "epoch": 0.9531218998947844, + "grad_norm": 0.295780045648606, + "learning_rate": 1.1633881431506122e-06, + "loss": 0.9852, + "step": 4954 + }, + { + "epoch": 0.9533142943033218, + "grad_norm": 0.3220803327742914, + "learning_rate": 1.1539287487724593e-06, + "loss": 1.0308, + "step": 4955 + }, + { + "epoch": 0.9535066887118593, + "grad_norm": 0.3312860254546172, + "learning_rate": 1.1445077450693786e-06, + "loss": 1.0285, + "step": 4956 + }, + { + "epoch": 0.9536990831203969, + "grad_norm": 0.42106776361564996, + "learning_rate": 1.1351251357003655e-06, + "loss": 0.9361, + "step": 4957 + }, + { + "epoch": 0.9538914775289343, + "grad_norm": 0.32889789776165806, + "learning_rate": 1.1257809243095386e-06, + "loss": 0.9823, + "step": 4958 + }, + { + "epoch": 0.9540838719374718, + "grad_norm": 0.39465940212204376, + "learning_rate": 1.1164751145260722e-06, + "loss": 1.0534, + "step": 4959 + }, + { + "epoch": 0.9542762663460094, + "grad_norm": 0.3986728447282168, + "learning_rate": 1.1072077099642418e-06, + "loss": 1.05, + "step": 4960 + }, + { + "epoch": 0.9544686607545468, + "grad_norm": 0.32610161967454426, + "learning_rate": 1.097978714223391e-06, + "loss": 0.9867, + "step": 4961 + }, + { + "epoch": 
0.9546610551630843, + "grad_norm": 0.3264805381065678, + "learning_rate": 1.0887881308879633e-06, + "loss": 1.0906, + "step": 4962 + }, + { + "epoch": 0.9548534495716218, + "grad_norm": 0.31076733448210864, + "learning_rate": 1.07963596352747e-06, + "loss": 1.075, + "step": 4963 + }, + { + "epoch": 0.9550458439801593, + "grad_norm": 0.3190122657108563, + "learning_rate": 1.0705222156965012e-06, + "loss": 1.0424, + "step": 4964 + }, + { + "epoch": 0.9552382383886968, + "grad_norm": 0.30517956610751173, + "learning_rate": 1.0614468909347474e-06, + "loss": 0.9354, + "step": 4965 + }, + { + "epoch": 0.9554306327972343, + "grad_norm": 0.3692653648118493, + "learning_rate": 1.0524099927669563e-06, + "loss": 1.0236, + "step": 4966 + }, + { + "epoch": 0.9556230272057719, + "grad_norm": 0.296500174680699, + "learning_rate": 1.043411524702942e-06, + "loss": 1.0736, + "step": 4967 + }, + { + "epoch": 0.9558154216143093, + "grad_norm": 0.32263885549526844, + "learning_rate": 1.03445149023762e-06, + "loss": 1.0013, + "step": 4968 + }, + { + "epoch": 0.9560078160228468, + "grad_norm": 0.3705868130695738, + "learning_rate": 1.0255298928509627e-06, + "loss": 1.109, + "step": 4969 + }, + { + "epoch": 0.9562002104313844, + "grad_norm": 0.3052829339008226, + "learning_rate": 1.016646736007998e-06, + "loss": 1.0115, + "step": 4970 + }, + { + "epoch": 0.9563926048399218, + "grad_norm": 0.3362393744463195, + "learning_rate": 1.0078020231588768e-06, + "loss": 0.9704, + "step": 4971 + }, + { + "epoch": 0.9565849992484593, + "grad_norm": 0.31742447278834984, + "learning_rate": 9.989957577387521e-07, + "loss": 1.0067, + "step": 4972 + }, + { + "epoch": 0.9567773936569969, + "grad_norm": 0.3048006284582907, + "learning_rate": 9.902279431678873e-07, + "loss": 1.0663, + "step": 4973 + }, + { + "epoch": 0.9569697880655343, + "grad_norm": 0.40837297051339433, + "learning_rate": 9.814985828516033e-07, + "loss": 1.0326, + "step": 4974 + }, + { + "epoch": 0.9571621824740718, + "grad_norm": 0.28414315019383085, + "learning_rate": 9.728076801802655e-07, + "loss": 1.0578, + "step": 4975 + }, + { + "epoch": 0.9573545768826094, + "grad_norm": 0.3394737525573108, + "learning_rate": 9.641552385293518e-07, + "loss": 0.9949, + "step": 4976 + }, + { + "epoch": 0.9575469712911469, + "grad_norm": 0.37620313313250575, + "learning_rate": 9.555412612593518e-07, + "loss": 1.0158, + "step": 4977 + }, + { + "epoch": 0.9577393656996843, + "grad_norm": 0.3284786956023174, + "learning_rate": 9.469657517158226e-07, + "loss": 1.028, + "step": 4978 + }, + { + "epoch": 0.9579317601082219, + "grad_norm": 0.32029102021052164, + "learning_rate": 9.384287132294223e-07, + "loss": 0.9752, + "step": 4979 + }, + { + "epoch": 0.9581241545167594, + "grad_norm": 0.2817345331905995, + "learning_rate": 9.299301491158207e-07, + "loss": 1.0367, + "step": 4980 + }, + { + "epoch": 0.9583165489252968, + "grad_norm": 0.36074091813494247, + "learning_rate": 9.214700626757666e-07, + "loss": 1.034, + "step": 4981 + }, + { + "epoch": 0.9585089433338344, + "grad_norm": 0.3045402123270288, + "learning_rate": 9.130484571950537e-07, + "loss": 1.0372, + "step": 4982 + }, + { + "epoch": 0.9587013377423719, + "grad_norm": 0.2874965714266688, + "learning_rate": 9.046653359445323e-07, + "loss": 1.0093, + "step": 4983 + }, + { + "epoch": 0.9588937321509093, + "grad_norm": 0.28719417590798496, + "learning_rate": 8.963207021801423e-07, + "loss": 1.0915, + "step": 4984 + }, + { + "epoch": 0.9590861265594469, + "grad_norm": 0.3894590181618534, + "learning_rate": 
8.880145591428024e-07, + "loss": 0.9573, + "step": 4985 + }, + { + "epoch": 0.9592785209679844, + "grad_norm": 0.36760468451037237, + "learning_rate": 8.797469100585431e-07, + "loss": 0.9966, + "step": 4986 + }, + { + "epoch": 0.9594709153765218, + "grad_norm": 0.34228324657728326, + "learning_rate": 8.715177581384182e-07, + "loss": 1.0008, + "step": 4987 + }, + { + "epoch": 0.9596633097850594, + "grad_norm": 0.5079965020003488, + "learning_rate": 8.633271065785486e-07, + "loss": 1.0156, + "step": 4988 + }, + { + "epoch": 0.9598557041935969, + "grad_norm": 0.32560821376502563, + "learning_rate": 8.551749585600677e-07, + "loss": 1.0717, + "step": 4989 + }, + { + "epoch": 0.9600480986021344, + "grad_norm": 0.36258177524586555, + "learning_rate": 8.470613172491981e-07, + "loss": 1.1304, + "step": 4990 + }, + { + "epoch": 0.9602404930106718, + "grad_norm": 0.34090086472935294, + "learning_rate": 8.389861857971748e-07, + "loss": 1.1374, + "step": 4991 + }, + { + "epoch": 0.9604328874192094, + "grad_norm": 0.41406228945182855, + "learning_rate": 8.309495673402779e-07, + "loss": 1.13, + "step": 4992 + }, + { + "epoch": 0.9606252818277469, + "grad_norm": 0.31467940982811504, + "learning_rate": 8.229514649998437e-07, + "loss": 1.0519, + "step": 4993 + }, + { + "epoch": 0.9608176762362843, + "grad_norm": 0.3936063919661145, + "learning_rate": 8.149918818822433e-07, + "loss": 0.9858, + "step": 4994 + }, + { + "epoch": 0.9610100706448219, + "grad_norm": 0.30032133813124073, + "learning_rate": 8.070708210788924e-07, + "loss": 1.0487, + "step": 4995 + }, + { + "epoch": 0.9612024650533594, + "grad_norm": 0.2545971837000099, + "learning_rate": 7.991882856662302e-07, + "loss": 1.0984, + "step": 4996 + }, + { + "epoch": 0.9613948594618968, + "grad_norm": 0.3253419585783581, + "learning_rate": 7.913442787057523e-07, + "loss": 1.0599, + "step": 4997 + }, + { + "epoch": 0.9615872538704344, + "grad_norm": 0.37692848928140704, + "learning_rate": 7.835388032439661e-07, + "loss": 0.9417, + "step": 4998 + }, + { + "epoch": 0.9617796482789719, + "grad_norm": 0.3228267566390857, + "learning_rate": 7.757718623124466e-07, + "loss": 0.9511, + "step": 4999 + }, + { + "epoch": 0.9619720426875094, + "grad_norm": 0.3952488429491064, + "learning_rate": 7.680434589277697e-07, + "loss": 1.0789, + "step": 5000 + }, + { + "epoch": 0.9621644370960469, + "grad_norm": 0.30622710527485747, + "learning_rate": 7.603535960915675e-07, + "loss": 1.0395, + "step": 5001 + }, + { + "epoch": 0.9623568315045844, + "grad_norm": 0.38342706353059464, + "learning_rate": 7.527022767904956e-07, + "loss": 1.0077, + "step": 5002 + }, + { + "epoch": 0.9625492259131219, + "grad_norm": 0.3395337716599965, + "learning_rate": 7.450895039962214e-07, + "loss": 1.0927, + "step": 5003 + }, + { + "epoch": 0.9627416203216594, + "grad_norm": 0.36232484857569147, + "learning_rate": 7.375152806654683e-07, + "loss": 1.0387, + "step": 5004 + }, + { + "epoch": 0.9629340147301969, + "grad_norm": 0.36656435099597173, + "learning_rate": 7.299796097399947e-07, + "loss": 1.067, + "step": 5005 + }, + { + "epoch": 0.9631264091387344, + "grad_norm": 0.29315110436382213, + "learning_rate": 7.224824941465369e-07, + "loss": 1.0228, + "step": 5006 + }, + { + "epoch": 0.963318803547272, + "grad_norm": 0.43515984259373935, + "learning_rate": 7.150239367969102e-07, + "loss": 0.9789, + "step": 5007 + }, + { + "epoch": 0.9635111979558094, + "grad_norm": 0.3110854369609877, + "learning_rate": 7.076039405879309e-07, + "loss": 1.0924, + "step": 5008 + }, + { + "epoch": 
0.9637035923643469, + "grad_norm": 0.28387957421165444, + "learning_rate": 7.002225084014269e-07, + "loss": 1.0357, + "step": 5009 + }, + { + "epoch": 0.9638959867728845, + "grad_norm": 0.29887345785884456, + "learning_rate": 6.928796431042717e-07, + "loss": 1.0634, + "step": 5010 + }, + { + "epoch": 0.9640883811814219, + "grad_norm": 0.3141623923742202, + "learning_rate": 6.855753475483506e-07, + "loss": 0.9347, + "step": 5011 + }, + { + "epoch": 0.9642807755899594, + "grad_norm": 0.33119300666646084, + "learning_rate": 6.783096245705611e-07, + "loss": 1.0173, + "step": 5012 + }, + { + "epoch": 0.964473169998497, + "grad_norm": 0.4166242696396946, + "learning_rate": 6.710824769928348e-07, + "loss": 1.0347, + "step": 5013 + }, + { + "epoch": 0.9646655644070344, + "grad_norm": 0.33297074336934146, + "learning_rate": 6.63893907622104e-07, + "loss": 1.1196, + "step": 5014 + }, + { + "epoch": 0.9648579588155719, + "grad_norm": 0.29337790425361476, + "learning_rate": 6.567439192503245e-07, + "loss": 0.9234, + "step": 5015 + }, + { + "epoch": 0.9650503532241094, + "grad_norm": 0.42082504983687435, + "learning_rate": 6.496325146544746e-07, + "loss": 1.0825, + "step": 5016 + }, + { + "epoch": 0.965242747632647, + "grad_norm": 0.3632439017386509, + "learning_rate": 6.425596965965452e-07, + "loss": 1.0366, + "step": 5017 + }, + { + "epoch": 0.9654351420411844, + "grad_norm": 0.3082693157834947, + "learning_rate": 6.35525467823539e-07, + "loss": 1.1006, + "step": 5018 + }, + { + "epoch": 0.9656275364497219, + "grad_norm": 0.31182322007077123, + "learning_rate": 6.285298310674703e-07, + "loss": 1.0385, + "step": 5019 + }, + { + "epoch": 0.9658199308582595, + "grad_norm": 0.3423418253615423, + "learning_rate": 6.215727890453438e-07, + "loss": 1.0634, + "step": 5020 + }, + { + "epoch": 0.9660123252667969, + "grad_norm": 0.31689270309642814, + "learning_rate": 6.14654344459209e-07, + "loss": 1.0528, + "step": 5021 + }, + { + "epoch": 0.9662047196753344, + "grad_norm": 0.40301768880378613, + "learning_rate": 6.077744999961165e-07, + "loss": 1.0881, + "step": 5022 + }, + { + "epoch": 0.966397114083872, + "grad_norm": 0.31492110205436796, + "learning_rate": 6.009332583281069e-07, + "loss": 1.0787, + "step": 5023 + }, + { + "epoch": 0.9665895084924094, + "grad_norm": 0.4023107389473161, + "learning_rate": 5.941306221122545e-07, + "loss": 1.075, + "step": 5024 + }, + { + "epoch": 0.9667819029009469, + "grad_norm": 0.30721715303343167, + "learning_rate": 5.873665939906015e-07, + "loss": 1.0737, + "step": 5025 + }, + { + "epoch": 0.9669742973094845, + "grad_norm": 0.29014115403983065, + "learning_rate": 5.806411765902353e-07, + "loss": 0.9845, + "step": 5026 + }, + { + "epoch": 0.9671666917180219, + "grad_norm": 0.26137856671050164, + "learning_rate": 5.739543725232332e-07, + "loss": 1.0461, + "step": 5027 + }, + { + "epoch": 0.9673590861265594, + "grad_norm": 0.2759912417552023, + "learning_rate": 5.673061843866623e-07, + "loss": 1.0338, + "step": 5028 + }, + { + "epoch": 0.967551480535097, + "grad_norm": 0.2845290218412168, + "learning_rate": 5.606966147626124e-07, + "loss": 1.1266, + "step": 5029 + }, + { + "epoch": 0.9677438749436345, + "grad_norm": 0.3496918140083736, + "learning_rate": 5.541256662181526e-07, + "loss": 1.0113, + "step": 5030 + }, + { + "epoch": 0.9679362693521719, + "grad_norm": 0.32659747761414937, + "learning_rate": 5.475933413053636e-07, + "loss": 1.0333, + "step": 5031 + }, + { + "epoch": 0.9681286637607095, + "grad_norm": 0.3675553389359608, + "learning_rate": 5.410996425613379e-07, + 
"loss": 1.0833, + "step": 5032 + }, + { + "epoch": 0.968321058169247, + "grad_norm": 0.4808990978514393, + "learning_rate": 5.346445725081473e-07, + "loss": 0.914, + "step": 5033 + }, + { + "epoch": 0.9685134525777844, + "grad_norm": 0.35795587440588744, + "learning_rate": 5.282281336528638e-07, + "loss": 0.9903, + "step": 5034 + }, + { + "epoch": 0.968705846986322, + "grad_norm": 0.35282334183642, + "learning_rate": 5.218503284875609e-07, + "loss": 1.0208, + "step": 5035 + }, + { + "epoch": 0.9688982413948595, + "grad_norm": 0.2902959575167372, + "learning_rate": 5.155111594893014e-07, + "loss": 0.9751, + "step": 5036 + }, + { + "epoch": 0.9690906358033969, + "grad_norm": 0.3599339035019339, + "learning_rate": 5.092106291201604e-07, + "loss": 1.099, + "step": 5037 + }, + { + "epoch": 0.9692830302119345, + "grad_norm": 0.3438592152853123, + "learning_rate": 5.029487398271692e-07, + "loss": 1.0155, + "step": 5038 + }, + { + "epoch": 0.969475424620472, + "grad_norm": 0.2727622230106225, + "learning_rate": 4.967254940423715e-07, + "loss": 1.0899, + "step": 5039 + }, + { + "epoch": 0.9696678190290094, + "grad_norm": 0.3187102551863971, + "learning_rate": 4.905408941828338e-07, + "loss": 1.0093, + "step": 5040 + }, + { + "epoch": 0.969860213437547, + "grad_norm": 0.27321792094726255, + "learning_rate": 4.843949426505567e-07, + "loss": 0.9545, + "step": 5041 + }, + { + "epoch": 0.9700526078460845, + "grad_norm": 0.45757931008923125, + "learning_rate": 4.782876418325755e-07, + "loss": 1.0078, + "step": 5042 + }, + { + "epoch": 0.970245002254622, + "grad_norm": 0.33484521601404077, + "learning_rate": 4.7221899410087033e-07, + "loss": 1.0748, + "step": 5043 + }, + { + "epoch": 0.9704373966631594, + "grad_norm": 0.2762389527650955, + "learning_rate": 4.6618900181245595e-07, + "loss": 1.1174, + "step": 5044 + }, + { + "epoch": 0.970629791071697, + "grad_norm": 0.3223238593624723, + "learning_rate": 4.6019766730930336e-07, + "loss": 1.032, + "step": 5045 + }, + { + "epoch": 0.9708221854802345, + "grad_norm": 0.33345687239029953, + "learning_rate": 4.5424499291838454e-07, + "loss": 1.0953, + "step": 5046 + }, + { + "epoch": 0.9710145798887719, + "grad_norm": 0.30811885961325575, + "learning_rate": 4.4833098095165004e-07, + "loss": 1.0191, + "step": 5047 + }, + { + "epoch": 0.9712069742973095, + "grad_norm": 0.3169431005266869, + "learning_rate": 4.4245563370601815e-07, + "loss": 1.0009, + "step": 5048 + }, + { + "epoch": 0.971399368705847, + "grad_norm": 0.31354157329219307, + "learning_rate": 4.366189534634191e-07, + "loss": 0.9344, + "step": 5049 + }, + { + "epoch": 0.9715917631143844, + "grad_norm": 0.31487998402505285, + "learning_rate": 4.308209424907506e-07, + "loss": 1.1079, + "step": 5050 + }, + { + "epoch": 0.971784157522922, + "grad_norm": 0.28691385148451876, + "learning_rate": 4.250616030399002e-07, + "loss": 1.083, + "step": 5051 + }, + { + "epoch": 0.9719765519314595, + "grad_norm": 0.3350178958167011, + "learning_rate": 4.1934093734771195e-07, + "loss": 0.9912, + "step": 5052 + }, + { + "epoch": 0.972168946339997, + "grad_norm": 0.37849520683837934, + "learning_rate": 4.1365894763604197e-07, + "loss": 0.9879, + "step": 5053 + }, + { + "epoch": 0.9723613407485345, + "grad_norm": 0.37069367516727025, + "learning_rate": 4.080156361117027e-07, + "loss": 1.0408, + "step": 5054 + }, + { + "epoch": 0.972553735157072, + "grad_norm": 0.3137751182780175, + "learning_rate": 4.024110049664853e-07, + "loss": 1.0364, + "step": 5055 + }, + { + "epoch": 0.9727461295656095, + "grad_norm": 
0.29747626355891726, + "learning_rate": 3.9684505637718194e-07, + "loss": 1.0502, + "step": 5056 + }, + { + "epoch": 0.972938523974147, + "grad_norm": 0.3152025452518086, + "learning_rate": 3.913177925055189e-07, + "loss": 1.07, + "step": 5057 + }, + { + "epoch": 0.9731309183826845, + "grad_norm": 0.33432316652220423, + "learning_rate": 3.858292154982457e-07, + "loss": 0.9501, + "step": 5058 + }, + { + "epoch": 0.973323312791222, + "grad_norm": 0.310274214150185, + "learning_rate": 3.8037932748704597e-07, + "loss": 1.0468, + "step": 5059 + }, + { + "epoch": 0.9735157071997596, + "grad_norm": 0.27870253609330775, + "learning_rate": 3.7496813058859326e-07, + "loss": 1.0257, + "step": 5060 + }, + { + "epoch": 0.973708101608297, + "grad_norm": 0.41910230664417786, + "learning_rate": 3.6959562690455087e-07, + "loss": 1.1828, + "step": 5061 + }, + { + "epoch": 0.9739004960168345, + "grad_norm": 0.303718678540597, + "learning_rate": 3.642618185215163e-07, + "loss": 1.0768, + "step": 5062 + }, + { + "epoch": 0.9740928904253721, + "grad_norm": 0.3351723168140292, + "learning_rate": 3.5896670751109916e-07, + "loss": 1.1378, + "step": 5063 + }, + { + "epoch": 0.9742852848339095, + "grad_norm": 0.28292247744285975, + "learning_rate": 3.537102959298322e-07, + "loss": 0.9996, + "step": 5064 + }, + { + "epoch": 0.974477679242447, + "grad_norm": 0.29316251503126217, + "learning_rate": 3.484925858192822e-07, + "loss": 0.9714, + "step": 5065 + }, + { + "epoch": 0.9746700736509846, + "grad_norm": 0.3365946720449322, + "learning_rate": 3.4331357920591724e-07, + "loss": 1.0325, + "step": 5066 + }, + { + "epoch": 0.974862468059522, + "grad_norm": 0.26037443573801905, + "learning_rate": 3.38173278101217e-07, + "loss": 0.9809, + "step": 5067 + }, + { + "epoch": 0.9750548624680595, + "grad_norm": 0.3373501754356196, + "learning_rate": 3.3307168450160685e-07, + "loss": 1.0276, + "step": 5068 + }, + { + "epoch": 0.975247256876597, + "grad_norm": 0.34352897012496336, + "learning_rate": 3.280088003885018e-07, + "loss": 1.0656, + "step": 5069 + }, + { + "epoch": 0.9754396512851345, + "grad_norm": 0.42857668201851623, + "learning_rate": 3.2298462772825113e-07, + "loss": 0.9532, + "step": 5070 + }, + { + "epoch": 0.975632045693672, + "grad_norm": 0.42496313488765447, + "learning_rate": 3.17999168472205e-07, + "loss": 0.9518, + "step": 5071 + }, + { + "epoch": 0.9758244401022095, + "grad_norm": 0.2899132290117504, + "learning_rate": 3.130524245566369e-07, + "loss": 0.972, + "step": 5072 + }, + { + "epoch": 0.9760168345107471, + "grad_norm": 0.3431746855107751, + "learning_rate": 3.081443979028098e-07, + "loss": 1.0186, + "step": 5073 + }, + { + "epoch": 0.9762092289192845, + "grad_norm": 0.2804886758532411, + "learning_rate": 3.032750904169546e-07, + "loss": 1.0124, + "step": 5074 + }, + { + "epoch": 0.976401623327822, + "grad_norm": 0.3399834976319933, + "learning_rate": 2.9844450399024726e-07, + "loss": 1.1384, + "step": 5075 + }, + { + "epoch": 0.9765940177363596, + "grad_norm": 0.2830378097148043, + "learning_rate": 2.9365264049884267e-07, + "loss": 1.0306, + "step": 5076 + }, + { + "epoch": 0.976786412144897, + "grad_norm": 0.3123175290278857, + "learning_rate": 2.8889950180382986e-07, + "loss": 0.9695, + "step": 5077 + }, + { + "epoch": 0.9769788065534345, + "grad_norm": 0.33577519483208257, + "learning_rate": 2.8418508975127654e-07, + "loss": 1.0596, + "step": 5078 + }, + { + "epoch": 0.9771712009619721, + "grad_norm": 0.307192702623424, + "learning_rate": 2.795094061722181e-07, + "loss": 1.0036, + "step": 5079 + 
}, + { + "epoch": 0.9773635953705095, + "grad_norm": 0.4440475783331159, + "learning_rate": 2.7487245288261296e-07, + "loss": 1.1049, + "step": 5080 + }, + { + "epoch": 0.977555989779047, + "grad_norm": 0.3156220079680518, + "learning_rate": 2.702742316834206e-07, + "loss": 1.0217, + "step": 5081 + }, + { + "epoch": 0.9777483841875846, + "grad_norm": 0.42724938870948, + "learning_rate": 2.657147443605457e-07, + "loss": 1.095, + "step": 5082 + }, + { + "epoch": 0.977940778596122, + "grad_norm": 0.2819177191675086, + "learning_rate": 2.6119399268480507e-07, + "loss": 0.9718, + "step": 5083 + }, + { + "epoch": 0.9781331730046595, + "grad_norm": 0.2867413296030326, + "learning_rate": 2.5671197841203865e-07, + "loss": 1.017, + "step": 5084 + }, + { + "epoch": 0.9783255674131971, + "grad_norm": 0.3131370832798454, + "learning_rate": 2.5226870328299845e-07, + "loss": 0.9642, + "step": 5085 + }, + { + "epoch": 0.9785179618217346, + "grad_norm": 0.284876663165116, + "learning_rate": 2.478641690233929e-07, + "loss": 0.9948, + "step": 5086 + }, + { + "epoch": 0.978710356230272, + "grad_norm": 0.16414187797845742, + "learning_rate": 2.434983773439092e-07, + "loss": 1.0587, + "step": 5087 + }, + { + "epoch": 0.9789027506388096, + "grad_norm": 0.3792642984444722, + "learning_rate": 2.3917132994016877e-07, + "loss": 1.0413, + "step": 5088 + }, + { + "epoch": 0.9790951450473471, + "grad_norm": 0.3166014315663511, + "learning_rate": 2.3488302849272724e-07, + "loss": 1.0379, + "step": 5089 + }, + { + "epoch": 0.9792875394558845, + "grad_norm": 0.2807327494227699, + "learning_rate": 2.3063347466713013e-07, + "loss": 0.982, + "step": 5090 + }, + { + "epoch": 0.9794799338644221, + "grad_norm": 0.29118100069509606, + "learning_rate": 2.264226701138461e-07, + "loss": 0.95, + "step": 5091 + }, + { + "epoch": 0.9796723282729596, + "grad_norm": 0.34729023109741464, + "learning_rate": 2.2225061646830025e-07, + "loss": 1.0836, + "step": 5092 + }, + { + "epoch": 0.979864722681497, + "grad_norm": 0.29257850627314713, + "learning_rate": 2.181173153508853e-07, + "loss": 1.0233, + "step": 5093 + }, + { + "epoch": 0.9800571170900346, + "grad_norm": 0.3959917265023297, + "learning_rate": 2.1402276836691714e-07, + "loss": 0.992, + "step": 5094 + }, + { + "epoch": 0.9802495114985721, + "grad_norm": 0.2590900735118314, + "learning_rate": 2.0996697710666812e-07, + "loss": 1.097, + "step": 5095 + }, + { + "epoch": 0.9804419059071096, + "grad_norm": 0.3654215070810998, + "learning_rate": 2.0594994314536707e-07, + "loss": 0.9906, + "step": 5096 + }, + { + "epoch": 0.980634300315647, + "grad_norm": 0.3604543846432549, + "learning_rate": 2.0197166804317712e-07, + "loss": 1.0263, + "step": 5097 + }, + { + "epoch": 0.9808266947241846, + "grad_norm": 0.3052356194126838, + "learning_rate": 1.9803215334522896e-07, + "loss": 0.9838, + "step": 5098 + }, + { + "epoch": 0.9810190891327221, + "grad_norm": 0.331316123888874, + "learning_rate": 1.941314005815653e-07, + "loss": 1.1548, + "step": 5099 + }, + { + "epoch": 0.9812114835412595, + "grad_norm": 0.29670341958253454, + "learning_rate": 1.9026941126721875e-07, + "loss": 0.9693, + "step": 5100 + }, + { + "epoch": 0.9814038779497971, + "grad_norm": 0.3153939739119349, + "learning_rate": 1.8644618690211168e-07, + "loss": 0.9661, + "step": 5101 + }, + { + "epoch": 0.9815962723583346, + "grad_norm": 0.3161423649297131, + "learning_rate": 1.826617289711563e-07, + "loss": 0.9762, + "step": 5102 + }, + { + "epoch": 0.981788666766872, + "grad_norm": 0.2851548919511647, + "learning_rate": 
1.7891603894418796e-07, + "loss": 1.0781, + "step": 5103 + }, + { + "epoch": 0.9819810611754096, + "grad_norm": 0.3695966861691063, + "learning_rate": 1.7520911827598742e-07, + "loss": 1.0483, + "step": 5104 + }, + { + "epoch": 0.9821734555839471, + "grad_norm": 0.3364653546491064, + "learning_rate": 1.7154096840629185e-07, + "loss": 1.0092, + "step": 5105 + }, + { + "epoch": 0.9823658499924846, + "grad_norm": 0.2859723773642625, + "learning_rate": 1.679115907597617e-07, + "loss": 1.0642, + "step": 5106 + }, + { + "epoch": 0.9825582444010221, + "grad_norm": 0.3627135038541458, + "learning_rate": 1.643209867460027e-07, + "loss": 1.0595, + "step": 5107 + }, + { + "epoch": 0.9827506388095596, + "grad_norm": 0.34671507806669843, + "learning_rate": 1.6076915775956604e-07, + "loss": 1.0372, + "step": 5108 + }, + { + "epoch": 0.9829430332180971, + "grad_norm": 0.37906382876933126, + "learning_rate": 1.5725610517994815e-07, + "loss": 1.0274, + "step": 5109 + }, + { + "epoch": 0.9831354276266346, + "grad_norm": 0.3896087181310295, + "learning_rate": 1.537818303715688e-07, + "loss": 1.0223, + "step": 5110 + }, + { + "epoch": 0.9833278220351721, + "grad_norm": 0.31701559870235946, + "learning_rate": 1.50346334683793e-07, + "loss": 1.0776, + "step": 5111 + }, + { + "epoch": 0.9835202164437096, + "grad_norm": 0.33106207329083365, + "learning_rate": 1.4694961945093122e-07, + "loss": 1.0536, + "step": 5112 + }, + { + "epoch": 0.9837126108522471, + "grad_norm": 0.30840765701709627, + "learning_rate": 1.4359168599223928e-07, + "loss": 0.9884, + "step": 5113 + }, + { + "epoch": 0.9839050052607846, + "grad_norm": 0.40430042383446857, + "learning_rate": 1.4027253561188502e-07, + "loss": 1.0541, + "step": 5114 + }, + { + "epoch": 0.9840973996693221, + "grad_norm": 0.3676395713742101, + "learning_rate": 1.3699216959899287e-07, + "loss": 1.0052, + "step": 5115 + }, + { + "epoch": 0.9842897940778597, + "grad_norm": 0.3718158973689816, + "learning_rate": 1.3375058922763252e-07, + "loss": 1.0406, + "step": 5116 + }, + { + "epoch": 0.9844821884863971, + "grad_norm": 0.30102185062051356, + "learning_rate": 1.3054779575677466e-07, + "loss": 1.0683, + "step": 5117 + }, + { + "epoch": 0.9846745828949346, + "grad_norm": 0.30723822686900915, + "learning_rate": 1.2738379043035764e-07, + "loss": 0.9981, + "step": 5118 + }, + { + "epoch": 0.9848669773034722, + "grad_norm": 0.37907539466338597, + "learning_rate": 1.2425857447725398e-07, + "loss": 1.0811, + "step": 5119 + }, + { + "epoch": 0.9850593717120096, + "grad_norm": 0.41688853701192846, + "learning_rate": 1.211721491112372e-07, + "loss": 1.0408, + "step": 5120 + }, + { + "epoch": 0.9852517661205471, + "grad_norm": 0.30608020355653576, + "learning_rate": 1.1812451553107062e-07, + "loss": 1.0056, + "step": 5121 + }, + { + "epoch": 0.9854441605290847, + "grad_norm": 0.3520025563703071, + "learning_rate": 1.1511567492038522e-07, + "loss": 1.0606, + "step": 5122 + }, + { + "epoch": 0.9856365549376221, + "grad_norm": 0.3068441943985922, + "learning_rate": 1.1214562844781285e-07, + "loss": 1.0366, + "step": 5123 + }, + { + "epoch": 0.9858289493461596, + "grad_norm": 0.3249513870562328, + "learning_rate": 1.0921437726686411e-07, + "loss": 1.0075, + "step": 5124 + }, + { + "epoch": 0.9860213437546971, + "grad_norm": 0.3626847659538625, + "learning_rate": 1.0632192251601725e-07, + "loss": 1.0602, + "step": 5125 + }, + { + "epoch": 0.9862137381632347, + "grad_norm": 0.30729084202484547, + "learning_rate": 1.0346826531865139e-07, + "loss": 0.966, + "step": 5126 + }, + { + "epoch": 
0.9864061325717721, + "grad_norm": 0.3507531533210151, + "learning_rate": 1.006534067831022e-07, + "loss": 1.0929, + "step": 5127 + }, + { + "epoch": 0.9865985269803096, + "grad_norm": 0.40352441135005895, + "learning_rate": 9.78773480026396e-08, + "loss": 1.0611, + "step": 5128 + }, + { + "epoch": 0.9867909213888472, + "grad_norm": 0.31468960125532663, + "learning_rate": 9.514009005543445e-08, + "loss": 0.9624, + "step": 5129 + }, + { + "epoch": 0.9869833157973846, + "grad_norm": 0.3270197500831794, + "learning_rate": 9.244163400462525e-08, + "loss": 1.0326, + "step": 5130 + }, + { + "epoch": 0.9871757102059221, + "grad_norm": 0.29821584705824833, + "learning_rate": 8.978198089824031e-08, + "loss": 1.0625, + "step": 5131 + }, + { + "epoch": 0.9873681046144597, + "grad_norm": 0.36897523605551197, + "learning_rate": 8.716113176927554e-08, + "loss": 1.1163, + "step": 5132 + }, + { + "epoch": 0.9875604990229971, + "grad_norm": 0.32400594888563705, + "learning_rate": 8.457908763562783e-08, + "loss": 1.1224, + "step": 5133 + }, + { + "epoch": 0.9877528934315346, + "grad_norm": 0.3092625063397082, + "learning_rate": 8.203584950013943e-08, + "loss": 1.0871, + "step": 5134 + }, + { + "epoch": 0.9879452878400722, + "grad_norm": 0.2826061850679332, + "learning_rate": 7.953141835057576e-08, + "loss": 1.0583, + "step": 5135 + }, + { + "epoch": 0.9881376822486097, + "grad_norm": 0.30800872364108206, + "learning_rate": 7.706579515962542e-08, + "loss": 1.0506, + "step": 5136 + }, + { + "epoch": 0.9883300766571471, + "grad_norm": 0.27389570162999394, + "learning_rate": 7.463898088490018e-08, + "loss": 1.0334, + "step": 5137 + }, + { + "epoch": 0.9885224710656847, + "grad_norm": 0.294756176525652, + "learning_rate": 7.225097646895718e-08, + "loss": 1.022, + "step": 5138 + }, + { + "epoch": 0.9887148654742222, + "grad_norm": 0.34231691884217796, + "learning_rate": 6.990178283927674e-08, + "loss": 0.9789, + "step": 5139 + }, + { + "epoch": 0.9889072598827596, + "grad_norm": 0.26817788047365737, + "learning_rate": 6.759140090824013e-08, + "loss": 1.0641, + "step": 5140 + }, + { + "epoch": 0.9890996542912972, + "grad_norm": 0.25716053792488497, + "learning_rate": 6.53198315731851e-08, + "loss": 1.0427, + "step": 5141 + }, + { + "epoch": 0.9892920486998347, + "grad_norm": 0.32935953149064995, + "learning_rate": 6.308707571636153e-08, + "loss": 1.05, + "step": 5142 + }, + { + "epoch": 0.9894844431083721, + "grad_norm": 0.328380784771582, + "learning_rate": 6.089313420494236e-08, + "loss": 0.9967, + "step": 5143 + }, + { + "epoch": 0.9896768375169097, + "grad_norm": 0.37057095491843356, + "learning_rate": 5.87380078910349e-08, + "loss": 1.0385, + "step": 5144 + }, + { + "epoch": 0.9898692319254472, + "grad_norm": 0.3532213545283488, + "learning_rate": 5.6621697611658474e-08, + "loss": 1.0159, + "step": 5145 + }, + { + "epoch": 0.9900616263339846, + "grad_norm": 0.3264479235789096, + "learning_rate": 5.454420418877782e-08, + "loss": 1.0119, + "step": 5146 + }, + { + "epoch": 0.9902540207425222, + "grad_norm": 0.32391000736233166, + "learning_rate": 5.2505528429247494e-08, + "loss": 1.1292, + "step": 5147 + }, + { + "epoch": 0.9904464151510597, + "grad_norm": 0.40009508655518117, + "learning_rate": 5.0505671124878584e-08, + "loss": 1.1856, + "step": 5148 + }, + { + "epoch": 0.9906388095595972, + "grad_norm": 0.27838687609714285, + "learning_rate": 4.8544633052383106e-08, + "loss": 1.0858, + "step": 5149 + }, + { + "epoch": 0.9908312039681346, + "grad_norm": 0.3505331434814559, + "learning_rate": 
4.662241497341846e-08, + "loss": 1.0012, + "step": 5150 + }, + { + "epoch": 0.9910235983766722, + "grad_norm": 0.3735866147332912, + "learning_rate": 4.473901763454302e-08, + "loss": 1.0039, + "step": 5151 + }, + { + "epoch": 0.9912159927852097, + "grad_norm": 0.35050639185913424, + "learning_rate": 4.289444176724944e-08, + "loss": 0.9755, + "step": 5152 + }, + { + "epoch": 0.9914083871937471, + "grad_norm": 0.4015214322041215, + "learning_rate": 4.108868808794242e-08, + "loss": 1.099, + "step": 5153 + }, + { + "epoch": 0.9916007816022847, + "grad_norm": 0.32490188732318753, + "learning_rate": 3.932175729797205e-08, + "loss": 1.0825, + "step": 5154 + }, + { + "epoch": 0.9917931760108222, + "grad_norm": 0.25621099687365334, + "learning_rate": 3.759365008357829e-08, + "loss": 0.9776, + "step": 5155 + }, + { + "epoch": 0.9919855704193596, + "grad_norm": 0.30612496529530275, + "learning_rate": 3.590436711594647e-08, + "loss": 1.0419, + "step": 5156 + }, + { + "epoch": 0.9921779648278972, + "grad_norm": 0.27549390219302733, + "learning_rate": 3.425390905117398e-08, + "loss": 0.9952, + "step": 5157 + }, + { + "epoch": 0.9923703592364347, + "grad_norm": 0.25044499402764225, + "learning_rate": 3.26422765302814e-08, + "loss": 0.9608, + "step": 5158 + }, + { + "epoch": 0.9925627536449722, + "grad_norm": 0.3473895293144665, + "learning_rate": 3.1069470179201364e-08, + "loss": 1.0073, + "step": 5159 + }, + { + "epoch": 0.9927551480535097, + "grad_norm": 0.2801863283558563, + "learning_rate": 2.9535490608789684e-08, + "loss": 0.992, + "step": 5160 + }, + { + "epoch": 0.9929475424620472, + "grad_norm": 0.28783072718150426, + "learning_rate": 2.8040338414847545e-08, + "loss": 1.0381, + "step": 5161 + }, + { + "epoch": 0.9931399368705847, + "grad_norm": 0.35476132654503884, + "learning_rate": 2.6584014178054895e-08, + "loss": 1.0589, + "step": 5162 + }, + { + "epoch": 0.9933323312791222, + "grad_norm": 0.36852576995944003, + "learning_rate": 2.516651846403706e-08, + "loss": 1.0105, + "step": 5163 + }, + { + "epoch": 0.9935247256876597, + "grad_norm": 0.32577321750325783, + "learning_rate": 2.378785182333143e-08, + "loss": 0.9502, + "step": 5164 + }, + { + "epoch": 0.9937171200961972, + "grad_norm": 0.28902393769938606, + "learning_rate": 2.2448014791398574e-08, + "loss": 1.0236, + "step": 5165 + }, + { + "epoch": 0.9939095145047347, + "grad_norm": 0.2782511558091309, + "learning_rate": 2.1147007888622228e-08, + "loss": 1.0375, + "step": 5166 + }, + { + "epoch": 0.9941019089132722, + "grad_norm": 0.33963338490072537, + "learning_rate": 1.9884831620287093e-08, + "loss": 1.0853, + "step": 5167 + }, + { + "epoch": 0.9942943033218097, + "grad_norm": 0.26228132825875633, + "learning_rate": 1.8661486476612145e-08, + "loss": 1.0217, + "step": 5168 + }, + { + "epoch": 0.9944866977303473, + "grad_norm": 0.3150664604179331, + "learning_rate": 1.747697293272843e-08, + "loss": 0.9769, + "step": 5169 + }, + { + "epoch": 0.9946790921388847, + "grad_norm": 0.27546539074040105, + "learning_rate": 1.633129144870127e-08, + "loss": 1.0444, + "step": 5170 + }, + { + "epoch": 0.9948714865474222, + "grad_norm": 0.31547501045666676, + "learning_rate": 1.522444246947474e-08, + "loss": 1.042, + "step": 5171 + }, + { + "epoch": 0.9950638809559598, + "grad_norm": 0.2797112311490469, + "learning_rate": 1.4156426424960512e-08, + "loss": 1.0165, + "step": 5172 + }, + { + "epoch": 0.9952562753644972, + "grad_norm": 0.42737115683387134, + "learning_rate": 1.3127243729949001e-08, + "loss": 0.9595, + "step": 5173 + }, + { + "epoch": 
0.9954486697730347, + "grad_norm": 0.3362270391859536, + "learning_rate": 1.2136894784176011e-08, + "loss": 0.9807, + "step": 5174 + }, + { + "epoch": 0.9956410641815723, + "grad_norm": 0.29380800797610535, + "learning_rate": 1.1185379972256105e-08, + "loss": 0.9628, + "step": 5175 + }, + { + "epoch": 0.9958334585901097, + "grad_norm": 0.27825084019984586, + "learning_rate": 1.0272699663782526e-08, + "loss": 1.0753, + "step": 5176 + }, + { + "epoch": 0.9960258529986472, + "grad_norm": 0.3120808822918355, + "learning_rate": 9.398854213193974e-09, + "loss": 1.0719, + "step": 5177 + }, + { + "epoch": 0.9962182474071847, + "grad_norm": 0.303026423702011, + "learning_rate": 8.563843959907836e-09, + "loss": 1.0695, + "step": 5178 + }, + { + "epoch": 0.9964106418157223, + "grad_norm": 0.3408252513090465, + "learning_rate": 7.767669228231356e-09, + "loss": 1.043, + "step": 5179 + }, + { + "epoch": 0.9966030362242597, + "grad_norm": 0.2870483299653104, + "learning_rate": 7.01033032737275e-09, + "loss": 1.0582, + "step": 5180 + }, + { + "epoch": 0.9967954306327972, + "grad_norm": 0.32622364042112134, + "learning_rate": 6.291827551474505e-09, + "loss": 1.0088, + "step": 5181 + }, + { + "epoch": 0.9969878250413348, + "grad_norm": 0.4938456626228281, + "learning_rate": 5.6121611796133845e-09, + "loss": 0.9896, + "step": 5182 + }, + { + "epoch": 0.9971802194498722, + "grad_norm": 0.341240759710936, + "learning_rate": 4.971331475756014e-09, + "loss": 0.9955, + "step": 5183 + }, + { + "epoch": 0.9973726138584097, + "grad_norm": 0.3593343391171791, + "learning_rate": 4.369338688781088e-09, + "loss": 1.1007, + "step": 5184 + }, + { + "epoch": 0.9975650082669473, + "grad_norm": 0.2646884522334642, + "learning_rate": 3.806183052512679e-09, + "loss": 1.0633, + "step": 5185 + }, + { + "epoch": 0.9977574026754847, + "grad_norm": 0.38411537205633367, + "learning_rate": 3.2818647856647234e-09, + "loss": 1.1244, + "step": 5186 + }, + { + "epoch": 0.9979497970840222, + "grad_norm": 0.3403012202566673, + "learning_rate": 2.7963840918854288e-09, + "loss": 1.0468, + "step": 5187 + }, + { + "epoch": 0.9981421914925598, + "grad_norm": 0.30320713046567127, + "learning_rate": 2.3497411597128703e-09, + "loss": 1.0754, + "step": 5188 + }, + { + "epoch": 0.9983345859010972, + "grad_norm": 0.363354538467187, + "learning_rate": 1.9419361626416e-09, + "loss": 1.0282, + "step": 5189 + }, + { + "epoch": 0.9985269803096347, + "grad_norm": 0.2894238073581119, + "learning_rate": 1.5729692590338297e-09, + "loss": 1.022, + "step": 5190 + }, + { + "epoch": 0.9987193747181723, + "grad_norm": 0.3261138013320079, + "learning_rate": 1.2428405921971476e-09, + "loss": 1.0666, + "step": 5191 + }, + { + "epoch": 0.9989117691267098, + "grad_norm": 0.34898553867330984, + "learning_rate": 9.515502903734153e-10, + "loss": 1.0095, + "step": 5192 + }, + { + "epoch": 0.9991041635352472, + "grad_norm": 0.3063028761263027, + "learning_rate": 6.990984666610523e-10, + "loss": 1.0384, + "step": 5193 + }, + { + "epoch": 0.9992965579437848, + "grad_norm": 0.29267951087106403, + "learning_rate": 4.854852191371606e-10, + "loss": 1.0509, + "step": 5194 + }, + { + "epoch": 0.9994889523523223, + "grad_norm": 0.29660899633363924, + "learning_rate": 3.1071063075760466e-10, + "loss": 1.0022, + "step": 5195 + }, + { + "epoch": 0.9996813467608597, + "grad_norm": 0.28400778819313277, + "learning_rate": 1.7477476940142012e-10, + "loss": 0.979, + "step": 5196 + }, + { + "epoch": 0.9998737411693973, + "grad_norm": 0.44440155061503905, + "learning_rate": 
7.767768785971185e-11, + "loss": 1.0221, + "step": 5197 + }, + { + "epoch": 0.9998737411693973, + "step": 5197, + "total_flos": 2841295497035776.0, + "train_loss": 0.08884279989888857, + "train_runtime": 6271.7197, + "train_samples_per_second": 106.079, + "train_steps_per_second": 0.829 + } + ], + "logging_steps": 1.0, + "max_steps": 5197, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 250, + "total_flos": 2841295497035776.0, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}