| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.999429549343982, |
| "eval_steps": 500, |
| "global_step": 2628, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0011409013120365088, |
| "grad_norm": 54.65760974624293, |
| "learning_rate": 1.9011406844106465e-07, |
| "loss": 11.0934, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0022818026240730175, |
| "grad_norm": 54.77744512464696, |
| "learning_rate": 3.802281368821293e-07, |
| "loss": 11.0737, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0034227039361095267, |
| "grad_norm": 58.007163271645624, |
| "learning_rate": 5.70342205323194e-07, |
| "loss": 10.9735, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.004563605248146035, |
| "grad_norm": 55.77970400435833, |
| "learning_rate": 7.604562737642586e-07, |
| "loss": 10.9883, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.005704506560182544, |
| "grad_norm": 57.7425202963039, |
| "learning_rate": 9.505703422053232e-07, |
| "loss": 10.7121, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0068454078722190535, |
| "grad_norm": 56.50330125597672, |
| "learning_rate": 1.140684410646388e-06, |
| "loss": 10.9614, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.007986309184255563, |
| "grad_norm": 54.6706710453405, |
| "learning_rate": 1.3307984790874525e-06, |
| "loss": 11.004, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.00912721049629207, |
| "grad_norm": 60.82691342176186, |
| "learning_rate": 1.5209125475285172e-06, |
| "loss": 10.7417, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01026811180832858, |
| "grad_norm": 60.66911793811066, |
| "learning_rate": 1.711026615969582e-06, |
| "loss": 10.6853, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.011409013120365089, |
| "grad_norm": 62.943469991829076, |
| "learning_rate": 1.9011406844106463e-06, |
| "loss": 10.5299, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.012549914432401598, |
| "grad_norm": 87.13133317065872, |
| "learning_rate": 2.091254752851711e-06, |
| "loss": 9.2991, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.013690815744438107, |
| "grad_norm": 92.23580150407606, |
| "learning_rate": 2.281368821292776e-06, |
| "loss": 9.1733, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.014831717056474614, |
| "grad_norm": 98.2645411335883, |
| "learning_rate": 2.4714828897338406e-06, |
| "loss": 8.7304, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.015972618368511125, |
| "grad_norm": 102.02071115957615, |
| "learning_rate": 2.661596958174905e-06, |
| "loss": 8.5508, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.017113519680547633, |
| "grad_norm": 63.610623784408745, |
| "learning_rate": 2.8517110266159697e-06, |
| "loss": 3.6785, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01825442099258414, |
| "grad_norm": 56.24211488085604, |
| "learning_rate": 3.0418250950570345e-06, |
| "loss": 3.4128, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.01939532230462065, |
| "grad_norm": 43.70694807512611, |
| "learning_rate": 3.2319391634980988e-06, |
| "loss": 2.8317, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.02053622361665716, |
| "grad_norm": 33.75445660517821, |
| "learning_rate": 3.422053231939164e-06, |
| "loss": 2.4295, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.02167712492869367, |
| "grad_norm": 25.615342965674877, |
| "learning_rate": 3.612167300380228e-06, |
| "loss": 2.1501, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.022818026240730177, |
| "grad_norm": 6.639036637383511, |
| "learning_rate": 3.8022813688212926e-06, |
| "loss": 1.3864, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.023958927552766685, |
| "grad_norm": 5.463351047298746, |
| "learning_rate": 3.992395437262358e-06, |
| "loss": 1.3348, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.025099828864803195, |
| "grad_norm": 4.486017466274795, |
| "learning_rate": 4.182509505703422e-06, |
| "loss": 1.277, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.026240730176839703, |
| "grad_norm": 3.403461105976068, |
| "learning_rate": 4.3726235741444865e-06, |
| "loss": 1.2209, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.027381631488876214, |
| "grad_norm": 2.7431919860295952, |
| "learning_rate": 4.562737642585552e-06, |
| "loss": 1.1256, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.02852253280091272, |
| "grad_norm": 2.1398348178705002, |
| "learning_rate": 4.752851711026616e-06, |
| "loss": 1.0774, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.02966343411294923, |
| "grad_norm": 1.6629279927743938, |
| "learning_rate": 4.942965779467681e-06, |
| "loss": 1.0002, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.03080433542498574, |
| "grad_norm": 5.096050154303005, |
| "learning_rate": 5.1330798479087455e-06, |
| "loss": 0.9648, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.03194523673702225, |
| "grad_norm": 20.80863576938441, |
| "learning_rate": 5.32319391634981e-06, |
| "loss": 0.9545, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.03308613804905876, |
| "grad_norm": 3.60709294861697, |
| "learning_rate": 5.513307984790875e-06, |
| "loss": 0.9197, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.034227039361095266, |
| "grad_norm": 1.38157954773502, |
| "learning_rate": 5.703422053231939e-06, |
| "loss": 0.8682, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03536794067313177, |
| "grad_norm": 1.0909877915531283, |
| "learning_rate": 5.893536121673004e-06, |
| "loss": 0.8942, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.03650884198516828, |
| "grad_norm": 0.9344301030076203, |
| "learning_rate": 6.083650190114069e-06, |
| "loss": 0.8461, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.037649743297204795, |
| "grad_norm": 0.8591775118329756, |
| "learning_rate": 6.273764258555133e-06, |
| "loss": 0.8398, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.0387906446092413, |
| "grad_norm": 0.7938493134058952, |
| "learning_rate": 6.4638783269961976e-06, |
| "loss": 0.8067, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.03993154592127781, |
| "grad_norm": 0.7271814029051149, |
| "learning_rate": 6.653992395437263e-06, |
| "loss": 0.8076, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.04107244723331432, |
| "grad_norm": 0.8987059398819114, |
| "learning_rate": 6.844106463878328e-06, |
| "loss": 0.813, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.042213348545350825, |
| "grad_norm": 0.7348990443928746, |
| "learning_rate": 7.034220532319392e-06, |
| "loss": 0.7624, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.04335424985738734, |
| "grad_norm": 0.6060862909294358, |
| "learning_rate": 7.224334600760456e-06, |
| "loss": 0.7888, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.04449515116942385, |
| "grad_norm": 0.6572288005824576, |
| "learning_rate": 7.414448669201521e-06, |
| "loss": 0.7309, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.045636052481460354, |
| "grad_norm": 0.7414983714652406, |
| "learning_rate": 7.604562737642585e-06, |
| "loss": 0.7613, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04677695379349686, |
| "grad_norm": 0.5647065598184654, |
| "learning_rate": 7.79467680608365e-06, |
| "loss": 0.714, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.04791785510553337, |
| "grad_norm": 0.4897340721044095, |
| "learning_rate": 7.984790874524716e-06, |
| "loss": 0.6845, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.04905875641756988, |
| "grad_norm": 0.47501108184275426, |
| "learning_rate": 8.17490494296578e-06, |
| "loss": 0.6938, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.05019965772960639, |
| "grad_norm": 0.4100828588343714, |
| "learning_rate": 8.365019011406844e-06, |
| "loss": 0.661, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.0513405590416429, |
| "grad_norm": 0.4600477852854328, |
| "learning_rate": 8.55513307984791e-06, |
| "loss": 0.6711, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.052481460353679406, |
| "grad_norm": 0.49355119570935346, |
| "learning_rate": 8.745247148288973e-06, |
| "loss": 0.6621, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.05362236166571591, |
| "grad_norm": 0.4895364232739456, |
| "learning_rate": 8.935361216730038e-06, |
| "loss": 0.6596, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.05476326297775243, |
| "grad_norm": 0.380390266394673, |
| "learning_rate": 9.125475285171103e-06, |
| "loss": 0.6605, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.055904164289788935, |
| "grad_norm": 0.4067888570775391, |
| "learning_rate": 9.315589353612169e-06, |
| "loss": 0.636, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.05704506560182544, |
| "grad_norm": 0.41398686146929453, |
| "learning_rate": 9.505703422053232e-06, |
| "loss": 0.6569, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.05818596691386195, |
| "grad_norm": 0.4159751891164936, |
| "learning_rate": 9.695817490494297e-06, |
| "loss": 0.6425, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.05932686822589846, |
| "grad_norm": 0.4311602717231244, |
| "learning_rate": 9.885931558935362e-06, |
| "loss": 0.6617, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.06046776953793497, |
| "grad_norm": 0.420579457115958, |
| "learning_rate": 1.0076045627376426e-05, |
| "loss": 0.6413, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.06160867084997148, |
| "grad_norm": 0.32749957676716046, |
| "learning_rate": 1.0266159695817491e-05, |
| "loss": 0.6239, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.06274957216200798, |
| "grad_norm": 0.33903263089513586, |
| "learning_rate": 1.0456273764258556e-05, |
| "loss": 0.6029, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.0638904734740445, |
| "grad_norm": 0.3428426351133492, |
| "learning_rate": 1.064638783269962e-05, |
| "loss": 0.6329, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.06503137478608101, |
| "grad_norm": 0.3028264313705029, |
| "learning_rate": 1.0836501901140685e-05, |
| "loss": 0.6237, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.06617227609811752, |
| "grad_norm": 0.3196521894503953, |
| "learning_rate": 1.102661596958175e-05, |
| "loss": 0.6304, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.06731317741015402, |
| "grad_norm": 0.31340766105880463, |
| "learning_rate": 1.1216730038022814e-05, |
| "loss": 0.6351, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.06845407872219053, |
| "grad_norm": 0.2947895411613211, |
| "learning_rate": 1.1406844106463879e-05, |
| "loss": 0.6181, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.06959498003422704, |
| "grad_norm": 0.3158134981188498, |
| "learning_rate": 1.1596958174904944e-05, |
| "loss": 0.6359, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.07073588134626355, |
| "grad_norm": 0.2935907176760453, |
| "learning_rate": 1.1787072243346007e-05, |
| "loss": 0.6472, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.07187678265830005, |
| "grad_norm": 0.2942195957589177, |
| "learning_rate": 1.1977186311787073e-05, |
| "loss": 0.6124, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.07301768397033656, |
| "grad_norm": 0.31711851127434204, |
| "learning_rate": 1.2167300380228138e-05, |
| "loss": 0.6104, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.07415858528237307, |
| "grad_norm": 0.27423197310063907, |
| "learning_rate": 1.2357414448669203e-05, |
| "loss": 0.6032, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.07529948659440959, |
| "grad_norm": 0.32727870494920275, |
| "learning_rate": 1.2547528517110266e-05, |
| "loss": 0.5998, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.0764403879064461, |
| "grad_norm": 0.268006371687303, |
| "learning_rate": 1.2737642585551332e-05, |
| "loss": 0.5977, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.0775812892184826, |
| "grad_norm": 0.2691340842565907, |
| "learning_rate": 1.2927756653992395e-05, |
| "loss": 0.6017, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.07872219053051911, |
| "grad_norm": 0.3215576928511998, |
| "learning_rate": 1.3117870722433462e-05, |
| "loss": 0.6111, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.07986309184255562, |
| "grad_norm": 0.2524933357357243, |
| "learning_rate": 1.3307984790874526e-05, |
| "loss": 0.5856, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.08100399315459213, |
| "grad_norm": 0.2976420763364573, |
| "learning_rate": 1.3498098859315589e-05, |
| "loss": 0.5745, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.08214489446662863, |
| "grad_norm": 0.2721011406702133, |
| "learning_rate": 1.3688212927756656e-05, |
| "loss": 0.5956, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.08328579577866514, |
| "grad_norm": 0.2602309725894347, |
| "learning_rate": 1.387832699619772e-05, |
| "loss": 0.5774, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.08442669709070165, |
| "grad_norm": 0.2853820862748792, |
| "learning_rate": 1.4068441064638785e-05, |
| "loss": 0.5809, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.08556759840273816, |
| "grad_norm": 0.27878528131213076, |
| "learning_rate": 1.4258555133079848e-05, |
| "loss": 0.5659, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.08670849971477468, |
| "grad_norm": 0.2875430217961869, |
| "learning_rate": 1.4448669201520912e-05, |
| "loss": 0.5645, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.08784940102681119, |
| "grad_norm": 0.3014267143867856, |
| "learning_rate": 1.4638783269961978e-05, |
| "loss": 0.5919, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.0889903023388477, |
| "grad_norm": 0.2703052723297986, |
| "learning_rate": 1.4828897338403042e-05, |
| "loss": 0.5902, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.0901312036508842, |
| "grad_norm": 0.303585390748195, |
| "learning_rate": 1.5019011406844107e-05, |
| "loss": 0.5772, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.09127210496292071, |
| "grad_norm": 0.26905000684530966, |
| "learning_rate": 1.520912547528517e-05, |
| "loss": 0.5822, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.09241300627495722, |
| "grad_norm": 0.28915135241524265, |
| "learning_rate": 1.5399239543726237e-05, |
| "loss": 0.5588, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.09355390758699372, |
| "grad_norm": 0.29494358324783804, |
| "learning_rate": 1.55893536121673e-05, |
| "loss": 0.5909, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.09469480889903023, |
| "grad_norm": 0.267960505652568, |
| "learning_rate": 1.5779467680608364e-05, |
| "loss": 0.5449, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.09583571021106674, |
| "grad_norm": 0.29734284598046806, |
| "learning_rate": 1.596958174904943e-05, |
| "loss": 0.563, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.09697661152310325, |
| "grad_norm": 0.2975566270568251, |
| "learning_rate": 1.6159695817490495e-05, |
| "loss": 0.5733, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.09811751283513977, |
| "grad_norm": 0.3144496200827959, |
| "learning_rate": 1.634980988593156e-05, |
| "loss": 0.5393, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.09925841414717627, |
| "grad_norm": 0.3439082551622902, |
| "learning_rate": 1.6539923954372625e-05, |
| "loss": 0.5801, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.10039931545921278, |
| "grad_norm": 0.3272359309053644, |
| "learning_rate": 1.673003802281369e-05, |
| "loss": 0.595, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.10154021677124929, |
| "grad_norm": 0.2806665105776375, |
| "learning_rate": 1.6920152091254756e-05, |
| "loss": 0.5539, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.1026811180832858, |
| "grad_norm": 0.3583067110894645, |
| "learning_rate": 1.711026615969582e-05, |
| "loss": 0.5682, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.1038220193953223, |
| "grad_norm": 0.3124598607304225, |
| "learning_rate": 1.7300380228136882e-05, |
| "loss": 0.5521, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.10496292070735881, |
| "grad_norm": 0.30309929429920146, |
| "learning_rate": 1.7490494296577946e-05, |
| "loss": 0.5498, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.10610382201939532, |
| "grad_norm": 0.3323899816536335, |
| "learning_rate": 1.7680608365019013e-05, |
| "loss": 0.5888, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.10724472333143183, |
| "grad_norm": 0.2752652481956559, |
| "learning_rate": 1.7870722433460076e-05, |
| "loss": 0.5564, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.10838562464346833, |
| "grad_norm": 0.31613768457039215, |
| "learning_rate": 1.806083650190114e-05, |
| "loss": 0.5526, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.10952652595550486, |
| "grad_norm": 0.34846752285781013, |
| "learning_rate": 1.8250950570342207e-05, |
| "loss": 0.5605, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.11066742726754136, |
| "grad_norm": 0.32422289472592464, |
| "learning_rate": 1.844106463878327e-05, |
| "loss": 0.5761, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.11180832857957787, |
| "grad_norm": 0.2821646610453036, |
| "learning_rate": 1.8631178707224337e-05, |
| "loss": 0.538, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.11294922989161438, |
| "grad_norm": 0.3363578624190279, |
| "learning_rate": 1.88212927756654e-05, |
| "loss": 0.5207, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.11409013120365089, |
| "grad_norm": 0.33298699970382173, |
| "learning_rate": 1.9011406844106464e-05, |
| "loss": 0.5518, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.11523103251568739, |
| "grad_norm": 0.34942931660045073, |
| "learning_rate": 1.920152091254753e-05, |
| "loss": 0.5472, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.1163719338277239, |
| "grad_norm": 0.32312662063760633, |
| "learning_rate": 1.9391634980988594e-05, |
| "loss": 0.5673, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.11751283513976041, |
| "grad_norm": 0.4126867118933167, |
| "learning_rate": 1.958174904942966e-05, |
| "loss": 0.5463, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.11865373645179692, |
| "grad_norm": 0.32148007377648646, |
| "learning_rate": 1.9771863117870725e-05, |
| "loss": 0.5598, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.11979463776383342, |
| "grad_norm": 0.3942522629458706, |
| "learning_rate": 1.9961977186311788e-05, |
| "loss": 0.5631, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.12093553907586994, |
| "grad_norm": 0.3313247044983476, |
| "learning_rate": 2.0152091254752852e-05, |
| "loss": 0.5568, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.12207644038790645, |
| "grad_norm": 0.4023164269134208, |
| "learning_rate": 2.0342205323193915e-05, |
| "loss": 0.5544, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.12321734169994296, |
| "grad_norm": 0.3746260516214422, |
| "learning_rate": 2.0532319391634982e-05, |
| "loss": 0.5375, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.12435824301197947, |
| "grad_norm": 0.31792875838737306, |
| "learning_rate": 2.0722433460076046e-05, |
| "loss": 0.5675, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.12549914432401596, |
| "grad_norm": 0.36266589494136714, |
| "learning_rate": 2.0912547528517112e-05, |
| "loss": 0.5346, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.1266400456360525, |
| "grad_norm": 0.35621216017329244, |
| "learning_rate": 2.1102661596958176e-05, |
| "loss": 0.5233, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.127780946948089, |
| "grad_norm": 0.34992793415074863, |
| "learning_rate": 2.129277566539924e-05, |
| "loss": 0.5204, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.1289218482601255, |
| "grad_norm": 0.43140776333761577, |
| "learning_rate": 2.1482889733840306e-05, |
| "loss": 0.5247, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.13006274957216202, |
| "grad_norm": 0.30876595829241754, |
| "learning_rate": 2.167300380228137e-05, |
| "loss": 0.5366, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.13120365088419852, |
| "grad_norm": 0.49393023217750726, |
| "learning_rate": 2.1863117870722437e-05, |
| "loss": 0.5543, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.13234455219623503, |
| "grad_norm": 0.3174141199663179, |
| "learning_rate": 2.20532319391635e-05, |
| "loss": 0.5136, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.13348545350827154, |
| "grad_norm": 0.43744514649905636, |
| "learning_rate": 2.2243346007604564e-05, |
| "loss": 0.562, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.13462635482030805, |
| "grad_norm": 0.35548978897729616, |
| "learning_rate": 2.2433460076045627e-05, |
| "loss": 0.5324, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.13576725613234455, |
| "grad_norm": 2.144134688500371, |
| "learning_rate": 2.262357414448669e-05, |
| "loss": 0.5417, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.13690815744438106, |
| "grad_norm": 0.5181165703644828, |
| "learning_rate": 2.2813688212927758e-05, |
| "loss": 0.5439, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.13804905875641757, |
| "grad_norm": 0.3562744580952984, |
| "learning_rate": 2.300380228136882e-05, |
| "loss": 0.4994, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.13918996006845408, |
| "grad_norm": 0.47803128689037555, |
| "learning_rate": 2.3193916349809888e-05, |
| "loss": 0.5225, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.14033086138049058, |
| "grad_norm": 0.43179878456026727, |
| "learning_rate": 2.338403041825095e-05, |
| "loss": 0.5173, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.1414717626925271, |
| "grad_norm": 0.48797324922902563, |
| "learning_rate": 2.3574144486692015e-05, |
| "loss": 0.5331, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.1426126640045636, |
| "grad_norm": 0.3713551353610012, |
| "learning_rate": 2.3764258555133082e-05, |
| "loss": 0.5399, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.1437535653166001, |
| "grad_norm": 0.5169644283335567, |
| "learning_rate": 2.3954372623574145e-05, |
| "loss": 0.5357, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.14489446662863661, |
| "grad_norm": 0.31629506927905415, |
| "learning_rate": 2.4144486692015212e-05, |
| "loss": 0.5239, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.14603536794067312, |
| "grad_norm": 0.5278145043561342, |
| "learning_rate": 2.4334600760456276e-05, |
| "loss": 0.528, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.14717626925270963, |
| "grad_norm": 0.3315425028798385, |
| "learning_rate": 2.452471482889734e-05, |
| "loss": 0.5495, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.14831717056474614, |
| "grad_norm": 0.517192805192663, |
| "learning_rate": 2.4714828897338406e-05, |
| "loss": 0.5083, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.14945807187678267, |
| "grad_norm": 0.36358628344754546, |
| "learning_rate": 2.490494296577947e-05, |
| "loss": 0.5132, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.15059897318881918, |
| "grad_norm": 0.4243392518096659, |
| "learning_rate": 2.5095057034220533e-05, |
| "loss": 0.5239, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.1517398745008557, |
| "grad_norm": 0.42938450026993014, |
| "learning_rate": 2.5285171102661596e-05, |
| "loss": 0.5222, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.1528807758128922, |
| "grad_norm": 0.3341842284068171, |
| "learning_rate": 2.5475285171102663e-05, |
| "loss": 0.5186, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.1540216771249287, |
| "grad_norm": 0.4541463176730268, |
| "learning_rate": 2.5665399239543723e-05, |
| "loss": 0.5455, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.1551625784369652, |
| "grad_norm": 0.30644393922157925, |
| "learning_rate": 2.585551330798479e-05, |
| "loss": 0.5219, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.15630347974900172, |
| "grad_norm": 0.4150075147103399, |
| "learning_rate": 2.6045627376425857e-05, |
| "loss": 0.516, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.15744438106103822, |
| "grad_norm": 0.36139117065630944, |
| "learning_rate": 2.6235741444866924e-05, |
| "loss": 0.5293, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.15858528237307473, |
| "grad_norm": 0.3803593895867164, |
| "learning_rate": 2.6425855513307984e-05, |
| "loss": 0.5197, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.15972618368511124, |
| "grad_norm": 0.32396824952520176, |
| "learning_rate": 2.661596958174905e-05, |
| "loss": 0.5244, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.16086708499714775, |
| "grad_norm": 0.3774873367059754, |
| "learning_rate": 2.6806083650190118e-05, |
| "loss": 0.5154, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.16200798630918425, |
| "grad_norm": 0.34633191929954577, |
| "learning_rate": 2.6996197718631178e-05, |
| "loss": 0.5249, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.16314888762122076, |
| "grad_norm": 0.3894815335476097, |
| "learning_rate": 2.7186311787072245e-05, |
| "loss": 0.5198, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.16428978893325727, |
| "grad_norm": 0.36444762291566796, |
| "learning_rate": 2.7376425855513312e-05, |
| "loss": 0.5015, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.16543069024529378, |
| "grad_norm": 0.423060528009577, |
| "learning_rate": 2.7566539923954375e-05, |
| "loss": 0.5218, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.16657159155733028, |
| "grad_norm": 0.35615661027370654, |
| "learning_rate": 2.775665399239544e-05, |
| "loss": 0.5237, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.1677124928693668, |
| "grad_norm": 0.46050626206133344, |
| "learning_rate": 2.7946768060836502e-05, |
| "loss": 0.5398, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.1688533941814033, |
| "grad_norm": 0.36853471772573493, |
| "learning_rate": 2.813688212927757e-05, |
| "loss": 0.5095, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.1699942954934398, |
| "grad_norm": 0.4932478981324432, |
| "learning_rate": 2.832699619771863e-05, |
| "loss": 0.5226, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.17113519680547631, |
| "grad_norm": 0.37242675018432586, |
| "learning_rate": 2.8517110266159696e-05, |
| "loss": 0.5551, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.17227609811751282, |
| "grad_norm": 0.37826249255692296, |
| "learning_rate": 2.8707224334600763e-05, |
| "loss": 0.4902, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.17341699942954936, |
| "grad_norm": 0.39728450228224194, |
| "learning_rate": 2.8897338403041823e-05, |
| "loss": 0.5209, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.17455790074158586, |
| "grad_norm": 0.40470187502226895, |
| "learning_rate": 2.908745247148289e-05, |
| "loss": 0.5359, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.17569880205362237, |
| "grad_norm": 0.40620197495649407, |
| "learning_rate": 2.9277566539923957e-05, |
| "loss": 0.5242, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.17683970336565888, |
| "grad_norm": 0.3727566620054441, |
| "learning_rate": 2.9467680608365024e-05, |
| "loss": 0.5055, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.1779806046776954, |
| "grad_norm": 0.4398128842105725, |
| "learning_rate": 2.9657794676806084e-05, |
| "loss": 0.5408, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.1791215059897319, |
| "grad_norm": 0.38490204332241523, |
| "learning_rate": 2.984790874524715e-05, |
| "loss": 0.5156, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.1802624073017684, |
| "grad_norm": 0.4517639869302986, |
| "learning_rate": 3.0038022813688214e-05, |
| "loss": 0.528, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.1814033086138049, |
| "grad_norm": 0.4954944774014952, |
| "learning_rate": 3.0228136882129278e-05, |
| "loss": 0.508, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.18254420992584142, |
| "grad_norm": 0.3748332582197787, |
| "learning_rate": 3.041825095057034e-05, |
| "loss": 0.5167, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.18368511123787792, |
| "grad_norm": 0.40942567421244364, |
| "learning_rate": 3.060836501901141e-05, |
| "loss": 0.4919, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.18482601254991443, |
| "grad_norm": 0.37179984547777867, |
| "learning_rate": 3.0798479087452475e-05, |
| "loss": 0.5277, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.18596691386195094, |
| "grad_norm": 0.4427813850278362, |
| "learning_rate": 3.098859315589354e-05, |
| "loss": 0.5084, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.18710781517398745, |
| "grad_norm": 0.3953797053294692, |
| "learning_rate": 3.11787072243346e-05, |
| "loss": 0.4996, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.18824871648602395, |
| "grad_norm": 0.4498502562903012, |
| "learning_rate": 3.1368821292775665e-05, |
| "loss": 0.5049, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.18938961779806046, |
| "grad_norm": 0.4108040639817359, |
| "learning_rate": 3.155893536121673e-05, |
| "loss": 0.5328, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.19053051911009697, |
| "grad_norm": 0.5410412527841703, |
| "learning_rate": 3.174904942965779e-05, |
| "loss": 0.5119, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.19167142042213348, |
| "grad_norm": 0.4344065614283204, |
| "learning_rate": 3.193916349809886e-05, |
| "loss": 0.4837, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.19281232173416998, |
| "grad_norm": 0.6626492419165108, |
| "learning_rate": 3.2129277566539926e-05, |
| "loss": 0.5132, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.1939532230462065, |
| "grad_norm": 0.4038123054819114, |
| "learning_rate": 3.231939163498099e-05, |
| "loss": 0.5067, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.195094124358243, |
| "grad_norm": 0.5636456551357991, |
| "learning_rate": 3.250950570342205e-05, |
| "loss": 0.4673, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.19623502567027953, |
| "grad_norm": 0.5205483430986917, |
| "learning_rate": 3.269961977186312e-05, |
| "loss": 0.5265, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.19737592698231604, |
| "grad_norm": 0.48211911743840075, |
| "learning_rate": 3.288973384030418e-05, |
| "loss": 0.507, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.19851682829435255, |
| "grad_norm": 0.47908754790447056, |
| "learning_rate": 3.307984790874525e-05, |
| "loss": 0.5165, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.19965772960638906, |
| "grad_norm": 0.4230015023005651, |
| "learning_rate": 3.3269961977186314e-05, |
| "loss": 0.5102, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.20079863091842556, |
| "grad_norm": 0.5427071406265588, |
| "learning_rate": 3.346007604562738e-05, |
| "loss": 0.5054, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.20193953223046207, |
| "grad_norm": 0.5099286314808494, |
| "learning_rate": 3.365019011406844e-05, |
| "loss": 0.5222, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.20308043354249858, |
| "grad_norm": 0.6159657751770209, |
| "learning_rate": 3.384030418250951e-05, |
| "loss": 0.5431, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.20422133485453509, |
| "grad_norm": 0.7193603225951907, |
| "learning_rate": 3.4030418250950574e-05, |
| "loss": 0.4963, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.2053622361665716, |
| "grad_norm": 0.45033133846022033, |
| "learning_rate": 3.422053231939164e-05, |
| "loss": 0.5014, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.2065031374786081, |
| "grad_norm": 0.7111256209057891, |
| "learning_rate": 3.44106463878327e-05, |
| "loss": 0.5059, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.2076440387906446, |
| "grad_norm": 0.6493203071076293, |
| "learning_rate": 3.4600760456273765e-05, |
| "loss": 0.4941, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.20878494010268112, |
| "grad_norm": 0.5283000924211072, |
| "learning_rate": 3.479087452471483e-05, |
| "loss": 0.5177, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.20992584141471762, |
| "grad_norm": 0.653191879689384, |
| "learning_rate": 3.498098859315589e-05, |
| "loss": 0.513, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.21106674272675413, |
| "grad_norm": 0.7341746099628614, |
| "learning_rate": 3.517110266159696e-05, |
| "loss": 0.4987, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.21220764403879064, |
| "grad_norm": 0.4263764021054984, |
| "learning_rate": 3.5361216730038026e-05, |
| "loss": 0.4939, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.21334854535082715, |
| "grad_norm": 0.6255115057204136, |
| "learning_rate": 3.555133079847909e-05, |
| "loss": 0.479, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.21448944666286365, |
| "grad_norm": 0.5199063260718685, |
| "learning_rate": 3.574144486692015e-05, |
| "loss": 0.4973, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.21563034797490016, |
| "grad_norm": 0.4976122263753569, |
| "learning_rate": 3.593155893536122e-05, |
| "loss": 0.4823, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.21677124928693667, |
| "grad_norm": 0.5304914545222749, |
| "learning_rate": 3.612167300380228e-05, |
| "loss": 0.499, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.21791215059897318, |
| "grad_norm": 0.4108657755861063, |
| "learning_rate": 3.631178707224335e-05, |
| "loss": 0.4968, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.2190530519110097, |
| "grad_norm": 0.5596852694323317, |
| "learning_rate": 3.6501901140684413e-05, |
| "loss": 0.5001, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.22019395322304622, |
| "grad_norm": 0.502896811074524, |
| "learning_rate": 3.669201520912548e-05, |
| "loss": 0.4846, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.22133485453508273, |
| "grad_norm": 0.5091731389286583, |
| "learning_rate": 3.688212927756654e-05, |
| "loss": 0.5224, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.22247575584711923, |
| "grad_norm": 0.5274679963561089, |
| "learning_rate": 3.7072243346007604e-05, |
| "loss": 0.5276, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.22361665715915574, |
| "grad_norm": 0.3898202631701823, |
| "learning_rate": 3.7262357414448674e-05, |
| "loss": 0.4836, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.22475755847119225, |
| "grad_norm": 0.42129635358682016, |
| "learning_rate": 3.745247148288973e-05, |
| "loss": 0.4678, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.22589845978322876, |
| "grad_norm": 0.4618057419115625, |
| "learning_rate": 3.76425855513308e-05, |
| "loss": 0.5342, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.22703936109526526, |
| "grad_norm": 0.4381869533215169, |
| "learning_rate": 3.7832699619771865e-05, |
| "loss": 0.4671, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.22818026240730177, |
| "grad_norm": 0.5440837221051571, |
| "learning_rate": 3.802281368821293e-05, |
| "loss": 0.5059, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.22932116371933828, |
| "grad_norm": 0.5334139765821885, |
| "learning_rate": 3.821292775665399e-05, |
| "loss": 0.5145, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.23046206503137479, |
| "grad_norm": 0.3732495424879209, |
| "learning_rate": 3.840304182509506e-05, |
| "loss": 0.4838, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.2316029663434113, |
| "grad_norm": 0.5988989154670815, |
| "learning_rate": 3.8593155893536125e-05, |
| "loss": 0.5037, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.2327438676554478, |
| "grad_norm": 0.5200393200147204, |
| "learning_rate": 3.878326996197719e-05, |
| "loss": 0.4745, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.2338847689674843, |
| "grad_norm": 0.4721161101938097, |
| "learning_rate": 3.897338403041825e-05, |
| "loss": 0.5197, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.23502567027952082, |
| "grad_norm": 0.5749531215762707, |
| "learning_rate": 3.916349809885932e-05, |
| "loss": 0.4825, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.23616657159155732, |
| "grad_norm": 0.40149594230609625, |
| "learning_rate": 3.935361216730038e-05, |
| "loss": 0.4597, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.23730747290359383, |
| "grad_norm": 0.5486586047623457, |
| "learning_rate": 3.954372623574145e-05, |
| "loss": 0.4847, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.23844837421563034, |
| "grad_norm": 0.5820602383783187, |
| "learning_rate": 3.973384030418251e-05, |
| "loss": 0.4877, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.23958927552766685, |
| "grad_norm": 0.5489212619030648, |
| "learning_rate": 3.9923954372623577e-05, |
| "loss": 0.5013, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.24073017683970335, |
| "grad_norm": 0.6147528782803471, |
| "learning_rate": 4.011406844106464e-05, |
| "loss": 0.4988, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.2418710781517399, |
| "grad_norm": 0.5588776012384556, |
| "learning_rate": 4.0304182509505703e-05, |
| "loss": 0.5189, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.2430119794637764, |
| "grad_norm": 0.4790644696477516, |
| "learning_rate": 4.0494296577946774e-05, |
| "loss": 0.5036, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.2441528807758129, |
| "grad_norm": 0.4833204786443911, |
| "learning_rate": 4.068441064638783e-05, |
| "loss": 0.476, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.2452937820878494, |
| "grad_norm": 0.47845223722709473, |
| "learning_rate": 4.08745247148289e-05, |
| "loss": 0.4965, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.24643468339988592, |
| "grad_norm": 0.4481560825138665, |
| "learning_rate": 4.1064638783269964e-05, |
| "loss": 0.5017, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.24757558471192243, |
| "grad_norm": 0.5086810495955193, |
| "learning_rate": 4.125475285171103e-05, |
| "loss": 0.4969, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.24871648602395893, |
| "grad_norm": 0.5343756618348462, |
| "learning_rate": 4.144486692015209e-05, |
| "loss": 0.4821, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.24985738733599544, |
| "grad_norm": 0.401711278686693, |
| "learning_rate": 4.163498098859316e-05, |
| "loss": 0.4985, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.2509982886480319, |
| "grad_norm": 0.5346993959856637, |
| "learning_rate": 4.1825095057034225e-05, |
| "loss": 0.5037, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.25213918996006845, |
| "grad_norm": 0.46140846706399685, |
| "learning_rate": 4.201520912547529e-05, |
| "loss": 0.4856, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.253280091272105, |
| "grad_norm": 0.4839447665729109, |
| "learning_rate": 4.220532319391635e-05, |
| "loss": 0.4842, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.25442099258414147, |
| "grad_norm": 0.3697667503798369, |
| "learning_rate": 4.2395437262357415e-05, |
| "loss": 0.4873, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.255561893896178, |
| "grad_norm": 0.4427985621603716, |
| "learning_rate": 4.258555133079848e-05, |
| "loss": 0.4835, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.2567027952082145, |
| "grad_norm": 0.39597098010473725, |
| "learning_rate": 4.277566539923954e-05, |
| "loss": 0.5005, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.257843696520251, |
| "grad_norm": 0.4176905956944042, |
| "learning_rate": 4.296577946768061e-05, |
| "loss": 0.4986, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.2589845978322875, |
| "grad_norm": 0.42126473910651496, |
| "learning_rate": 4.3155893536121676e-05, |
| "loss": 0.4788, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.26012549914432403, |
| "grad_norm": 0.41188830182365, |
| "learning_rate": 4.334600760456274e-05, |
| "loss": 0.4925, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.2612664004563605, |
| "grad_norm": 0.457545396221541, |
| "learning_rate": 4.35361216730038e-05, |
| "loss": 0.5031, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.26240730176839705, |
| "grad_norm": 0.4116499547651067, |
| "learning_rate": 4.3726235741444873e-05, |
| "loss": 0.5025, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.26354820308043353, |
| "grad_norm": 0.4760209285221355, |
| "learning_rate": 4.391634980988593e-05, |
| "loss": 0.4959, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.26468910439247006, |
| "grad_norm": 0.3542120628745739, |
| "learning_rate": 4.4106463878327e-05, |
| "loss": 0.4936, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.26583000570450654, |
| "grad_norm": 0.422037077717719, |
| "learning_rate": 4.4296577946768064e-05, |
| "loss": 0.4894, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.2669709070165431, |
| "grad_norm": 0.3914300693793639, |
| "learning_rate": 4.448669201520913e-05, |
| "loss": 0.4931, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.26811180832857956, |
| "grad_norm": 0.4320021869729976, |
| "learning_rate": 4.467680608365019e-05, |
| "loss": 0.4791, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.2692527096406161, |
| "grad_norm": 0.4175577806166712, |
| "learning_rate": 4.4866920152091254e-05, |
| "loss": 0.4906, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.2703936109526526, |
| "grad_norm": 0.41222578442948693, |
| "learning_rate": 4.5057034220532325e-05, |
| "loss": 0.4694, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.2715345122646891, |
| "grad_norm": 0.43338867254807784, |
| "learning_rate": 4.524714828897338e-05, |
| "loss": 0.4922, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.2726754135767256, |
| "grad_norm": 0.42813684712893474, |
| "learning_rate": 4.543726235741445e-05, |
| "loss": 0.4943, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.2738163148887621, |
| "grad_norm": 0.4819004155531821, |
| "learning_rate": 4.5627376425855515e-05, |
| "loss": 0.511, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.2749572162007986, |
| "grad_norm": 0.38917770762771053, |
| "learning_rate": 4.581749049429658e-05, |
| "loss": 0.4731, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.27609811751283514, |
| "grad_norm": 0.4212713190618949, |
| "learning_rate": 4.600760456273764e-05, |
| "loss": 0.486, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.2772390188248717, |
| "grad_norm": 0.48210207775472946, |
| "learning_rate": 4.619771863117871e-05, |
| "loss": 0.4701, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.27837992013690815, |
| "grad_norm": 0.4440172173976686, |
| "learning_rate": 4.6387832699619776e-05, |
| "loss": 0.4807, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.2795208214489447, |
| "grad_norm": 0.5697960117028452, |
| "learning_rate": 4.657794676806084e-05, |
| "loss": 0.477, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.28066172276098117, |
| "grad_norm": 0.39282938347691765, |
| "learning_rate": 4.67680608365019e-05, |
| "loss": 0.4653, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.2818026240730177, |
| "grad_norm": 0.49426639160977187, |
| "learning_rate": 4.695817490494297e-05, |
| "loss": 0.4757, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.2829435253850542, |
| "grad_norm": 0.5392025850210608, |
| "learning_rate": 4.714828897338403e-05, |
| "loss": 0.489, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.2840844266970907, |
| "grad_norm": 0.6749892159090881, |
| "learning_rate": 4.73384030418251e-05, |
| "loss": 0.4954, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.2852253280091272, |
| "grad_norm": 0.45560583490235546, |
| "learning_rate": 4.7528517110266163e-05, |
| "loss": 0.4821, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.28636622932116373, |
| "grad_norm": 0.5214154421651995, |
| "learning_rate": 4.771863117870723e-05, |
| "loss": 0.4849, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.2875071306332002, |
| "grad_norm": 0.5633817781068714, |
| "learning_rate": 4.790874524714829e-05, |
| "loss": 0.4446, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.28864803194523675, |
| "grad_norm": 0.48784588145190155, |
| "learning_rate": 4.8098859315589354e-05, |
| "loss": 0.4736, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.28978893325727323, |
| "grad_norm": 0.5919296100354379, |
| "learning_rate": 4.8288973384030424e-05, |
| "loss": 0.4792, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.29092983456930976, |
| "grad_norm": 0.5264307123179902, |
| "learning_rate": 4.847908745247148e-05, |
| "loss": 0.5007, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.29207073588134624, |
| "grad_norm": 0.5852556002815498, |
| "learning_rate": 4.866920152091255e-05, |
| "loss": 0.4684, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.2932116371933828, |
| "grad_norm": 0.5604440642451641, |
| "learning_rate": 4.8859315589353615e-05, |
| "loss": 0.4937, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.29435253850541926, |
| "grad_norm": 0.5083827458606978, |
| "learning_rate": 4.904942965779468e-05, |
| "loss": 0.4761, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.2954934398174558, |
| "grad_norm": 0.6035414465096012, |
| "learning_rate": 4.923954372623574e-05, |
| "loss": 0.4699, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.2966343411294923, |
| "grad_norm": 0.45520013209105414, |
| "learning_rate": 4.942965779467681e-05, |
| "loss": 0.4703, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.2977752424415288, |
| "grad_norm": 0.4981914121296809, |
| "learning_rate": 4.9619771863117875e-05, |
| "loss": 0.4711, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.29891614375356534, |
| "grad_norm": 0.4843901405929016, |
| "learning_rate": 4.980988593155894e-05, |
| "loss": 0.49, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.3000570450656018, |
| "grad_norm": 0.519532365295793, |
| "learning_rate": 5e-05, |
| "loss": 0.4549, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.30119794637763836, |
| "grad_norm": 0.5085787597831526, |
| "learning_rate": 4.997885835095137e-05, |
| "loss": 0.4959, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.30233884768967484, |
| "grad_norm": 0.5063823188387865, |
| "learning_rate": 4.995771670190275e-05, |
| "loss": 0.4817, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.3034797490017114, |
| "grad_norm": 0.5183643098113566, |
| "learning_rate": 4.993657505285412e-05, |
| "loss": 0.4859, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.30462065031374785, |
| "grad_norm": 0.726951443078331, |
| "learning_rate": 4.99154334038055e-05, |
| "loss": 0.508, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.3057615516257844, |
| "grad_norm": 0.5979308391864225, |
| "learning_rate": 4.989429175475687e-05, |
| "loss": 0.4751, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.30690245293782087, |
| "grad_norm": 0.5642531616180014, |
| "learning_rate": 4.987315010570825e-05, |
| "loss": 0.4618, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.3080433542498574, |
| "grad_norm": 0.4528790393447231, |
| "learning_rate": 4.9852008456659624e-05, |
| "loss": 0.5006, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.3091842555618939, |
| "grad_norm": 0.6608468146942221, |
| "learning_rate": 4.9830866807611e-05, |
| "loss": 0.4919, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.3103251568739304, |
| "grad_norm": 0.5534909136528965, |
| "learning_rate": 4.980972515856237e-05, |
| "loss": 0.4787, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.3114660581859669, |
| "grad_norm": 0.4855554939031897, |
| "learning_rate": 4.978858350951374e-05, |
| "loss": 0.4854, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.31260695949800343, |
| "grad_norm": 0.6042118406817583, |
| "learning_rate": 4.976744186046512e-05, |
| "loss": 0.5032, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.3137478608100399, |
| "grad_norm": 0.6301236427577049, |
| "learning_rate": 4.974630021141649e-05, |
| "loss": 0.504, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.31488876212207645, |
| "grad_norm": 0.43962836238867853, |
| "learning_rate": 4.972515856236787e-05, |
| "loss": 0.4693, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.31602966343411293, |
| "grad_norm": 0.6865074425382169, |
| "learning_rate": 4.970401691331924e-05, |
| "loss": 0.4984, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.31717056474614946, |
| "grad_norm": 0.5707037542530051, |
| "learning_rate": 4.968287526427062e-05, |
| "loss": 0.4728, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.31831146605818594, |
| "grad_norm": 0.48013638357531163, |
| "learning_rate": 4.966173361522199e-05, |
| "loss": 0.4897, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.3194523673702225, |
| "grad_norm": 0.5553808724683528, |
| "learning_rate": 4.9640591966173365e-05, |
| "loss": 0.47, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.32059326868225896, |
| "grad_norm": 0.6673573258804002, |
| "learning_rate": 4.9619450317124736e-05, |
| "loss": 0.4742, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.3217341699942955, |
| "grad_norm": 0.4777963168027366, |
| "learning_rate": 4.959830866807611e-05, |
| "loss": 0.5114, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.32287507130633203, |
| "grad_norm": 0.47238719588748307, |
| "learning_rate": 4.957716701902749e-05, |
| "loss": 0.4572, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.3240159726183685, |
| "grad_norm": 0.5109087160306456, |
| "learning_rate": 4.955602536997886e-05, |
| "loss": 0.5017, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.32515687393040504, |
| "grad_norm": 0.5913296622786067, |
| "learning_rate": 4.953488372093024e-05, |
| "loss": 0.4899, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.3262977752424415, |
| "grad_norm": 0.4779423075939576, |
| "learning_rate": 4.951374207188161e-05, |
| "loss": 0.4728, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.32743867655447806, |
| "grad_norm": 0.46538463444466194, |
| "learning_rate": 4.949260042283299e-05, |
| "loss": 0.4952, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.32857957786651454, |
| "grad_norm": 0.595981118622069, |
| "learning_rate": 4.947145877378436e-05, |
| "loss": 0.4871, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.3297204791785511, |
| "grad_norm": 0.4913472671952078, |
| "learning_rate": 4.9450317124735735e-05, |
| "loss": 0.4752, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.33086138049058755, |
| "grad_norm": 0.41164661389934154, |
| "learning_rate": 4.9429175475687106e-05, |
| "loss": 0.482, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.3320022818026241, |
| "grad_norm": 0.5609144536016337, |
| "learning_rate": 4.9408033826638476e-05, |
| "loss": 0.4645, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.33314318311466057, |
| "grad_norm": 0.5121918191555825, |
| "learning_rate": 4.9386892177589854e-05, |
| "loss": 0.4984, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.3342840844266971, |
| "grad_norm": 0.6135026464347259, |
| "learning_rate": 4.9365750528541225e-05, |
| "loss": 0.4792, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.3354249857387336, |
| "grad_norm": 0.4892389294352073, |
| "learning_rate": 4.93446088794926e-05, |
| "loss": 0.4639, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.3365658870507701, |
| "grad_norm": 0.7410520792633273, |
| "learning_rate": 4.932346723044397e-05, |
| "loss": 0.4808, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.3377067883628066, |
| "grad_norm": 0.46888189797192714, |
| "learning_rate": 4.930232558139535e-05, |
| "loss": 0.4832, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.33884768967484313, |
| "grad_norm": 0.6007793767794352, |
| "learning_rate": 4.928118393234673e-05, |
| "loss": 0.4716, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.3399885909868796, |
| "grad_norm": 0.69216963559477, |
| "learning_rate": 4.9260042283298105e-05, |
| "loss": 0.5043, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.34112949229891615, |
| "grad_norm": 0.4687938078596549, |
| "learning_rate": 4.9238900634249476e-05, |
| "loss": 0.4992, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.34227039361095263, |
| "grad_norm": 0.7268590781702684, |
| "learning_rate": 4.9217758985200846e-05, |
| "loss": 0.4816, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.34341129492298916, |
| "grad_norm": 0.45936413381359864, |
| "learning_rate": 4.9196617336152224e-05, |
| "loss": 0.4453, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.34455219623502564, |
| "grad_norm": 0.4294250943742063, |
| "learning_rate": 4.9175475687103595e-05, |
| "loss": 0.468, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.3456930975470622, |
| "grad_norm": 0.5527427692618347, |
| "learning_rate": 4.915433403805497e-05, |
| "loss": 0.4722, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.3468339988590987, |
| "grad_norm": 0.5071440857492974, |
| "learning_rate": 4.913319238900634e-05, |
| "loss": 0.4816, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.3479749001711352, |
| "grad_norm": 0.4686500913435551, |
| "learning_rate": 4.911205073995772e-05, |
| "loss": 0.4473, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.34911580148317173, |
| "grad_norm": 0.5883985093169647, |
| "learning_rate": 4.909090909090909e-05, |
| "loss": 0.4721, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.3502567027952082, |
| "grad_norm": 0.4591838303775839, |
| "learning_rate": 4.906976744186046e-05, |
| "loss": 0.4553, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.35139760410724474, |
| "grad_norm": 0.5549203579767384, |
| "learning_rate": 4.904862579281184e-05, |
| "loss": 0.4806, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.3525385054192812, |
| "grad_norm": 0.5707305006848794, |
| "learning_rate": 4.9027484143763217e-05, |
| "loss": 0.462, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.35367940673131776, |
| "grad_norm": 0.5070136654928392, |
| "learning_rate": 4.9006342494714594e-05, |
| "loss": 0.4914, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.35482030804335424, |
| "grad_norm": 0.513783859903657, |
| "learning_rate": 4.8985200845665965e-05, |
| "loss": 0.4563, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.3559612093553908, |
| "grad_norm": 0.3903353048037027, |
| "learning_rate": 4.896405919661734e-05, |
| "loss": 0.4646, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.35710211066742725, |
| "grad_norm": 0.4944624580259425, |
| "learning_rate": 4.894291754756871e-05, |
| "loss": 0.4794, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.3582430119794638, |
| "grad_norm": 0.4650438379271747, |
| "learning_rate": 4.892177589852009e-05, |
| "loss": 0.4734, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.35938391329150027, |
| "grad_norm": 0.40152357897892427, |
| "learning_rate": 4.890063424947146e-05, |
| "loss": 0.4558, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.3605248146035368, |
| "grad_norm": 0.4062828773190652, |
| "learning_rate": 4.887949260042283e-05, |
| "loss": 0.4424, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.3616657159155733, |
| "grad_norm": 0.4013001138970794, |
| "learning_rate": 4.885835095137421e-05, |
| "loss": 0.4799, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.3628066172276098, |
| "grad_norm": 0.369004901568825, |
| "learning_rate": 4.883720930232558e-05, |
| "loss": 0.4653, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.3639475185396463, |
| "grad_norm": 0.48032108696732456, |
| "learning_rate": 4.881606765327696e-05, |
| "loss": 0.469, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.36508841985168283, |
| "grad_norm": 0.405515724241134, |
| "learning_rate": 4.879492600422833e-05, |
| "loss": 0.4697, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.3662293211637193, |
| "grad_norm": 0.4030716686771247, |
| "learning_rate": 4.8773784355179705e-05, |
| "loss": 0.4743, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.36737022247575585, |
| "grad_norm": 0.4611550051218631, |
| "learning_rate": 4.875264270613108e-05, |
| "loss": 0.4639, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.3685111237877924, |
| "grad_norm": 0.49050018609555823, |
| "learning_rate": 4.873150105708246e-05, |
| "loss": 0.4709, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.36965202509982886, |
| "grad_norm": 0.44515733384409556, |
| "learning_rate": 4.871035940803383e-05, |
| "loss": 0.4409, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.3707929264118654, |
| "grad_norm": 0.405005368775498, |
| "learning_rate": 4.86892177589852e-05, |
| "loss": 0.4662, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.3719338277239019, |
| "grad_norm": 0.44424805271869844, |
| "learning_rate": 4.866807610993658e-05, |
| "loss": 0.4919, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.3730747290359384, |
| "grad_norm": 0.47916627855366895, |
| "learning_rate": 4.864693446088795e-05, |
| "loss": 0.4665, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.3742156303479749, |
| "grad_norm": 0.4591787529396741, |
| "learning_rate": 4.862579281183933e-05, |
| "loss": 0.4751, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.3753565316600114, |
| "grad_norm": 0.3863033452781703, |
| "learning_rate": 4.86046511627907e-05, |
| "loss": 0.4666, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.3764974329720479, |
| "grad_norm": 0.4267031556202453, |
| "learning_rate": 4.8583509513742075e-05, |
| "loss": 0.4617, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.37763833428408444, |
| "grad_norm": 0.4566590620989737, |
| "learning_rate": 4.8562367864693446e-05, |
| "loss": 0.4568, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.3787792355961209, |
| "grad_norm": 0.3909866338741756, |
| "learning_rate": 4.8541226215644824e-05, |
| "loss": 0.4431, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.37992013690815746, |
| "grad_norm": 0.4445867673454024, |
| "learning_rate": 4.8520084566596194e-05, |
| "loss": 0.4593, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.38106103822019394, |
| "grad_norm": 0.4002731387396594, |
| "learning_rate": 4.849894291754757e-05, |
| "loss": 0.4411, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.3822019395322305, |
| "grad_norm": 0.4133903450308309, |
| "learning_rate": 4.847780126849894e-05, |
| "loss": 0.4781, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.38334284084426695, |
| "grad_norm": 0.47770252676093944, |
| "learning_rate": 4.845665961945032e-05, |
| "loss": 0.4883, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.3844837421563035, |
| "grad_norm": 0.3666231185699412, |
| "learning_rate": 4.84355179704017e-05, |
| "loss": 0.4824, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.38562464346833997, |
| "grad_norm": 0.48134048114822264, |
| "learning_rate": 4.841437632135307e-05, |
| "loss": 0.4586, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.3867655447803765, |
| "grad_norm": 0.4186276272086947, |
| "learning_rate": 4.8393234672304445e-05, |
| "loss": 0.4743, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.387906446092413, |
| "grad_norm": 0.4320564974138227, |
| "learning_rate": 4.8372093023255816e-05, |
| "loss": 0.4687, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.3890473474044495, |
| "grad_norm": 0.4969726708200199, |
| "learning_rate": 4.8350951374207194e-05, |
| "loss": 0.4708, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.390188248716486, |
| "grad_norm": 0.476437641405027, |
| "learning_rate": 4.8329809725158564e-05, |
| "loss": 0.4477, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.39132915002852253, |
| "grad_norm": 0.4953362791687895, |
| "learning_rate": 4.8308668076109935e-05, |
| "loss": 0.4811, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.39247005134055907, |
| "grad_norm": 0.551611928855792, |
| "learning_rate": 4.828752642706131e-05, |
| "loss": 0.4738, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.39361095265259555, |
| "grad_norm": 0.40738026772406793, |
| "learning_rate": 4.826638477801268e-05, |
| "loss": 0.4615, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.3947518539646321, |
| "grad_norm": 0.4782666268905305, |
| "learning_rate": 4.824524312896406e-05, |
| "loss": 0.4731, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.39589275527666856, |
| "grad_norm": 0.44504504658811994, |
| "learning_rate": 4.822410147991543e-05, |
| "loss": 0.4677, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.3970336565887051, |
| "grad_norm": 0.4533234538781712, |
| "learning_rate": 4.820295983086681e-05, |
| "loss": 0.4617, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.3981745579007416, |
| "grad_norm": 0.46248079465133807, |
| "learning_rate": 4.8181818181818186e-05, |
| "loss": 0.4932, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.3993154592127781, |
| "grad_norm": 0.4780267679080641, |
| "learning_rate": 4.8160676532769564e-05, |
| "loss": 0.4599, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.4004563605248146, |
| "grad_norm": 0.4448999136385355, |
| "learning_rate": 4.8139534883720934e-05, |
| "loss": 0.487, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.4015972618368511, |
| "grad_norm": 0.602985396056506, |
| "learning_rate": 4.8118393234672305e-05, |
| "loss": 0.4807, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.4027381631488876, |
| "grad_norm": 0.4400835155350816, |
| "learning_rate": 4.809725158562368e-05, |
| "loss": 0.4614, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.40387906446092414, |
| "grad_norm": 0.6103639448472573, |
| "learning_rate": 4.807610993657505e-05, |
| "loss": 0.4688, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.4050199657729606, |
| "grad_norm": 0.48612080977613853, |
| "learning_rate": 4.805496828752643e-05, |
| "loss": 0.4554, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.40616086708499716, |
| "grad_norm": 0.5220692500810554, |
| "learning_rate": 4.80338266384778e-05, |
| "loss": 0.4603, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.40730176839703364, |
| "grad_norm": 0.5248213992380287, |
| "learning_rate": 4.801268498942918e-05, |
| "loss": 0.4623, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.40844266970907017, |
| "grad_norm": 0.6420774579491392, |
| "learning_rate": 4.799154334038055e-05, |
| "loss": 0.4756, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.40958357102110665, |
| "grad_norm": 0.4352271420359283, |
| "learning_rate": 4.797040169133193e-05, |
| "loss": 0.4516, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.4107244723331432, |
| "grad_norm": 0.658584341212213, |
| "learning_rate": 4.79492600422833e-05, |
| "loss": 0.4763, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.41186537364517967, |
| "grad_norm": 0.6084351588841551, |
| "learning_rate": 4.7928118393234675e-05, |
| "loss": 0.4798, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.4130062749572162, |
| "grad_norm": 0.5771682284997688, |
| "learning_rate": 4.790697674418605e-05, |
| "loss": 0.481, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.41414717626925274, |
| "grad_norm": 0.6043942596380553, |
| "learning_rate": 4.788583509513742e-05, |
| "loss": 0.4688, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.4152880775812892, |
| "grad_norm": 0.45719477020974547, |
| "learning_rate": 4.78646934460888e-05, |
| "loss": 0.4501, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.41642897889332575, |
| "grad_norm": 0.5741113368011214, |
| "learning_rate": 4.784355179704017e-05, |
| "loss": 0.4606, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.41756988020536223, |
| "grad_norm": 0.6037611112612937, |
| "learning_rate": 4.782241014799155e-05, |
| "loss": 0.4681, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.41871078151739877, |
| "grad_norm": 0.4385849776699442, |
| "learning_rate": 4.780126849894292e-05, |
| "loss": 0.4401, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.41985168282943525, |
| "grad_norm": 0.4484383147175151, |
| "learning_rate": 4.77801268498943e-05, |
| "loss": 0.4737, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.4209925841414718, |
| "grad_norm": 0.5756231728396068, |
| "learning_rate": 4.775898520084567e-05, |
| "loss": 0.4646, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.42213348545350826, |
| "grad_norm": 0.39706507133213986, |
| "learning_rate": 4.773784355179704e-05, |
| "loss": 0.4645, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.4232743867655448, |
| "grad_norm": 0.5924514871872969, |
| "learning_rate": 4.7716701902748416e-05, |
| "loss": 0.461, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.4244152880775813, |
| "grad_norm": 0.41316719683932873, |
| "learning_rate": 4.7695560253699787e-05, |
| "loss": 0.4785, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.4255561893896178, |
| "grad_norm": 0.39683025268881833, |
| "learning_rate": 4.7674418604651164e-05, |
| "loss": 0.4591, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.4266970907016543, |
| "grad_norm": 0.48000498985785206, |
| "learning_rate": 4.765327695560254e-05, |
| "loss": 0.4414, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.4278379920136908, |
| "grad_norm": 0.3591998859631829, |
| "learning_rate": 4.763213530655392e-05, |
| "loss": 0.471, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.4289788933257273, |
| "grad_norm": 0.4431516881768443, |
| "learning_rate": 4.761099365750529e-05, |
| "loss": 0.4597, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.43011979463776384, |
| "grad_norm": 0.36560648233369786, |
| "learning_rate": 4.758985200845667e-05, |
| "loss": 0.4682, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.4312606959498003, |
| "grad_norm": 0.4558699153147223, |
| "learning_rate": 4.756871035940804e-05, |
| "loss": 0.4499, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.43240159726183686, |
| "grad_norm": 0.5460346505513063, |
| "learning_rate": 4.754756871035941e-05, |
| "loss": 0.4539, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.43354249857387334, |
| "grad_norm": 0.4945804014357001, |
| "learning_rate": 4.7526427061310786e-05, |
| "loss": 0.4668, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.43468339988590987, |
| "grad_norm": 0.381982606728481, |
| "learning_rate": 4.7505285412262157e-05, |
| "loss": 0.4629, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.43582430119794635, |
| "grad_norm": 0.5075561122741824, |
| "learning_rate": 4.7484143763213534e-05, |
| "loss": 0.4419, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.4369652025099829, |
| "grad_norm": 0.4890820416949409, |
| "learning_rate": 4.7463002114164905e-05, |
| "loss": 0.474, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.4381061038220194, |
| "grad_norm": 0.45099733935651065, |
| "learning_rate": 4.744186046511628e-05, |
| "loss": 0.4607, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.4392470051340559, |
| "grad_norm": 0.46389402376612976, |
| "learning_rate": 4.742071881606765e-05, |
| "loss": 0.4529, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.44038790644609244, |
| "grad_norm": 0.5176225091984493, |
| "learning_rate": 4.739957716701903e-05, |
| "loss": 0.4797, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.4415288077581289, |
| "grad_norm": 0.5614444078753256, |
| "learning_rate": 4.73784355179704e-05, |
| "loss": 0.4415, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.44266970907016545, |
| "grad_norm": 0.5620355202952496, |
| "learning_rate": 4.735729386892178e-05, |
| "loss": 0.4901, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.44381061038220193, |
| "grad_norm": 0.3554047434699461, |
| "learning_rate": 4.7336152219873156e-05, |
| "loss": 0.4486, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.44495151169423847, |
| "grad_norm": 0.565055930787776, |
| "learning_rate": 4.7315010570824527e-05, |
| "loss": 0.4794, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.44609241300627495, |
| "grad_norm": 0.40372827032444986, |
| "learning_rate": 4.7293868921775904e-05, |
| "loss": 0.4549, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.4472333143183115, |
| "grad_norm": 0.41175642665695783, |
| "learning_rate": 4.7272727272727275e-05, |
| "loss": 0.4515, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.44837421563034796, |
| "grad_norm": 0.5428841648637599, |
| "learning_rate": 4.725158562367865e-05, |
| "loss": 0.4688, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.4495151169423845, |
| "grad_norm": 0.3845089691572888, |
| "learning_rate": 4.723044397463002e-05, |
| "loss": 0.4599, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.450656018254421, |
| "grad_norm": 0.5575460169748139, |
| "learning_rate": 4.7209302325581394e-05, |
| "loss": 0.46, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.4517969195664575, |
| "grad_norm": 0.47319289284357274, |
| "learning_rate": 4.718816067653277e-05, |
| "loss": 0.4743, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.452937820878494, |
| "grad_norm": 0.421430478800123, |
| "learning_rate": 4.716701902748414e-05, |
| "loss": 0.487, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.4540787221905305, |
| "grad_norm": 0.6101298495936283, |
| "learning_rate": 4.714587737843552e-05, |
| "loss": 0.4225, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.455219623502567, |
| "grad_norm": 0.40737340116351733, |
| "learning_rate": 4.712473572938689e-05, |
| "loss": 0.4783, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.45636052481460354, |
| "grad_norm": 0.46178856188238293, |
| "learning_rate": 4.710359408033827e-05, |
| "loss": 0.5043, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.45750142612664, |
| "grad_norm": 0.36605473430041985, |
| "learning_rate": 4.7082452431289645e-05, |
| "loss": 0.4727, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.45864232743867656, |
| "grad_norm": 0.4601612072292561, |
| "learning_rate": 4.706131078224102e-05, |
| "loss": 0.4729, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.4597832287507131, |
| "grad_norm": 0.39229637932652944, |
| "learning_rate": 4.704016913319239e-05, |
| "loss": 0.4318, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.46092413006274957, |
| "grad_norm": 0.4978174882138765, |
| "learning_rate": 4.7019027484143764e-05, |
| "loss": 0.4593, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.4620650313747861, |
| "grad_norm": 0.43613960694098686, |
| "learning_rate": 4.699788583509514e-05, |
| "loss": 0.4863, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.4632059326868226, |
| "grad_norm": 0.4748040163303423, |
| "learning_rate": 4.697674418604651e-05, |
| "loss": 0.4736, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.4643468339988591, |
| "grad_norm": 0.4507681010912006, |
| "learning_rate": 4.695560253699789e-05, |
| "loss": 0.4537, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.4654877353108956, |
| "grad_norm": 0.3923537491739542, |
| "learning_rate": 4.693446088794926e-05, |
| "loss": 0.4417, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.46662863662293214, |
| "grad_norm": 0.4819511355971476, |
| "learning_rate": 4.691331923890064e-05, |
| "loss": 0.4409, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.4677695379349686, |
| "grad_norm": 0.46716431084892146, |
| "learning_rate": 4.689217758985201e-05, |
| "loss": 0.4361, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.46891043924700515, |
| "grad_norm": 0.39268914274801164, |
| "learning_rate": 4.6871035940803386e-05, |
| "loss": 0.4527, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.47005134055904163, |
| "grad_norm": 0.36742442071038406, |
| "learning_rate": 4.6849894291754756e-05, |
| "loss": 0.441, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.47119224187107817, |
| "grad_norm": 0.3402798863969448, |
| "learning_rate": 4.6828752642706134e-05, |
| "loss": 0.4611, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.47233314318311465, |
| "grad_norm": 0.4783227235848215, |
| "learning_rate": 4.680761099365751e-05, |
| "loss": 0.4468, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.4734740444951512, |
| "grad_norm": 0.41760869890010066, |
| "learning_rate": 4.678646934460888e-05, |
| "loss": 0.419, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.47461494580718766, |
| "grad_norm": 0.45492695505371195, |
| "learning_rate": 4.676532769556026e-05, |
| "loss": 0.4539, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.4757558471192242, |
| "grad_norm": 0.4242418558060538, |
| "learning_rate": 4.674418604651163e-05, |
| "loss": 0.439, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.4768967484312607, |
| "grad_norm": 0.44887147636919633, |
| "learning_rate": 4.672304439746301e-05, |
| "loss": 0.4681, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.4780376497432972, |
| "grad_norm": 0.5450796324350122, |
| "learning_rate": 4.670190274841438e-05, |
| "loss": 0.4648, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.4791785510553337, |
| "grad_norm": 0.4650341425698157, |
| "learning_rate": 4.6680761099365756e-05, |
| "loss": 0.4831, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.4803194523673702, |
| "grad_norm": 0.48371809386595394, |
| "learning_rate": 4.6659619450317126e-05, |
| "loss": 0.45, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.4814603536794067, |
| "grad_norm": 0.34547311035282235, |
| "learning_rate": 4.66384778012685e-05, |
| "loss": 0.4553, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.48260125499144324, |
| "grad_norm": 0.4630732324267297, |
| "learning_rate": 4.6617336152219874e-05, |
| "loss": 0.4592, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.4837421563034798, |
| "grad_norm": 0.3564440597661613, |
| "learning_rate": 4.6596194503171245e-05, |
| "loss": 0.4506, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.48488305761551626, |
| "grad_norm": 0.39413931707375205, |
| "learning_rate": 4.657505285412262e-05, |
| "loss": 0.4722, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.4860239589275528, |
| "grad_norm": 0.377062944638173, |
| "learning_rate": 4.6553911205074e-05, |
| "loss": 0.4587, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.48716486023958927, |
| "grad_norm": 0.3624696717743631, |
| "learning_rate": 4.653276955602537e-05, |
| "loss": 0.4587, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.4883057615516258, |
| "grad_norm": 0.3380749179957887, |
| "learning_rate": 4.651162790697675e-05, |
| "loss": 0.4456, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.4894466628636623, |
| "grad_norm": 0.3928759661904698, |
| "learning_rate": 4.6490486257928126e-05, |
| "loss": 0.4789, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.4905875641756988, |
| "grad_norm": 0.36642927400424125, |
| "learning_rate": 4.6469344608879496e-05, |
| "loss": 0.465, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.4917284654877353, |
| "grad_norm": 0.4401740516737291, |
| "learning_rate": 4.644820295983087e-05, |
| "loss": 0.4472, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.49286936679977184, |
| "grad_norm": 0.4635191449978593, |
| "learning_rate": 4.6427061310782244e-05, |
| "loss": 0.4637, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.4940102681118083, |
| "grad_norm": 0.4321430146961908, |
| "learning_rate": 4.6405919661733615e-05, |
| "loss": 0.4705, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.49515116942384485, |
| "grad_norm": 0.4583926087622166, |
| "learning_rate": 4.638477801268499e-05, |
| "loss": 0.45, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.49629207073588133, |
| "grad_norm": 0.37924166776725926, |
| "learning_rate": 4.636363636363636e-05, |
| "loss": 0.4635, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.49743297204791787, |
| "grad_norm": 0.3929198503783768, |
| "learning_rate": 4.634249471458774e-05, |
| "loss": 0.4592, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.49857387335995434, |
| "grad_norm": 0.38813120210535945, |
| "learning_rate": 4.632135306553911e-05, |
| "loss": 0.4705, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.4997147746719909, |
| "grad_norm": 0.4762099160364005, |
| "learning_rate": 4.630021141649049e-05, |
| "loss": 0.4792, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.5008556759840274, |
| "grad_norm": 0.46547600756258845, |
| "learning_rate": 4.627906976744186e-05, |
| "loss": 0.4808, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.5019965772960638, |
| "grad_norm": 0.4606755516881231, |
| "learning_rate": 4.625792811839324e-05, |
| "loss": 0.429, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.5031374786081004, |
| "grad_norm": 0.4462475824492918, |
| "learning_rate": 4.6236786469344614e-05, |
| "loss": 0.4689, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.5042783799201369, |
| "grad_norm": 0.47174621173295, |
| "learning_rate": 4.6215644820295985e-05, |
| "loss": 0.4489, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.5054192812321734, |
| "grad_norm": 0.5015710382124482, |
| "learning_rate": 4.619450317124736e-05, |
| "loss": 0.4939, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.50656018254421, |
| "grad_norm": 0.3818394201165467, |
| "learning_rate": 4.617336152219873e-05, |
| "loss": 0.4576, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.5077010838562465, |
| "grad_norm": 0.4225972875649922, |
| "learning_rate": 4.615221987315011e-05, |
| "loss": 0.4654, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.5088419851682829, |
| "grad_norm": 0.40882987228929407, |
| "learning_rate": 4.613107822410148e-05, |
| "loss": 0.4527, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.5099828864803194, |
| "grad_norm": 0.5069671408649863, |
| "learning_rate": 4.610993657505286e-05, |
| "loss": 0.4576, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.511123787792356, |
| "grad_norm": 0.4386417973131366, |
| "learning_rate": 4.608879492600423e-05, |
| "loss": 0.4552, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.5122646891043925, |
| "grad_norm": 0.4747784151033817, |
| "learning_rate": 4.60676532769556e-05, |
| "loss": 0.4561, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.513405590416429, |
| "grad_norm": 0.40932899748081414, |
| "learning_rate": 4.604651162790698e-05, |
| "loss": 0.467, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.5145464917284654, |
| "grad_norm": 0.40171173472315463, |
| "learning_rate": 4.602536997885835e-05, |
| "loss": 0.4696, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.515687393040502, |
| "grad_norm": 0.46145868641103904, |
| "learning_rate": 4.6004228329809726e-05, |
| "loss": 0.4737, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.5168282943525385, |
| "grad_norm": 0.38520232110740626, |
| "learning_rate": 4.59830866807611e-05, |
| "loss": 0.4606, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.517969195664575, |
| "grad_norm": 0.4728138540537639, |
| "learning_rate": 4.596194503171248e-05, |
| "loss": 0.4738, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.5191100969766115, |
| "grad_norm": 0.40077436012399575, |
| "learning_rate": 4.594080338266385e-05, |
| "loss": 0.4459, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.5202509982886481, |
| "grad_norm": 0.595822283116385, |
| "learning_rate": 4.591966173361523e-05, |
| "loss": 0.4549, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.5213918996006845, |
| "grad_norm": 0.3754288530168292, |
| "learning_rate": 4.58985200845666e-05, |
| "loss": 0.441, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.522532800912721, |
| "grad_norm": 0.4243930283188664, |
| "learning_rate": 4.587737843551797e-05, |
| "loss": 0.4319, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.5236737022247575, |
| "grad_norm": 0.453192510555693, |
| "learning_rate": 4.585623678646935e-05, |
| "loss": 0.4745, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.5248146035367941, |
| "grad_norm": 0.3976339179450837, |
| "learning_rate": 4.583509513742072e-05, |
| "loss": 0.458, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.5259555048488306, |
| "grad_norm": 0.42197906453695416, |
| "learning_rate": 4.5813953488372096e-05, |
| "loss": 0.4591, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.5270964061608671, |
| "grad_norm": 0.35718537422327623, |
| "learning_rate": 4.579281183932347e-05, |
| "loss": 0.4741, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.5282373074729035, |
| "grad_norm": 0.409421659544212, |
| "learning_rate": 4.5771670190274844e-05, |
| "loss": 0.4496, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.5293782087849401, |
| "grad_norm": 0.4168650729475048, |
| "learning_rate": 4.5750528541226215e-05, |
| "loss": 0.4372, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.5305191100969766, |
| "grad_norm": 0.3801742006510912, |
| "learning_rate": 4.572938689217759e-05, |
| "loss": 0.4652, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.5316600114090131, |
| "grad_norm": 0.42600016168123134, |
| "learning_rate": 4.570824524312897e-05, |
| "loss": 0.4416, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.5328009127210497, |
| "grad_norm": 0.46602265019553896, |
| "learning_rate": 4.568710359408034e-05, |
| "loss": 0.4805, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.5339418140330862, |
| "grad_norm": 0.48053850937804116, |
| "learning_rate": 4.566596194503172e-05, |
| "loss": 0.4575, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.5350827153451226, |
| "grad_norm": 0.3986350097784419, |
| "learning_rate": 4.564482029598309e-05, |
| "loss": 0.4786, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.5362236166571591, |
| "grad_norm": 0.4807794088581916, |
| "learning_rate": 4.5623678646934466e-05, |
| "loss": 0.4512, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.5373645179691957, |
| "grad_norm": 0.433766824087754, |
| "learning_rate": 4.560253699788584e-05, |
| "loss": 0.4459, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.5385054192812322, |
| "grad_norm": 0.4043451924904348, |
| "learning_rate": 4.5581395348837214e-05, |
| "loss": 0.4389, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.5396463205932687, |
| "grad_norm": 0.4197291913156009, |
| "learning_rate": 4.5560253699788585e-05, |
| "loss": 0.4603, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.5407872219053051, |
| "grad_norm": 0.4118103847467949, |
| "learning_rate": 4.553911205073996e-05, |
| "loss": 0.4201, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.5419281232173417, |
| "grad_norm": 0.4168051352199324, |
| "learning_rate": 4.551797040169133e-05, |
| "loss": 0.4356, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.5430690245293782, |
| "grad_norm": 0.41758694905260396, |
| "learning_rate": 4.5496828752642704e-05, |
| "loss": 0.4762, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.5442099258414147, |
| "grad_norm": 0.3919808019373763, |
| "learning_rate": 4.547568710359408e-05, |
| "loss": 0.4476, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.5453508271534512, |
| "grad_norm": 0.40308219490841923, |
| "learning_rate": 4.545454545454546e-05, |
| "loss": 0.4436, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.5464917284654878, |
| "grad_norm": 0.39908396946636293, |
| "learning_rate": 4.543340380549683e-05, |
| "loss": 0.4541, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.5476326297775242, |
| "grad_norm": 0.46934983540992264, |
| "learning_rate": 4.541226215644821e-05, |
| "loss": 0.4576, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.5487735310895607, |
| "grad_norm": 0.3854077371954608, |
| "learning_rate": 4.5391120507399584e-05, |
| "loss": 0.4434, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.5499144324015972, |
| "grad_norm": 0.5319242645223065, |
| "learning_rate": 4.5369978858350955e-05, |
| "loss": 0.4529, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.5510553337136338, |
| "grad_norm": 0.44290253891327724, |
| "learning_rate": 4.5348837209302326e-05, |
| "loss": 0.4594, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.5521962350256703, |
| "grad_norm": 0.4728615530783417, |
| "learning_rate": 4.53276955602537e-05, |
| "loss": 0.4448, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.5533371363377068, |
| "grad_norm": 0.4404097761821421, |
| "learning_rate": 4.5306553911205074e-05, |
| "loss": 0.4426, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.5544780376497433, |
| "grad_norm": 0.4197335397577139, |
| "learning_rate": 4.528541226215645e-05, |
| "loss": 0.4493, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.5556189389617798, |
| "grad_norm": 0.477125915431258, |
| "learning_rate": 4.526427061310782e-05, |
| "loss": 0.438, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.5567598402738163, |
| "grad_norm": 0.4696081174949721, |
| "learning_rate": 4.52431289640592e-05, |
| "loss": 0.455, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.5579007415858528, |
| "grad_norm": 0.47219356202492724, |
| "learning_rate": 4.522198731501057e-05, |
| "loss": 0.4576, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.5590416428978894, |
| "grad_norm": 0.446771837110723, |
| "learning_rate": 4.520084566596195e-05, |
| "loss": 0.4403, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.5601825442099259, |
| "grad_norm": 0.45841656750886967, |
| "learning_rate": 4.517970401691332e-05, |
| "loss": 0.4703, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.5613234455219623, |
| "grad_norm": 0.45874829723391797, |
| "learning_rate": 4.5158562367864696e-05, |
| "loss": 0.4514, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.5624643468339988, |
| "grad_norm": 0.37739002522158893, |
| "learning_rate": 4.513742071881607e-05, |
| "loss": 0.441, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.5636052481460354, |
| "grad_norm": 0.43801802347474533, |
| "learning_rate": 4.5116279069767444e-05, |
| "loss": 0.4535, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.5647461494580719, |
| "grad_norm": 0.41614070993796926, |
| "learning_rate": 4.509513742071882e-05, |
| "loss": 0.4629, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.5658870507701084, |
| "grad_norm": 0.4865275595264474, |
| "learning_rate": 4.507399577167019e-05, |
| "loss": 0.4416, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.5670279520821448, |
| "grad_norm": 0.40655687881400593, |
| "learning_rate": 4.505285412262157e-05, |
| "loss": 0.4574, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.5681688533941814, |
| "grad_norm": 0.39883282407381804, |
| "learning_rate": 4.503171247357294e-05, |
| "loss": 0.4786, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.5693097547062179, |
| "grad_norm": 0.37794544376846617, |
| "learning_rate": 4.501057082452432e-05, |
| "loss": 0.435, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.5704506560182544, |
| "grad_norm": 0.4247854841899775, |
| "learning_rate": 4.498942917547569e-05, |
| "loss": 0.4476, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.5715915573302909, |
| "grad_norm": 0.43645967584889545, |
| "learning_rate": 4.496828752642706e-05, |
| "loss": 0.4454, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.5727324586423275, |
| "grad_norm": 0.5187989306988888, |
| "learning_rate": 4.4947145877378436e-05, |
| "loss": 0.4461, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.573873359954364, |
| "grad_norm": 0.37308352823075, |
| "learning_rate": 4.492600422832981e-05, |
| "loss": 0.4341, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.5750142612664004, |
| "grad_norm": 0.5932345231021587, |
| "learning_rate": 4.4904862579281184e-05, |
| "loss": 0.4484, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.576155162578437, |
| "grad_norm": 0.3279197317008337, |
| "learning_rate": 4.488372093023256e-05, |
| "loss": 0.4491, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.5772960638904735, |
| "grad_norm": 0.6100565413888734, |
| "learning_rate": 4.486257928118394e-05, |
| "loss": 0.4376, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.57843696520251, |
| "grad_norm": 0.42409976663616067, |
| "learning_rate": 4.484143763213531e-05, |
| "loss": 0.4637, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.5795778665145465, |
| "grad_norm": 0.47939120843371974, |
| "learning_rate": 4.482029598308669e-05, |
| "loss": 0.4512, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.580718767826583, |
| "grad_norm": 0.6442312396725929, |
| "learning_rate": 4.479915433403806e-05, |
| "loss": 0.4742, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.5818596691386195, |
| "grad_norm": 0.44420537485975053, |
| "learning_rate": 4.477801268498943e-05, |
| "loss": 0.4414, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.583000570450656, |
| "grad_norm": 0.4742282726695118, |
| "learning_rate": 4.4756871035940806e-05, |
| "loss": 0.438, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.5841414717626925, |
| "grad_norm": 0.38983875431488607, |
| "learning_rate": 4.473572938689218e-05, |
| "loss": 0.4615, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.5852823730747291, |
| "grad_norm": 0.47490095846340535, |
| "learning_rate": 4.4714587737843555e-05, |
| "loss": 0.4709, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.5864232743867656, |
| "grad_norm": 0.39696950465421044, |
| "learning_rate": 4.4693446088794925e-05, |
| "loss": 0.4367, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.587564175698802, |
| "grad_norm": 0.45007345784469194, |
| "learning_rate": 4.46723044397463e-05, |
| "loss": 0.4566, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.5887050770108385, |
| "grad_norm": 0.4198390996183237, |
| "learning_rate": 4.465116279069767e-05, |
| "loss": 0.4286, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.5898459783228751, |
| "grad_norm": 0.3779400456562858, |
| "learning_rate": 4.463002114164905e-05, |
| "loss": 0.4437, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.5909868796349116, |
| "grad_norm": 0.41728607717894356, |
| "learning_rate": 4.460887949260043e-05, |
| "loss": 0.4614, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.5921277809469481, |
| "grad_norm": 0.39261690895532547, |
| "learning_rate": 4.45877378435518e-05, |
| "loss": 0.4332, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.5932686822589845, |
| "grad_norm": 0.38020096492819416, |
| "learning_rate": 4.4566596194503176e-05, |
| "loss": 0.4549, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.5944095835710211, |
| "grad_norm": 0.4517763945083566, |
| "learning_rate": 4.454545454545455e-05, |
| "loss": 0.45, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.5955504848830576, |
| "grad_norm": 0.3645340466319383, |
| "learning_rate": 4.4524312896405925e-05, |
| "loss": 0.4473, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.5966913861950941, |
| "grad_norm": 0.355065344453176, |
| "learning_rate": 4.4503171247357295e-05, |
| "loss": 0.4356, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.5978322875071307, |
| "grad_norm": 0.37890494975171685, |
| "learning_rate": 4.448202959830867e-05, |
| "loss": 0.4399, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.5989731888191672, |
| "grad_norm": 0.4555107224314838, |
| "learning_rate": 4.4460887949260043e-05, |
| "loss": 0.4521, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.6001140901312036, |
| "grad_norm": 0.4362531451617325, |
| "learning_rate": 4.443974630021142e-05, |
| "loss": 0.4389, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.6012549914432401, |
| "grad_norm": 0.4126012364452474, |
| "learning_rate": 4.441860465116279e-05, |
| "loss": 0.4392, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.6023958927552767, |
| "grad_norm": 0.4250152394750049, |
| "learning_rate": 4.439746300211416e-05, |
| "loss": 0.4513, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.6035367940673132, |
| "grad_norm": 0.5082523070659859, |
| "learning_rate": 4.437632135306554e-05, |
| "loss": 0.4879, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.6046776953793497, |
| "grad_norm": 0.4238617257851682, |
| "learning_rate": 4.435517970401691e-05, |
| "loss": 0.4898, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.6058185966913862, |
| "grad_norm": 0.5041686903889592, |
| "learning_rate": 4.433403805496829e-05, |
| "loss": 0.4484, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.6069594980034227, |
| "grad_norm": 0.3551559352333077, |
| "learning_rate": 4.4312896405919665e-05, |
| "loss": 0.4463, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.6081003993154592, |
| "grad_norm": 0.4417858792921517, |
| "learning_rate": 4.429175475687104e-05, |
| "loss": 0.4341, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.6092413006274957, |
| "grad_norm": 0.36493597171615005, |
| "learning_rate": 4.4270613107822413e-05, |
| "loss": 0.4358, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.6103822019395322, |
| "grad_norm": 0.4107588079345914, |
| "learning_rate": 4.424947145877379e-05, |
| "loss": 0.4407, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.6115231032515688, |
| "grad_norm": 0.4463977858001759, |
| "learning_rate": 4.422832980972516e-05, |
| "loss": 0.4586, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.6126640045636053, |
| "grad_norm": 0.3835290921335774, |
| "learning_rate": 4.420718816067653e-05, |
| "loss": 0.4686, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.6138049058756417, |
| "grad_norm": 0.4235575449577358, |
| "learning_rate": 4.418604651162791e-05, |
| "loss": 0.4529, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.6149458071876782, |
| "grad_norm": 0.37221690443646094, |
| "learning_rate": 4.416490486257928e-05, |
| "loss": 0.4449, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.6160867084997148, |
| "grad_norm": 0.4607925845239333, |
| "learning_rate": 4.414376321353066e-05, |
| "loss": 0.4696, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.6172276098117513, |
| "grad_norm": 0.4643989526274435, |
| "learning_rate": 4.412262156448203e-05, |
| "loss": 0.4588, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.6183685111237878, |
| "grad_norm": 0.50099310470488, |
| "learning_rate": 4.4101479915433406e-05, |
| "loss": 0.4585, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.6195094124358242, |
| "grad_norm": 0.41648393536142087, |
| "learning_rate": 4.408033826638478e-05, |
| "loss": 0.447, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.6206503137478608, |
| "grad_norm": 0.46190569039774876, |
| "learning_rate": 4.4059196617336154e-05, |
| "loss": 0.4609, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.6217912150598973, |
| "grad_norm": 0.4862977173283739, |
| "learning_rate": 4.403805496828753e-05, |
| "loss": 0.4375, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.6229321163719338, |
| "grad_norm": 0.43294207299393156, |
| "learning_rate": 4.40169133192389e-05, |
| "loss": 0.4614, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.6240730176839704, |
| "grad_norm": 0.43760535192587485, |
| "learning_rate": 4.399577167019028e-05, |
| "loss": 0.4528, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.6252139189960069, |
| "grad_norm": 0.4247353277953283, |
| "learning_rate": 4.397463002114165e-05, |
| "loss": 0.4612, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.6263548203080433, |
| "grad_norm": 0.3829123288668259, |
| "learning_rate": 4.395348837209303e-05, |
| "loss": 0.4559, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.6274957216200798, |
| "grad_norm": 0.4363586414650476, |
| "learning_rate": 4.39323467230444e-05, |
| "loss": 0.4386, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.6286366229321164, |
| "grad_norm": 0.43682485012615274, |
| "learning_rate": 4.3911205073995776e-05, |
| "loss": 0.4623, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.6297775242441529, |
| "grad_norm": 0.4105313864106847, |
| "learning_rate": 4.389006342494715e-05, |
| "loss": 0.4538, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.6309184255561894, |
| "grad_norm": 0.3949382109776014, |
| "learning_rate": 4.3868921775898524e-05, |
| "loss": 0.4633, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.6320593268682259, |
| "grad_norm": 0.3787686846291949, |
| "learning_rate": 4.3847780126849895e-05, |
| "loss": 0.4492, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.6332002281802624, |
| "grad_norm": 0.41141740644120794, |
| "learning_rate": 4.3826638477801266e-05, |
| "loss": 0.4499, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.6343411294922989, |
| "grad_norm": 0.34586933918003787, |
| "learning_rate": 4.380549682875264e-05, |
| "loss": 0.4355, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.6354820308043354, |
| "grad_norm": 0.3768323855768829, |
| "learning_rate": 4.378435517970402e-05, |
| "loss": 0.466, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.6366229321163719, |
| "grad_norm": 0.3415297803284673, |
| "learning_rate": 4.37632135306554e-05, |
| "loss": 0.4622, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.6377638334284085, |
| "grad_norm": 0.38672987304843115, |
| "learning_rate": 4.374207188160677e-05, |
| "loss": 0.4539, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.638904734740445, |
| "grad_norm": 0.3891339211787994, |
| "learning_rate": 4.3720930232558146e-05, |
| "loss": 0.4655, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.6400456360524814, |
| "grad_norm": 0.37804765686531433, |
| "learning_rate": 4.369978858350952e-05, |
| "loss": 0.4554, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.6411865373645179, |
| "grad_norm": 0.41386853119656003, |
| "learning_rate": 4.367864693446089e-05, |
| "loss": 0.4563, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.6423274386765545, |
| "grad_norm": 0.3575661475152617, |
| "learning_rate": 4.3657505285412265e-05, |
| "loss": 0.4614, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.643468339988591, |
| "grad_norm": 0.3888937986353574, |
| "learning_rate": 4.3636363636363636e-05, |
| "loss": 0.4419, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.6446092413006275, |
| "grad_norm": 0.3728029276862445, |
| "learning_rate": 4.361522198731501e-05, |
| "loss": 0.434, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.6457501426126641, |
| "grad_norm": 0.3175142322947935, |
| "learning_rate": 4.3594080338266384e-05, |
| "loss": 0.4273, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.6468910439247005, |
| "grad_norm": 0.3594464916075461, |
| "learning_rate": 4.357293868921776e-05, |
| "loss": 0.4175, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.648031945236737, |
| "grad_norm": 0.31869903166119784, |
| "learning_rate": 4.355179704016913e-05, |
| "loss": 0.4376, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.6491728465487735, |
| "grad_norm": 0.3775961482431801, |
| "learning_rate": 4.353065539112051e-05, |
| "loss": 0.4469, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.6503137478608101, |
| "grad_norm": 0.35464274630677184, |
| "learning_rate": 4.350951374207189e-05, |
| "loss": 0.4393, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.6514546491728466, |
| "grad_norm": 0.37213536538672665, |
| "learning_rate": 4.348837209302326e-05, |
| "loss": 0.4568, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.652595550484883, |
| "grad_norm": 0.3781132343756537, |
| "learning_rate": 4.3467230443974635e-05, |
| "loss": 0.4535, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.6537364517969195, |
| "grad_norm": 0.36912322283896176, |
| "learning_rate": 4.3446088794926006e-05, |
| "loss": 0.4391, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.6548773531089561, |
| "grad_norm": 0.391374326479424, |
| "learning_rate": 4.342494714587738e-05, |
| "loss": 0.4449, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.6560182544209926, |
| "grad_norm": 0.4036376938390525, |
| "learning_rate": 4.3403805496828754e-05, |
| "loss": 0.4497, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.6571591557330291, |
| "grad_norm": 0.45153276772782586, |
| "learning_rate": 4.338266384778013e-05, |
| "loss": 0.4557, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.6583000570450656, |
| "grad_norm": 0.364556752115259, |
| "learning_rate": 4.33615221987315e-05, |
| "loss": 0.437, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.6594409583571021, |
| "grad_norm": 0.4115989364215942, |
| "learning_rate": 4.334038054968288e-05, |
| "loss": 0.4429, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.6605818596691386, |
| "grad_norm": 0.34885336248603255, |
| "learning_rate": 4.331923890063425e-05, |
| "loss": 0.4508, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.6617227609811751, |
| "grad_norm": 0.34730597454686063, |
| "learning_rate": 4.329809725158562e-05, |
| "loss": 0.4549, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.6628636622932116, |
| "grad_norm": 0.3908862001840564, |
| "learning_rate": 4.3276955602537e-05, |
| "loss": 0.4316, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.6640045636052482, |
| "grad_norm": 0.3632548361740951, |
| "learning_rate": 4.325581395348837e-05, |
| "loss": 0.4666, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.6651454649172847, |
| "grad_norm": 0.32265295885901846, |
| "learning_rate": 4.3234672304439746e-05, |
| "loss": 0.416, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.6662863662293211, |
| "grad_norm": 0.3505405594741678, |
| "learning_rate": 4.3213530655391124e-05, |
| "loss": 0.4508, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.6674272675413577, |
| "grad_norm": 0.35717088625469834, |
| "learning_rate": 4.31923890063425e-05, |
| "loss": 0.4081, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.6685681688533942, |
| "grad_norm": 0.3283024234495235, |
| "learning_rate": 4.317124735729387e-05, |
| "loss": 0.4228, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.6697090701654307, |
| "grad_norm": 0.4160501781111346, |
| "learning_rate": 4.315010570824525e-05, |
| "loss": 0.4404, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.6708499714774672, |
| "grad_norm": 0.3756742508086247, |
| "learning_rate": 4.312896405919662e-05, |
| "loss": 0.426, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.6719908727895038, |
| "grad_norm": 0.40139465792011, |
| "learning_rate": 4.310782241014799e-05, |
| "loss": 0.4593, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.6731317741015402, |
| "grad_norm": 0.4386921676603729, |
| "learning_rate": 4.308668076109937e-05, |
| "loss": 0.4357, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.6742726754135767, |
| "grad_norm": 0.3823795585192541, |
| "learning_rate": 4.306553911205074e-05, |
| "loss": 0.4359, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.6754135767256132, |
| "grad_norm": 0.43423365898558286, |
| "learning_rate": 4.3044397463002116e-05, |
| "loss": 0.429, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.6765544780376498, |
| "grad_norm": 0.3641873671007196, |
| "learning_rate": 4.302325581395349e-05, |
| "loss": 0.4553, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.6776953793496863, |
| "grad_norm": 0.4026856912219095, |
| "learning_rate": 4.3002114164904865e-05, |
| "loss": 0.4276, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.6788362806617227, |
| "grad_norm": 0.3701040977337515, |
| "learning_rate": 4.2980972515856235e-05, |
| "loss": 0.4598, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.6799771819737592, |
| "grad_norm": 0.36586205284253925, |
| "learning_rate": 4.295983086680761e-05, |
| "loss": 0.4536, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.6811180832857958, |
| "grad_norm": 0.4076865049400923, |
| "learning_rate": 4.293868921775899e-05, |
| "loss": 0.4317, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.6822589845978323, |
| "grad_norm": 0.3642152640530504, |
| "learning_rate": 4.291754756871036e-05, |
| "loss": 0.4408, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.6833998859098688, |
| "grad_norm": 0.3863091802215573, |
| "learning_rate": 4.289640591966174e-05, |
| "loss": 0.4371, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.6845407872219053, |
| "grad_norm": 0.3639715030666885, |
| "learning_rate": 4.287526427061311e-05, |
| "loss": 0.4774, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.6856816885339418, |
| "grad_norm": 0.45375108529568486, |
| "learning_rate": 4.2854122621564486e-05, |
| "loss": 0.437, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.6868225898459783, |
| "grad_norm": 0.3575285998180481, |
| "learning_rate": 4.283298097251586e-05, |
| "loss": 0.4373, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.6879634911580148, |
| "grad_norm": 0.4316339298329872, |
| "learning_rate": 4.2811839323467235e-05, |
| "loss": 0.4435, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.6891043924700513, |
| "grad_norm": 0.3982746161067394, |
| "learning_rate": 4.2790697674418605e-05, |
| "loss": 0.448, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.6902452937820879, |
| "grad_norm": 0.4099138290097522, |
| "learning_rate": 4.276955602536998e-05, |
| "loss": 0.4492, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.6913861950941244, |
| "grad_norm": 0.4411788104044321, |
| "learning_rate": 4.2748414376321353e-05, |
| "loss": 0.4558, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.6925270964061608, |
| "grad_norm": 0.3529679384408867, |
| "learning_rate": 4.2727272727272724e-05, |
| "loss": 0.4384, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.6936679977181974, |
| "grad_norm": 0.5228971823650882, |
| "learning_rate": 4.27061310782241e-05, |
| "loss": 0.4532, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.6948088990302339, |
| "grad_norm": 0.4448630770296031, |
| "learning_rate": 4.268498942917548e-05, |
| "loss": 0.4329, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.6959498003422704, |
| "grad_norm": 0.45059670019551196, |
| "learning_rate": 4.2663847780126857e-05, |
| "loss": 0.4423, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.6970907016543069, |
| "grad_norm": 0.4150170695256607, |
| "learning_rate": 4.264270613107823e-05, |
| "loss": 0.4394, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.6982316029663435, |
| "grad_norm": 0.4796338142973703, |
| "learning_rate": 4.2621564482029605e-05, |
| "loss": 0.4503, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.6993725042783799, |
| "grad_norm": 0.44780277545991026, |
| "learning_rate": 4.2600422832980975e-05, |
| "loss": 0.4455, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.7005134055904164, |
| "grad_norm": 0.3768482859123564, |
| "learning_rate": 4.257928118393235e-05, |
| "loss": 0.4379, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.7016543069024529, |
| "grad_norm": 0.5435524540760096, |
| "learning_rate": 4.2558139534883724e-05, |
| "loss": 0.4411, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.7027952082144895, |
| "grad_norm": 0.34397188124743944, |
| "learning_rate": 4.2536997885835094e-05, |
| "loss": 0.4418, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.703936109526526, |
| "grad_norm": 0.40313198481577966, |
| "learning_rate": 4.251585623678647e-05, |
| "loss": 0.4481, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.7050770108385624, |
| "grad_norm": 0.3711854195341639, |
| "learning_rate": 4.249471458773784e-05, |
| "loss": 0.4185, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.7062179121505989, |
| "grad_norm": 0.437619206266504, |
| "learning_rate": 4.247357293868922e-05, |
| "loss": 0.4521, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.7073588134626355, |
| "grad_norm": 0.3930220809028567, |
| "learning_rate": 4.245243128964059e-05, |
| "loss": 0.4405, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.708499714774672, |
| "grad_norm": 0.38247345005779415, |
| "learning_rate": 4.243128964059197e-05, |
| "loss": 0.4208, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.7096406160867085, |
| "grad_norm": 0.3437369531927041, |
| "learning_rate": 4.241014799154334e-05, |
| "loss": 0.4266, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.710781517398745, |
| "grad_norm": 0.386284137063292, |
| "learning_rate": 4.2389006342494716e-05, |
| "loss": 0.4654, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.7119224187107815, |
| "grad_norm": 0.41209070817450555, |
| "learning_rate": 4.2367864693446094e-05, |
| "loss": 0.4607, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.713063320022818, |
| "grad_norm": 0.3597165445736412, |
| "learning_rate": 4.2346723044397464e-05, |
| "loss": 0.4375, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.7142042213348545, |
| "grad_norm": 0.408138228545156, |
| "learning_rate": 4.232558139534884e-05, |
| "loss": 0.4502, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.7153451226468911, |
| "grad_norm": 0.36219755317558133, |
| "learning_rate": 4.230443974630021e-05, |
| "loss": 0.4339, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.7164860239589276, |
| "grad_norm": 0.46705886769040666, |
| "learning_rate": 4.228329809725159e-05, |
| "loss": 0.4471, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.7176269252709641, |
| "grad_norm": 0.3737277765853678, |
| "learning_rate": 4.226215644820296e-05, |
| "loss": 0.4381, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.7187678265830005, |
| "grad_norm": 0.45979676093246313, |
| "learning_rate": 4.224101479915434e-05, |
| "loss": 0.4127, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.7199087278950371, |
| "grad_norm": 0.3527512994318668, |
| "learning_rate": 4.221987315010571e-05, |
| "loss": 0.4266, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.7210496292070736, |
| "grad_norm": 0.45287617912713696, |
| "learning_rate": 4.2198731501057086e-05, |
| "loss": 0.4532, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.7221905305191101, |
| "grad_norm": 0.4378159828504365, |
| "learning_rate": 4.217758985200846e-05, |
| "loss": 0.452, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.7233314318311466, |
| "grad_norm": 0.3612319011735092, |
| "learning_rate": 4.215644820295983e-05, |
| "loss": 0.4402, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.7244723331431832, |
| "grad_norm": 0.5319032554121892, |
| "learning_rate": 4.2135306553911205e-05, |
| "loss": 0.4515, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.7256132344552196, |
| "grad_norm": 0.36897795859259513, |
| "learning_rate": 4.211416490486258e-05, |
| "loss": 0.4321, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.7267541357672561, |
| "grad_norm": 0.6220522354879798, |
| "learning_rate": 4.209302325581396e-05, |
| "loss": 0.49, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.7278950370792926, |
| "grad_norm": 0.34250504144074906, |
| "learning_rate": 4.207188160676533e-05, |
| "loss": 0.4517, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.7290359383913292, |
| "grad_norm": 0.4460241276659019, |
| "learning_rate": 4.205073995771671e-05, |
| "loss": 0.442, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.7301768397033657, |
| "grad_norm": 0.4474909489048968, |
| "learning_rate": 4.202959830866808e-05, |
| "loss": 0.444, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.7313177410154021, |
| "grad_norm": 0.3577226115951208, |
| "learning_rate": 4.2008456659619456e-05, |
| "loss": 0.4335, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.7324586423274386, |
| "grad_norm": 0.4436466435939107, |
| "learning_rate": 4.198731501057083e-05, |
| "loss": 0.4245, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.7335995436394752, |
| "grad_norm": 0.3619666710909827, |
| "learning_rate": 4.19661733615222e-05, |
| "loss": 0.4435, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.7347404449515117, |
| "grad_norm": 0.4557130869879469, |
| "learning_rate": 4.1945031712473575e-05, |
| "loss": 0.4601, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.7358813462635482, |
| "grad_norm": 0.3990712259993655, |
| "learning_rate": 4.1923890063424946e-05, |
| "loss": 0.442, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.7370222475755848, |
| "grad_norm": 0.35936180822504243, |
| "learning_rate": 4.190274841437632e-05, |
| "loss": 0.4258, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.7381631488876212, |
| "grad_norm": 0.4412671459103769, |
| "learning_rate": 4.1881606765327694e-05, |
| "loss": 0.4366, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.7393040501996577, |
| "grad_norm": 0.3480605482648498, |
| "learning_rate": 4.186046511627907e-05, |
| "loss": 0.424, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.7404449515116942, |
| "grad_norm": 0.4545769918829186, |
| "learning_rate": 4.183932346723045e-05, |
| "loss": 0.4431, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.7415858528237308, |
| "grad_norm": 0.38397067925303624, |
| "learning_rate": 4.181818181818182e-05, |
| "loss": 0.4333, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.7427267541357673, |
| "grad_norm": 0.4006047262191059, |
| "learning_rate": 4.17970401691332e-05, |
| "loss": 0.4293, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.7438676554478038, |
| "grad_norm": 0.33535629621116786, |
| "learning_rate": 4.177589852008457e-05, |
| "loss": 0.4369, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.7450085567598402, |
| "grad_norm": 0.4430399993047538, |
| "learning_rate": 4.1754756871035945e-05, |
| "loss": 0.459, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.7461494580718768, |
| "grad_norm": 0.48222782093736927, |
| "learning_rate": 4.1733615221987316e-05, |
| "loss": 0.457, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.7472903593839133, |
| "grad_norm": 0.38117402510092147, |
| "learning_rate": 4.171247357293869e-05, |
| "loss": 0.4575, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.7484312606959498, |
| "grad_norm": 0.5246051148892976, |
| "learning_rate": 4.1691331923890064e-05, |
| "loss": 0.4431, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.7495721620079863, |
| "grad_norm": 0.34928037914201676, |
| "learning_rate": 4.167019027484144e-05, |
| "loss": 0.4386, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.7507130633200229, |
| "grad_norm": 0.48475845301346415, |
| "learning_rate": 4.164904862579281e-05, |
| "loss": 0.4512, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.7518539646320593, |
| "grad_norm": 0.33139730820658986, |
| "learning_rate": 4.162790697674418e-05, |
| "loss": 0.422, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.7529948659440958, |
| "grad_norm": 0.46025984093444033, |
| "learning_rate": 4.160676532769556e-05, |
| "loss": 0.4546, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.7541357672561323, |
| "grad_norm": 0.43742732367054127, |
| "learning_rate": 4.158562367864694e-05, |
| "loss": 0.4365, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.7552766685681689, |
| "grad_norm": 0.39466973998333554, |
| "learning_rate": 4.1564482029598315e-05, |
| "loss": 0.447, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.7564175698802054, |
| "grad_norm": 0.4164710504296488, |
| "learning_rate": 4.1543340380549686e-05, |
| "loss": 0.4341, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.7575584711922418, |
| "grad_norm": 0.3445633470818866, |
| "learning_rate": 4.152219873150106e-05, |
| "loss": 0.461, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.7586993725042784, |
| "grad_norm": 0.3580827508193947, |
| "learning_rate": 4.1501057082452434e-05, |
| "loss": 0.4172, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.7598402738163149, |
| "grad_norm": 0.3676001265506075, |
| "learning_rate": 4.147991543340381e-05, |
| "loss": 0.4428, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.7609811751283514, |
| "grad_norm": 0.3505942690631605, |
| "learning_rate": 4.145877378435518e-05, |
| "loss": 0.4432, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.7621220764403879, |
| "grad_norm": 0.3891067229029948, |
| "learning_rate": 4.143763213530655e-05, |
| "loss": 0.4467, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.7632629777524245, |
| "grad_norm": 0.3734083446553611, |
| "learning_rate": 4.141649048625793e-05, |
| "loss": 0.4269, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.764403879064461, |
| "grad_norm": 0.39030324929612525, |
| "learning_rate": 4.13953488372093e-05, |
| "loss": 0.4421, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.7655447803764974, |
| "grad_norm": 0.42958890638506364, |
| "learning_rate": 4.137420718816068e-05, |
| "loss": 0.4062, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.7666856816885339, |
| "grad_norm": 0.30539972744468097, |
| "learning_rate": 4.135306553911205e-05, |
| "loss": 0.4479, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.7678265830005705, |
| "grad_norm": 0.3702174124250884, |
| "learning_rate": 4.1331923890063427e-05, |
| "loss": 0.435, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.768967484312607, |
| "grad_norm": 0.33009900205382475, |
| "learning_rate": 4.13107822410148e-05, |
| "loss": 0.4345, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.7701083856246435, |
| "grad_norm": 0.3245007702409929, |
| "learning_rate": 4.1289640591966175e-05, |
| "loss": 0.4453, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.7712492869366799, |
| "grad_norm": 0.40666300271812594, |
| "learning_rate": 4.126849894291755e-05, |
| "loss": 0.4605, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.7723901882487165, |
| "grad_norm": 0.36635310340240823, |
| "learning_rate": 4.124735729386892e-05, |
| "loss": 0.4264, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.773531089560753, |
| "grad_norm": 0.4299812969188949, |
| "learning_rate": 4.12262156448203e-05, |
| "loss": 0.4224, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.7746719908727895, |
| "grad_norm": 0.359428711940889, |
| "learning_rate": 4.120507399577167e-05, |
| "loss": 0.4417, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.775812892184826, |
| "grad_norm": 0.4677963722655903, |
| "learning_rate": 4.118393234672305e-05, |
| "loss": 0.4574, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.7769537934968626, |
| "grad_norm": 0.4233752207812584, |
| "learning_rate": 4.116279069767442e-05, |
| "loss": 0.4481, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.778094694808899, |
| "grad_norm": 0.3835874030237872, |
| "learning_rate": 4.1141649048625797e-05, |
| "loss": 0.4298, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.7792355961209355, |
| "grad_norm": 0.37669645948536934, |
| "learning_rate": 4.112050739957717e-05, |
| "loss": 0.4513, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.780376497432972, |
| "grad_norm": 0.36373603981263314, |
| "learning_rate": 4.1099365750528545e-05, |
| "loss": 0.443, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.7815173987450086, |
| "grad_norm": 0.3753684899759581, |
| "learning_rate": 4.1078224101479915e-05, |
| "loss": 0.4269, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.7826583000570451, |
| "grad_norm": 0.3605434844405242, |
| "learning_rate": 4.1057082452431286e-05, |
| "loss": 0.4323, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.7837992013690815, |
| "grad_norm": 0.3462661479430446, |
| "learning_rate": 4.1035940803382664e-05, |
| "loss": 0.4326, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.7849401026811181, |
| "grad_norm": 0.4204439751599984, |
| "learning_rate": 4.101479915433404e-05, |
| "loss": 0.4373, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.7860810039931546, |
| "grad_norm": 0.3280914793709163, |
| "learning_rate": 4.099365750528542e-05, |
| "loss": 0.4535, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.7872219053051911, |
| "grad_norm": 0.35041349522279136, |
| "learning_rate": 4.097251585623679e-05, |
| "loss": 0.4381, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.7883628066172276, |
| "grad_norm": 0.3988862381741282, |
| "learning_rate": 4.0951374207188167e-05, |
| "loss": 0.4651, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.7895037079292642, |
| "grad_norm": 0.3424394514285045, |
| "learning_rate": 4.093023255813954e-05, |
| "loss": 0.4441, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.7906446092413006, |
| "grad_norm": 0.3490799047610466, |
| "learning_rate": 4.0909090909090915e-05, |
| "loss": 0.4404, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.7917855105533371, |
| "grad_norm": 0.3503575821053399, |
| "learning_rate": 4.0887949260042285e-05, |
| "loss": 0.425, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.7929264118653736, |
| "grad_norm": 0.365659614058789, |
| "learning_rate": 4.0866807610993656e-05, |
| "loss": 0.4339, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.7940673131774102, |
| "grad_norm": 0.39539532500596763, |
| "learning_rate": 4.0845665961945034e-05, |
| "loss": 0.4446, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.7952082144894467, |
| "grad_norm": 0.3250304775152391, |
| "learning_rate": 4.0824524312896404e-05, |
| "loss": 0.438, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.7963491158014832, |
| "grad_norm": 0.4909285073320248, |
| "learning_rate": 4.080338266384778e-05, |
| "loss": 0.4463, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.7974900171135196, |
| "grad_norm": 0.37590033225791325, |
| "learning_rate": 4.078224101479915e-05, |
| "loss": 0.4491, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.7986309184255562, |
| "grad_norm": 0.45070180362214113, |
| "learning_rate": 4.076109936575053e-05, |
| "loss": 0.4299, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.7997718197375927, |
| "grad_norm": 0.4251332388315139, |
| "learning_rate": 4.073995771670191e-05, |
| "loss": 0.4571, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.8009127210496292, |
| "grad_norm": 0.48990786595017877, |
| "learning_rate": 4.0718816067653285e-05, |
| "loss": 0.4453, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.8020536223616657, |
| "grad_norm": 0.5002547392375538, |
| "learning_rate": 4.0697674418604655e-05, |
| "loss": 0.4207, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.8031945236737023, |
| "grad_norm": 0.4659070323210945, |
| "learning_rate": 4.0676532769556026e-05, |
| "loss": 0.4535, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.8043354249857387, |
| "grad_norm": 0.5731835975203691, |
| "learning_rate": 4.0655391120507404e-05, |
| "loss": 0.4349, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.8054763262977752, |
| "grad_norm": 0.4228304566829878, |
| "learning_rate": 4.0634249471458774e-05, |
| "loss": 0.4291, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.8066172276098118, |
| "grad_norm": 0.5720554991869126, |
| "learning_rate": 4.061310782241015e-05, |
| "loss": 0.4554, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.8077581289218483, |
| "grad_norm": 0.4853803231526458, |
| "learning_rate": 4.059196617336152e-05, |
| "loss": 0.4836, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.8088990302338848, |
| "grad_norm": 0.4971208032161558, |
| "learning_rate": 4.05708245243129e-05, |
| "loss": 0.4413, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.8100399315459212, |
| "grad_norm": 0.5127734596329273, |
| "learning_rate": 4.054968287526427e-05, |
| "loss": 0.4554, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.8111808328579578, |
| "grad_norm": 0.6714943186997662, |
| "learning_rate": 4.052854122621565e-05, |
| "loss": 0.4447, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.8123217341699943, |
| "grad_norm": 0.40895342997960105, |
| "learning_rate": 4.050739957716702e-05, |
| "loss": 0.4287, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.8134626354820308, |
| "grad_norm": 0.5479757534782737, |
| "learning_rate": 4.0486257928118396e-05, |
| "loss": 0.4436, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.8146035367940673, |
| "grad_norm": 0.5279155713449081, |
| "learning_rate": 4.046511627906977e-05, |
| "loss": 0.4381, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.8157444381061039, |
| "grad_norm": 0.434094864002774, |
| "learning_rate": 4.0443974630021144e-05, |
| "loss": 0.4305, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.8168853394181403, |
| "grad_norm": 0.6187261983111648, |
| "learning_rate": 4.042283298097252e-05, |
| "loss": 0.4264, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.8180262407301768, |
| "grad_norm": 0.3828855462779502, |
| "learning_rate": 4.040169133192389e-05, |
| "loss": 0.4303, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.8191671420422133, |
| "grad_norm": 0.5189377615161761, |
| "learning_rate": 4.038054968287527e-05, |
| "loss": 0.4384, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.8203080433542499, |
| "grad_norm": 0.41329860126451856, |
| "learning_rate": 4.035940803382664e-05, |
| "loss": 0.4325, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.8214489446662864, |
| "grad_norm": 0.3937265367654548, |
| "learning_rate": 4.033826638477802e-05, |
| "loss": 0.422, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.8225898459783229, |
| "grad_norm": 0.43755932917102885, |
| "learning_rate": 4.031712473572939e-05, |
| "loss": 0.4028, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.8237307472903593, |
| "grad_norm": 0.4157908856486112, |
| "learning_rate": 4.029598308668076e-05, |
| "loss": 0.438, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.8248716486023959, |
| "grad_norm": 0.502065224922776, |
| "learning_rate": 4.027484143763214e-05, |
| "loss": 0.4181, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.8260125499144324, |
| "grad_norm": 0.3780582332426594, |
| "learning_rate": 4.025369978858351e-05, |
| "loss": 0.4201, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.8271534512264689, |
| "grad_norm": 0.34860648789791515, |
| "learning_rate": 4.0232558139534885e-05, |
| "loss": 0.4546, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.8282943525385055, |
| "grad_norm": 0.43374943583927567, |
| "learning_rate": 4.0211416490486256e-05, |
| "loss": 0.4166, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.829435253850542, |
| "grad_norm": 0.34017878511195426, |
| "learning_rate": 4.019027484143763e-05, |
| "loss": 0.4584, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.8305761551625784, |
| "grad_norm": 0.4520337372094188, |
| "learning_rate": 4.016913319238901e-05, |
| "loss": 0.4515, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.8317170564746149, |
| "grad_norm": 0.4484495076947604, |
| "learning_rate": 4.014799154334038e-05, |
| "loss": 0.4477, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.8328579577866515, |
| "grad_norm": 0.39071842952373664, |
| "learning_rate": 4.012684989429176e-05, |
| "loss": 0.44, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.833998859098688, |
| "grad_norm": 0.4709103934572106, |
| "learning_rate": 4.010570824524313e-05, |
| "loss": 0.4105, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.8351397604107245, |
| "grad_norm": 0.3693181402492397, |
| "learning_rate": 4.008456659619451e-05, |
| "loss": 0.4423, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.8362806617227609, |
| "grad_norm": 0.3438695738762642, |
| "learning_rate": 4.006342494714588e-05, |
| "loss": 0.4151, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.8374215630347975, |
| "grad_norm": 0.3749841228080736, |
| "learning_rate": 4.0042283298097255e-05, |
| "loss": 0.4387, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.838562464346834, |
| "grad_norm": 0.31093266972310163, |
| "learning_rate": 4.0021141649048626e-05, |
| "loss": 0.4245, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.8397033656588705, |
| "grad_norm": 0.3842543200844472, |
| "learning_rate": 4e-05, |
| "loss": 0.4361, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.840844266970907, |
| "grad_norm": 0.3766287686752745, |
| "learning_rate": 3.9978858350951374e-05, |
| "loss": 0.4228, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.8419851682829436, |
| "grad_norm": 0.35517260008860085, |
| "learning_rate": 3.9957716701902745e-05, |
| "loss": 0.4359, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.84312606959498, |
| "grad_norm": 0.4419338843198511, |
| "learning_rate": 3.993657505285412e-05, |
| "loss": 0.4304, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.8442669709070165, |
| "grad_norm": 0.513467064125959, |
| "learning_rate": 3.99154334038055e-05, |
| "loss": 0.4403, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.845407872219053, |
| "grad_norm": 0.4810139092669358, |
| "learning_rate": 3.989429175475688e-05, |
| "loss": 0.4677, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.8465487735310896, |
| "grad_norm": 0.39212605153330693, |
| "learning_rate": 3.987315010570825e-05, |
| "loss": 0.4425, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.8476896748431261, |
| "grad_norm": 0.41040105526904663, |
| "learning_rate": 3.9852008456659625e-05, |
| "loss": 0.4298, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.8488305761551626, |
| "grad_norm": 0.462499235826406, |
| "learning_rate": 3.9830866807610996e-05, |
| "loss": 0.4075, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.849971477467199, |
| "grad_norm": 0.38462443248324996, |
| "learning_rate": 3.980972515856237e-05, |
| "loss": 0.4281, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.8511123787792356, |
| "grad_norm": 0.38604978434276516, |
| "learning_rate": 3.9788583509513744e-05, |
| "loss": 0.4358, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.8522532800912721, |
| "grad_norm": 0.3984294927193753, |
| "learning_rate": 3.9767441860465115e-05, |
| "loss": 0.4151, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.8533941814033086, |
| "grad_norm": 0.36801620604521634, |
| "learning_rate": 3.974630021141649e-05, |
| "loss": 0.4293, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.8545350827153452, |
| "grad_norm": 0.3374397580238608, |
| "learning_rate": 3.972515856236786e-05, |
| "loss": 0.4139, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.8556759840273817, |
| "grad_norm": 0.42454353852197896, |
| "learning_rate": 3.970401691331924e-05, |
| "loss": 0.4351, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.8568168853394181, |
| "grad_norm": 0.3940551073101426, |
| "learning_rate": 3.968287526427061e-05, |
| "loss": 0.4324, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.8579577866514546, |
| "grad_norm": 0.40588237811060596, |
| "learning_rate": 3.966173361522199e-05, |
| "loss": 0.4194, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.8590986879634912, |
| "grad_norm": 0.420807467022524, |
| "learning_rate": 3.9640591966173366e-05, |
| "loss": 0.4491, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.8602395892755277, |
| "grad_norm": 0.4540159144239781, |
| "learning_rate": 3.961945031712474e-05, |
| "loss": 0.4092, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.8613804905875642, |
| "grad_norm": 0.46954467807566896, |
| "learning_rate": 3.9598308668076114e-05, |
| "loss": 0.4504, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.8625213918996006, |
| "grad_norm": 0.4221414094460434, |
| "learning_rate": 3.9577167019027485e-05, |
| "loss": 0.4364, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.8636622932116372, |
| "grad_norm": 0.41911229551531937, |
| "learning_rate": 3.955602536997886e-05, |
| "loss": 0.4395, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.8648031945236737, |
| "grad_norm": 0.4334933603326218, |
| "learning_rate": 3.953488372093023e-05, |
| "loss": 0.4341, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.8659440958357102, |
| "grad_norm": 0.427182558681861, |
| "learning_rate": 3.951374207188161e-05, |
| "loss": 0.4234, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.8670849971477467, |
| "grad_norm": 0.4679381773381296, |
| "learning_rate": 3.949260042283298e-05, |
| "loss": 0.4461, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.8682258984597833, |
| "grad_norm": 0.3997765960963666, |
| "learning_rate": 3.947145877378436e-05, |
| "loss": 0.4553, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.8693667997718197, |
| "grad_norm": 0.47893488084130953, |
| "learning_rate": 3.945031712473573e-05, |
| "loss": 0.4383, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.8705077010838562, |
| "grad_norm": 0.387091880809383, |
| "learning_rate": 3.942917547568711e-05, |
| "loss": 0.4231, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.8716486023958927, |
| "grad_norm": 0.3967940661402007, |
| "learning_rate": 3.940803382663848e-05, |
| "loss": 0.429, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.8727895037079293, |
| "grad_norm": 0.45115954790258245, |
| "learning_rate": 3.9386892177589855e-05, |
| "loss": 0.4412, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.8739304050199658, |
| "grad_norm": 0.31431831863394666, |
| "learning_rate": 3.9365750528541225e-05, |
| "loss": 0.4142, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.8750713063320023, |
| "grad_norm": 0.5271261001088663, |
| "learning_rate": 3.93446088794926e-05, |
| "loss": 0.4582, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.8762122076440388, |
| "grad_norm": 0.37960216759246174, |
| "learning_rate": 3.932346723044398e-05, |
| "loss": 0.4451, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.8773531089560753, |
| "grad_norm": 0.5153918934168993, |
| "learning_rate": 3.930232558139535e-05, |
| "loss": 0.4421, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.8784940102681118, |
| "grad_norm": 0.40777595518236365, |
| "learning_rate": 3.928118393234673e-05, |
| "loss": 0.4329, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.8796349115801483, |
| "grad_norm": 0.4054358265499145, |
| "learning_rate": 3.92600422832981e-05, |
| "loss": 0.4492, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.8807758128921849, |
| "grad_norm": 0.45758804362718547, |
| "learning_rate": 3.923890063424948e-05, |
| "loss": 0.4491, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.8819167142042214, |
| "grad_norm": 0.4327636489834873, |
| "learning_rate": 3.921775898520085e-05, |
| "loss": 0.419, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.8830576155162578, |
| "grad_norm": 0.5308983247497958, |
| "learning_rate": 3.919661733615222e-05, |
| "loss": 0.4195, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.8841985168282943, |
| "grad_norm": 0.44186009558843903, |
| "learning_rate": 3.9175475687103596e-05, |
| "loss": 0.422, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.8853394181403309, |
| "grad_norm": 0.49860412199875853, |
| "learning_rate": 3.9154334038054966e-05, |
| "loss": 0.4508, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.8864803194523674, |
| "grad_norm": 0.5386256866496492, |
| "learning_rate": 3.9133192389006344e-05, |
| "loss": 0.4199, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.8876212207644039, |
| "grad_norm": 0.42135087233790125, |
| "learning_rate": 3.9112050739957714e-05, |
| "loss": 0.439, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.8887621220764403, |
| "grad_norm": 0.47429413048860186, |
| "learning_rate": 3.909090909090909e-05, |
| "loss": 0.4075, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.8899030233884769, |
| "grad_norm": 0.507620116383454, |
| "learning_rate": 3.906976744186047e-05, |
| "loss": 0.4559, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.8910439247005134, |
| "grad_norm": 0.4130323507865215, |
| "learning_rate": 3.904862579281185e-05, |
| "loss": 0.4335, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.8921848260125499, |
| "grad_norm": 0.4919769248710124, |
| "learning_rate": 3.902748414376322e-05, |
| "loss": 0.4326, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.8933257273245864, |
| "grad_norm": 0.3618438803737931, |
| "learning_rate": 3.900634249471459e-05, |
| "loss": 0.4149, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.894466628636623, |
| "grad_norm": 0.3766229644972282, |
| "learning_rate": 3.8985200845665966e-05, |
| "loss": 0.4413, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.8956075299486594, |
| "grad_norm": 0.40065923481523696, |
| "learning_rate": 3.8964059196617336e-05, |
| "loss": 0.4256, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.8967484312606959, |
| "grad_norm": 0.38682593556698847, |
| "learning_rate": 3.8942917547568714e-05, |
| "loss": 0.4383, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.8978893325727325, |
| "grad_norm": 0.36589625639407924, |
| "learning_rate": 3.8921775898520084e-05, |
| "loss": 0.3975, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.899030233884769, |
| "grad_norm": 0.4992171026288852, |
| "learning_rate": 3.890063424947146e-05, |
| "loss": 0.4273, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.9001711351968055, |
| "grad_norm": 0.34110261273539755, |
| "learning_rate": 3.887949260042283e-05, |
| "loss": 0.4338, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.901312036508842, |
| "grad_norm": 0.5241888214017549, |
| "learning_rate": 3.885835095137421e-05, |
| "loss": 0.4486, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.9024529378208785, |
| "grad_norm": 0.46893161994221105, |
| "learning_rate": 3.883720930232558e-05, |
| "loss": 0.4326, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.903593839132915, |
| "grad_norm": 0.42273170967522355, |
| "learning_rate": 3.881606765327696e-05, |
| "loss": 0.4277, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.9047347404449515, |
| "grad_norm": 0.4549507679298303, |
| "learning_rate": 3.8794926004228336e-05, |
| "loss": 0.4494, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.905875641756988, |
| "grad_norm": 0.381434633502494, |
| "learning_rate": 3.8773784355179706e-05, |
| "loss": 0.4204, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.9070165430690246, |
| "grad_norm": 0.449182413711188, |
| "learning_rate": 3.8752642706131084e-05, |
| "loss": 0.4151, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.908157444381061, |
| "grad_norm": 0.39996145027480806, |
| "learning_rate": 3.8731501057082454e-05, |
| "loss": 0.4182, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.9092983456930975, |
| "grad_norm": 0.4239131825637012, |
| "learning_rate": 3.871035940803383e-05, |
| "loss": 0.4333, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.910439247005134, |
| "grad_norm": 0.38140086906401804, |
| "learning_rate": 3.86892177589852e-05, |
| "loss": 0.4263, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.9115801483171706, |
| "grad_norm": 0.4151234690517119, |
| "learning_rate": 3.866807610993658e-05, |
| "loss": 0.3942, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.9127210496292071, |
| "grad_norm": 0.48901591188294186, |
| "learning_rate": 3.864693446088795e-05, |
| "loss": 0.4268, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.9138619509412436, |
| "grad_norm": 0.4229848535149157, |
| "learning_rate": 3.862579281183932e-05, |
| "loss": 0.4217, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.91500285225328, |
| "grad_norm": 0.45973123150821005, |
| "learning_rate": 3.86046511627907e-05, |
| "loss": 0.458, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.9161437535653166, |
| "grad_norm": 0.4609648365948717, |
| "learning_rate": 3.858350951374207e-05, |
| "loss": 0.4307, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.9172846548773531, |
| "grad_norm": 0.36201542515906165, |
| "learning_rate": 3.856236786469345e-05, |
| "loss": 0.415, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.9184255561893896, |
| "grad_norm": 0.4138868996856232, |
| "learning_rate": 3.8541226215644824e-05, |
| "loss": 0.4219, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.9195664575014262, |
| "grad_norm": 0.425798724139904, |
| "learning_rate": 3.8520084566596195e-05, |
| "loss": 0.4324, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.9207073588134627, |
| "grad_norm": 0.44617326124449147, |
| "learning_rate": 3.849894291754757e-05, |
| "loss": 0.4272, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.9218482601254991, |
| "grad_norm": 0.3669146140612319, |
| "learning_rate": 3.847780126849895e-05, |
| "loss": 0.4435, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.9229891614375356, |
| "grad_norm": 0.5009260847096676, |
| "learning_rate": 3.845665961945032e-05, |
| "loss": 0.4495, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.9241300627495722, |
| "grad_norm": 0.304056760658916, |
| "learning_rate": 3.843551797040169e-05, |
| "loss": 0.4315, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.9252709640616087, |
| "grad_norm": 0.4554386377496836, |
| "learning_rate": 3.841437632135307e-05, |
| "loss": 0.4365, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.9264118653736452, |
| "grad_norm": 0.3152473341936204, |
| "learning_rate": 3.839323467230444e-05, |
| "loss": 0.4526, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.9275527666856817, |
| "grad_norm": 0.3689287886745172, |
| "learning_rate": 3.837209302325582e-05, |
| "loss": 0.4277, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.9286936679977182, |
| "grad_norm": 0.38617380995316614, |
| "learning_rate": 3.835095137420719e-05, |
| "loss": 0.4392, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.9298345693097547, |
| "grad_norm": 0.31250501424875, |
| "learning_rate": 3.8329809725158565e-05, |
| "loss": 0.4252, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.9309754706217912, |
| "grad_norm": 0.40260110774923935, |
| "learning_rate": 3.8308668076109936e-05, |
| "loss": 0.4337, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.9321163719338277, |
| "grad_norm": 0.3434225940584785, |
| "learning_rate": 3.8287526427061307e-05, |
| "loss": 0.4188, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.9332572732458643, |
| "grad_norm": 0.4583302147431869, |
| "learning_rate": 3.8266384778012684e-05, |
| "loss": 0.428, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.9343981745579008, |
| "grad_norm": 0.4823100075277545, |
| "learning_rate": 3.824524312896406e-05, |
| "loss": 0.435, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.9355390758699372, |
| "grad_norm": 0.3756749669723151, |
| "learning_rate": 3.822410147991544e-05, |
| "loss": 0.4361, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.9366799771819737, |
| "grad_norm": 0.5240760743232675, |
| "learning_rate": 3.820295983086681e-05, |
| "loss": 0.4233, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.9378208784940103, |
| "grad_norm": 0.39250208092232536, |
| "learning_rate": 3.818181818181819e-05, |
| "loss": 0.4221, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.9389617798060468, |
| "grad_norm": 0.4739585449842317, |
| "learning_rate": 3.816067653276956e-05, |
| "loss": 0.4365, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.9401026811180833, |
| "grad_norm": 0.4937231941861391, |
| "learning_rate": 3.8139534883720935e-05, |
| "loss": 0.4357, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.9412435824301197, |
| "grad_norm": 0.449294685022375, |
| "learning_rate": 3.8118393234672306e-05, |
| "loss": 0.4595, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.9423844837421563, |
| "grad_norm": 0.3384444427709971, |
| "learning_rate": 3.809725158562368e-05, |
| "loss": 0.4436, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.9435253850541928, |
| "grad_norm": 0.48955378365349317, |
| "learning_rate": 3.8076109936575054e-05, |
| "loss": 0.4386, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.9446662863662293, |
| "grad_norm": 0.3002513126100782, |
| "learning_rate": 3.8054968287526425e-05, |
| "loss": 0.428, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.9458071876782659, |
| "grad_norm": 0.47686476251659693, |
| "learning_rate": 3.80338266384778e-05, |
| "loss": 0.4161, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.9469480889903024, |
| "grad_norm": 0.3337526894068287, |
| "learning_rate": 3.801268498942917e-05, |
| "loss": 0.4491, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.9480889903023388, |
| "grad_norm": 0.42874888426843116, |
| "learning_rate": 3.799154334038055e-05, |
| "loss": 0.4322, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.9492298916143753, |
| "grad_norm": 0.3686621986757173, |
| "learning_rate": 3.797040169133193e-05, |
| "loss": 0.4452, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.9503707929264119, |
| "grad_norm": 0.4427227801549552, |
| "learning_rate": 3.7949260042283305e-05, |
| "loss": 0.4409, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.9515116942384484, |
| "grad_norm": 0.40716707620144105, |
| "learning_rate": 3.7928118393234676e-05, |
| "loss": 0.44, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.9526525955504849, |
| "grad_norm": 0.4778332483885651, |
| "learning_rate": 3.790697674418605e-05, |
| "loss": 0.4321, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.9537934968625214, |
| "grad_norm": 0.5215688673683716, |
| "learning_rate": 3.7885835095137424e-05, |
| "loss": 0.4139, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.9549343981745579, |
| "grad_norm": 0.3973314649903041, |
| "learning_rate": 3.7864693446088795e-05, |
| "loss": 0.4481, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.9560752994865944, |
| "grad_norm": 0.5913338031384034, |
| "learning_rate": 3.784355179704017e-05, |
| "loss": 0.441, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.9572162007986309, |
| "grad_norm": 0.3517543605151038, |
| "learning_rate": 3.782241014799154e-05, |
| "loss": 0.449, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.9583571021106674, |
| "grad_norm": 0.5008792785960227, |
| "learning_rate": 3.780126849894292e-05, |
| "loss": 0.4196, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.959498003422704, |
| "grad_norm": 0.3855644570106595, |
| "learning_rate": 3.778012684989429e-05, |
| "loss": 0.4401, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.9606389047347405, |
| "grad_norm": 0.46893696225907877, |
| "learning_rate": 3.775898520084567e-05, |
| "loss": 0.4411, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.9617798060467769, |
| "grad_norm": 0.36157623560181484, |
| "learning_rate": 3.773784355179704e-05, |
| "loss": 0.4438, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.9629207073588134, |
| "grad_norm": 0.44243231770872754, |
| "learning_rate": 3.771670190274842e-05, |
| "loss": 0.4406, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.96406160867085, |
| "grad_norm": 0.3107496198874931, |
| "learning_rate": 3.7695560253699794e-05, |
| "loss": 0.4165, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.9652025099828865, |
| "grad_norm": 0.4615834197410632, |
| "learning_rate": 3.7674418604651165e-05, |
| "loss": 0.426, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.966343411294923, |
| "grad_norm": 0.349515582638473, |
| "learning_rate": 3.765327695560254e-05, |
| "loss": 0.4296, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.9674843126069596, |
| "grad_norm": 0.4222707991171846, |
| "learning_rate": 3.763213530655391e-05, |
| "loss": 0.4452, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.968625213918996, |
| "grad_norm": 0.39191168304626894, |
| "learning_rate": 3.761099365750529e-05, |
| "loss": 0.4399, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.9697661152310325, |
| "grad_norm": 0.4116727814128068, |
| "learning_rate": 3.758985200845666e-05, |
| "loss": 0.4559, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.970907016543069, |
| "grad_norm": 0.40681194079162225, |
| "learning_rate": 3.756871035940804e-05, |
| "loss": 0.4731, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.9720479178551056, |
| "grad_norm": 0.44083783057627335, |
| "learning_rate": 3.754756871035941e-05, |
| "loss": 0.4335, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.9731888191671421, |
| "grad_norm": 0.37049492685038155, |
| "learning_rate": 3.752642706131078e-05, |
| "loss": 0.4293, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.9743297204791785, |
| "grad_norm": 0.4375755567317356, |
| "learning_rate": 3.750528541226216e-05, |
| "loss": 0.4354, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.975470621791215, |
| "grad_norm": 0.33832049022487304, |
| "learning_rate": 3.748414376321353e-05, |
| "loss": 0.4293, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.9766115231032516, |
| "grad_norm": 0.5121821917096232, |
| "learning_rate": 3.7463002114164906e-05, |
| "loss": 0.4537, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.9777524244152881, |
| "grad_norm": 0.3818306200016373, |
| "learning_rate": 3.7441860465116276e-05, |
| "loss": 0.4541, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.9788933257273246, |
| "grad_norm": 0.4215507470795194, |
| "learning_rate": 3.7420718816067654e-05, |
| "loss": 0.4173, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.980034227039361, |
| "grad_norm": 0.396875209227274, |
| "learning_rate": 3.739957716701903e-05, |
| "loss": 0.4227, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.9811751283513976, |
| "grad_norm": 0.44522795502012974, |
| "learning_rate": 3.737843551797041e-05, |
| "loss": 0.4254, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.9823160296634341, |
| "grad_norm": 0.36390782196922894, |
| "learning_rate": 3.735729386892178e-05, |
| "loss": 0.435, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.9834569309754706, |
| "grad_norm": 0.4051435067373246, |
| "learning_rate": 3.733615221987315e-05, |
| "loss": 0.4469, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.9845978322875071, |
| "grad_norm": 0.4300788624635129, |
| "learning_rate": 3.731501057082453e-05, |
| "loss": 0.4337, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.9857387335995437, |
| "grad_norm": 0.4518840799967234, |
| "learning_rate": 3.72938689217759e-05, |
| "loss": 0.431, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.9868796349115802, |
| "grad_norm": 0.3917993799092396, |
| "learning_rate": 3.7272727272727276e-05, |
| "loss": 0.4052, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.9880205362236166, |
| "grad_norm": 0.4311974148203897, |
| "learning_rate": 3.7251585623678646e-05, |
| "loss": 0.4479, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.9891614375356532, |
| "grad_norm": 0.33663593761732574, |
| "learning_rate": 3.7230443974630024e-05, |
| "loss": 0.4426, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.9903023388476897, |
| "grad_norm": 0.41764811028477467, |
| "learning_rate": 3.7209302325581394e-05, |
| "loss": 0.4231, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.9914432401597262, |
| "grad_norm": 0.37959761334860076, |
| "learning_rate": 3.718816067653277e-05, |
| "loss": 0.437, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.9925841414717627, |
| "grad_norm": 0.3751287923553439, |
| "learning_rate": 3.716701902748414e-05, |
| "loss": 0.4359, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.9937250427837993, |
| "grad_norm": 0.39059717878139527, |
| "learning_rate": 3.714587737843552e-05, |
| "loss": 0.4266, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.9948659440958357, |
| "grad_norm": 0.3668029831866769, |
| "learning_rate": 3.71247357293869e-05, |
| "loss": 0.4346, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.9960068454078722, |
| "grad_norm": 0.3758670658619597, |
| "learning_rate": 3.710359408033827e-05, |
| "loss": 0.4247, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.9971477467199087, |
| "grad_norm": 0.3831467505102513, |
| "learning_rate": 3.7082452431289646e-05, |
| "loss": 0.4424, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.9982886480319453, |
| "grad_norm": 0.37818254454581546, |
| "learning_rate": 3.7061310782241016e-05, |
| "loss": 0.4441, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.9994295493439818, |
| "grad_norm": 0.38315281097821646, |
| "learning_rate": 3.7040169133192394e-05, |
| "loss": 0.4294, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.38315281097821646, |
| "learning_rate": 3.7019027484143764e-05, |
| "loss": 0.438, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.0011409013120365, |
| "grad_norm": 0.5455887151600689, |
| "learning_rate": 3.699788583509514e-05, |
| "loss": 0.3733, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.002281802624073, |
| "grad_norm": 0.36610896066989357, |
| "learning_rate": 3.697674418604651e-05, |
| "loss": 0.3699, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.0034227039361094, |
| "grad_norm": 0.40501415591406764, |
| "learning_rate": 3.695560253699788e-05, |
| "loss": 0.381, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.0045636052481461, |
| "grad_norm": 0.3825535046773347, |
| "learning_rate": 3.693446088794926e-05, |
| "loss": 0.3808, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.0057045065601826, |
| "grad_norm": 0.35102076165474366, |
| "learning_rate": 3.691331923890063e-05, |
| "loss": 0.3537, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.006845407872219, |
| "grad_norm": 0.34649503103224494, |
| "learning_rate": 3.689217758985201e-05, |
| "loss": 0.3736, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.0079863091842556, |
| "grad_norm": 0.4681279678485136, |
| "learning_rate": 3.6871035940803386e-05, |
| "loss": 0.3548, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.009127210496292, |
| "grad_norm": 0.37489320591183883, |
| "learning_rate": 3.6849894291754764e-05, |
| "loss": 0.3703, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.0102681118083285, |
| "grad_norm": 0.42678389224608554, |
| "learning_rate": 3.6828752642706135e-05, |
| "loss": 0.381, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.011409013120365, |
| "grad_norm": 0.4198303145341611, |
| "learning_rate": 3.680761099365751e-05, |
| "loss": 0.3937, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.0125499144324015, |
| "grad_norm": 0.4041636421810587, |
| "learning_rate": 3.678646934460888e-05, |
| "loss": 0.3726, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.0136908157444382, |
| "grad_norm": 0.3401962730269841, |
| "learning_rate": 3.676532769556025e-05, |
| "loss": 0.3655, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.0148317170564747, |
| "grad_norm": 0.3770208662137992, |
| "learning_rate": 3.674418604651163e-05, |
| "loss": 0.3802, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.0159726183685112, |
| "grad_norm": 0.46172099327646693, |
| "learning_rate": 3.6723044397463e-05, |
| "loss": 0.4032, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.0171135196805476, |
| "grad_norm": 0.3596991346706822, |
| "learning_rate": 3.670190274841438e-05, |
| "loss": 0.3815, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.0182544209925841, |
| "grad_norm": 0.49205467040562745, |
| "learning_rate": 3.668076109936575e-05, |
| "loss": 0.3497, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.0193953223046206, |
| "grad_norm": 0.33360531637571633, |
| "learning_rate": 3.665961945031713e-05, |
| "loss": 0.3668, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.020536223616657, |
| "grad_norm": 0.425656228941723, |
| "learning_rate": 3.66384778012685e-05, |
| "loss": 0.3691, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.0216771249286936, |
| "grad_norm": 0.3723856412773373, |
| "learning_rate": 3.6617336152219875e-05, |
| "loss": 0.3686, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.0228180262407303, |
| "grad_norm": 0.4885789857759674, |
| "learning_rate": 3.659619450317125e-05, |
| "loss": 0.3668, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.0239589275527667, |
| "grad_norm": 0.3445642312108721, |
| "learning_rate": 3.6575052854122623e-05, |
| "loss": 0.3674, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.0250998288648032, |
| "grad_norm": 0.5026457518007369, |
| "learning_rate": 3.6553911205074e-05, |
| "loss": 0.3539, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.0262407301768397, |
| "grad_norm": 0.3074717187657618, |
| "learning_rate": 3.653276955602537e-05, |
| "loss": 0.3758, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.0273816314888762, |
| "grad_norm": 0.39374297057459845, |
| "learning_rate": 3.651162790697675e-05, |
| "loss": 0.3406, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.0285225328009127, |
| "grad_norm": 0.3715766019853829, |
| "learning_rate": 3.649048625792812e-05, |
| "loss": 0.3701, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.0296634341129491, |
| "grad_norm": 0.36179693452859935, |
| "learning_rate": 3.64693446088795e-05, |
| "loss": 0.4046, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.0308043354249858, |
| "grad_norm": 0.35887466882761154, |
| "learning_rate": 3.644820295983087e-05, |
| "loss": 0.373, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.0319452367370223, |
| "grad_norm": 0.4627643573370423, |
| "learning_rate": 3.642706131078224e-05, |
| "loss": 0.3546, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.0330861380490588, |
| "grad_norm": 0.3414800929229019, |
| "learning_rate": 3.6405919661733616e-05, |
| "loss": 0.382, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.0342270393610953, |
| "grad_norm": 0.4167762624955359, |
| "learning_rate": 3.638477801268499e-05, |
| "loss": 0.3707, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.0353679406731318, |
| "grad_norm": 0.3634328382063604, |
| "learning_rate": 3.6363636363636364e-05, |
| "loss": 0.3892, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.0365088419851682, |
| "grad_norm": 0.39913860764154596, |
| "learning_rate": 3.6342494714587735e-05, |
| "loss": 0.3689, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.0376497432972047, |
| "grad_norm": 0.31150134389469925, |
| "learning_rate": 3.632135306553911e-05, |
| "loss": 0.3513, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.0387906446092412, |
| "grad_norm": 0.44513217932997295, |
| "learning_rate": 3.630021141649049e-05, |
| "loss": 0.4083, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.039931545921278, |
| "grad_norm": 0.29893701924629945, |
| "learning_rate": 3.627906976744187e-05, |
| "loss": 0.3476, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.0410724472333144, |
| "grad_norm": 0.36380817324672665, |
| "learning_rate": 3.625792811839324e-05, |
| "loss": 0.3694, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.0422133485453509, |
| "grad_norm": 0.3811952404266299, |
| "learning_rate": 3.623678646934461e-05, |
| "loss": 0.3512, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.0433542498573873, |
| "grad_norm": 0.3487936521280187, |
| "learning_rate": 3.6215644820295986e-05, |
| "loss": 0.3605, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.0444951511694238, |
| "grad_norm": 0.295589207624635, |
| "learning_rate": 3.619450317124736e-05, |
| "loss": 0.356, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.0456360524814603, |
| "grad_norm": 0.3361959857373228, |
| "learning_rate": 3.6173361522198734e-05, |
| "loss": 0.361, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.0467769537934968, |
| "grad_norm": 0.31037775502179515, |
| "learning_rate": 3.6152219873150105e-05, |
| "loss": 0.3599, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.0479178551055335, |
| "grad_norm": 0.3480868223602575, |
| "learning_rate": 3.613107822410148e-05, |
| "loss": 0.3727, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.04905875641757, |
| "grad_norm": 0.3759497242879828, |
| "learning_rate": 3.610993657505285e-05, |
| "loss": 0.3744, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.0501996577296064, |
| "grad_norm": 0.44625648445941285, |
| "learning_rate": 3.608879492600423e-05, |
| "loss": 0.3982, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.051340559041643, |
| "grad_norm": 0.3551571522322082, |
| "learning_rate": 3.60676532769556e-05, |
| "loss": 0.3683, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.0524814603536794, |
| "grad_norm": 0.3958943914214428, |
| "learning_rate": 3.604651162790698e-05, |
| "loss": 0.3663, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.0536223616657159, |
| "grad_norm": 0.39346013363138627, |
| "learning_rate": 3.6025369978858356e-05, |
| "loss": 0.3785, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.0547632629777524, |
| "grad_norm": 0.35191600740827306, |
| "learning_rate": 3.600422832980973e-05, |
| "loss": 0.3897, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.0559041642897888, |
| "grad_norm": 0.38676608726463646, |
| "learning_rate": 3.5983086680761104e-05, |
| "loss": 0.3729, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.0570450656018255, |
| "grad_norm": 0.34962772536528997, |
| "learning_rate": 3.5961945031712475e-05, |
| "loss": 0.3795, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.058185966913862, |
| "grad_norm": 0.3155978123445468, |
| "learning_rate": 3.594080338266385e-05, |
| "loss": 0.3533, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.0593268682258985, |
| "grad_norm": 0.35223510579525524, |
| "learning_rate": 3.591966173361522e-05, |
| "loss": 0.3706, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.060467769537935, |
| "grad_norm": 0.35708465819842106, |
| "learning_rate": 3.58985200845666e-05, |
| "loss": 0.3617, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.0616086708499715, |
| "grad_norm": 0.3678947649013475, |
| "learning_rate": 3.587737843551797e-05, |
| "loss": 0.3913, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.062749572162008, |
| "grad_norm": 0.37804755879302426, |
| "learning_rate": 3.585623678646934e-05, |
| "loss": 0.3643, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.0638904734740444, |
| "grad_norm": 0.4209197871034825, |
| "learning_rate": 3.583509513742072e-05, |
| "loss": 0.383, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.0650313747860811, |
| "grad_norm": 0.3875534348488064, |
| "learning_rate": 3.581395348837209e-05, |
| "loss": 0.3537, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.0661722760981176, |
| "grad_norm": 0.339713004433371, |
| "learning_rate": 3.579281183932347e-05, |
| "loss": 0.3528, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.067313177410154, |
| "grad_norm": 0.40452497385972475, |
| "learning_rate": 3.5771670190274845e-05, |
| "loss": 0.3799, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.0684540787221906, |
| "grad_norm": 0.41269005352350124, |
| "learning_rate": 3.575052854122622e-05, |
| "loss": 0.3588, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.069594980034227, |
| "grad_norm": 0.37102780996281465, |
| "learning_rate": 3.572938689217759e-05, |
| "loss": 0.3515, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.0707358813462635, |
| "grad_norm": 0.35749919468134717, |
| "learning_rate": 3.570824524312897e-05, |
| "loss": 0.3922, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.0718767826583, |
| "grad_norm": 0.449335797414619, |
| "learning_rate": 3.568710359408034e-05, |
| "loss": 0.3972, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.0730176839703365, |
| "grad_norm": 0.3711962817643683, |
| "learning_rate": 3.566596194503171e-05, |
| "loss": 0.3781, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.074158585282373, |
| "grad_norm": 0.39709697007286004, |
| "learning_rate": 3.564482029598309e-05, |
| "loss": 0.3747, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.0752994865944097, |
| "grad_norm": 0.37530640138836563, |
| "learning_rate": 3.562367864693446e-05, |
| "loss": 0.3683, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.0764403879064461, |
| "grad_norm": 0.42805517019254413, |
| "learning_rate": 3.560253699788584e-05, |
| "loss": 0.3743, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.0775812892184826, |
| "grad_norm": 0.3788980861432765, |
| "learning_rate": 3.558139534883721e-05, |
| "loss": 0.3519, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.078722190530519, |
| "grad_norm": 0.37575065677709124, |
| "learning_rate": 3.5560253699788586e-05, |
| "loss": 0.3635, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.0798630918425556, |
| "grad_norm": 0.35473463562879565, |
| "learning_rate": 3.5539112050739956e-05, |
| "loss": 0.3917, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.081003993154592, |
| "grad_norm": 0.5421488353080326, |
| "learning_rate": 3.5517970401691334e-05, |
| "loss": 0.3616, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.0821448944666285, |
| "grad_norm": 0.322957416404556, |
| "learning_rate": 3.5496828752642705e-05, |
| "loss": 0.3765, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.0832857957786652, |
| "grad_norm": 0.4798700548633995, |
| "learning_rate": 3.547568710359408e-05, |
| "loss": 0.3581, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.0844266970907017, |
| "grad_norm": 0.31013380709522304, |
| "learning_rate": 3.545454545454546e-05, |
| "loss": 0.3625, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.0855675984027382, |
| "grad_norm": 0.4206561403068851, |
| "learning_rate": 3.543340380549683e-05, |
| "loss": 0.3749, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.0867084997147747, |
| "grad_norm": 0.39833895686913684, |
| "learning_rate": 3.541226215644821e-05, |
| "loss": 0.3822, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.0878494010268112, |
| "grad_norm": 0.43256364250638124, |
| "learning_rate": 3.539112050739958e-05, |
| "loss": 0.3673, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.0889903023388476, |
| "grad_norm": 0.3675468188829437, |
| "learning_rate": 3.5369978858350956e-05, |
| "loss": 0.3717, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.0901312036508841, |
| "grad_norm": 0.3218252926229577, |
| "learning_rate": 3.5348837209302326e-05, |
| "loss": 0.375, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.0912721049629206, |
| "grad_norm": 0.40359712151176336, |
| "learning_rate": 3.5327695560253704e-05, |
| "loss": 0.3895, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.0924130062749573, |
| "grad_norm": 0.32814056788493573, |
| "learning_rate": 3.5306553911205075e-05, |
| "loss": 0.3496, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.0935539075869938, |
| "grad_norm": 0.3700443887990979, |
| "learning_rate": 3.5285412262156445e-05, |
| "loss": 0.3813, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.0946948088990303, |
| "grad_norm": 0.30296344478274184, |
| "learning_rate": 3.526427061310782e-05, |
| "loss": 0.3705, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.0958357102110667, |
| "grad_norm": 0.35084001507574447, |
| "learning_rate": 3.5243128964059193e-05, |
| "loss": 0.356, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.0969766115231032, |
| "grad_norm": 0.34046673230652147, |
| "learning_rate": 3.522198731501057e-05, |
| "loss": 0.3738, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.0981175128351397, |
| "grad_norm": 0.3591654008324595, |
| "learning_rate": 3.520084566596195e-05, |
| "loss": 0.3641, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.0992584141471762, |
| "grad_norm": 0.34788679397569927, |
| "learning_rate": 3.5179704016913326e-05, |
| "loss": 0.3861, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.1003993154592129, |
| "grad_norm": 0.3489129692311338, |
| "learning_rate": 3.5158562367864696e-05, |
| "loss": 0.3963, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.1015402167712494, |
| "grad_norm": 0.32115244707275425, |
| "learning_rate": 3.5137420718816074e-05, |
| "loss": 0.3772, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.1026811180832858, |
| "grad_norm": 0.36405343778575244, |
| "learning_rate": 3.5116279069767445e-05, |
| "loss": 0.3593, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.1038220193953223, |
| "grad_norm": 0.25934220146881914, |
| "learning_rate": 3.5095137420718815e-05, |
| "loss": 0.369, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.1049629207073588, |
| "grad_norm": 0.3459709424202936, |
| "learning_rate": 3.507399577167019e-05, |
| "loss": 0.3605, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.1061038220193953, |
| "grad_norm": 0.25530816000283274, |
| "learning_rate": 3.5052854122621563e-05, |
| "loss": 0.3702, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.1072447233314318, |
| "grad_norm": 0.2977170590076077, |
| "learning_rate": 3.503171247357294e-05, |
| "loss": 0.3737, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.1083856246434682, |
| "grad_norm": 0.2827441362997686, |
| "learning_rate": 3.501057082452431e-05, |
| "loss": 0.3759, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.109526525955505, |
| "grad_norm": 0.3244608088210149, |
| "learning_rate": 3.498942917547569e-05, |
| "loss": 0.3682, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.1106674272675414, |
| "grad_norm": 0.3347264076238722, |
| "learning_rate": 3.496828752642706e-05, |
| "loss": 0.3742, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.111808328579578, |
| "grad_norm": 0.30216880984211525, |
| "learning_rate": 3.494714587737844e-05, |
| "loss": 0.3725, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.1129492298916144, |
| "grad_norm": 0.3289978042150018, |
| "learning_rate": 3.4926004228329815e-05, |
| "loss": 0.3644, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.1140901312036509, |
| "grad_norm": 0.3238466739022087, |
| "learning_rate": 3.4904862579281185e-05, |
| "loss": 0.3519, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.1152310325156873, |
| "grad_norm": 0.31017493518179495, |
| "learning_rate": 3.488372093023256e-05, |
| "loss": 0.3626, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.1163719338277238, |
| "grad_norm": 0.3038861691228129, |
| "learning_rate": 3.4862579281183933e-05, |
| "loss": 0.3588, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.1175128351397605, |
| "grad_norm": 0.2929692138971038, |
| "learning_rate": 3.484143763213531e-05, |
| "loss": 0.3654, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.118653736451797, |
| "grad_norm": 0.33957255149071514, |
| "learning_rate": 3.482029598308668e-05, |
| "loss": 0.3741, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.1197946377638335, |
| "grad_norm": 0.37962287901659914, |
| "learning_rate": 3.479915433403806e-05, |
| "loss": 0.372, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.12093553907587, |
| "grad_norm": 0.30917582487173056, |
| "learning_rate": 3.477801268498943e-05, |
| "loss": 0.3625, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.1220764403879064, |
| "grad_norm": 0.43355534624442893, |
| "learning_rate": 3.47568710359408e-05, |
| "loss": 0.3857, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.123217341699943, |
| "grad_norm": 0.3181982233605466, |
| "learning_rate": 3.473572938689218e-05, |
| "loss": 0.3809, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.1243582430119794, |
| "grad_norm": 0.32622169252857547, |
| "learning_rate": 3.471458773784355e-05, |
| "loss": 0.3713, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.1254991443240159, |
| "grad_norm": 0.3327070573503023, |
| "learning_rate": 3.4693446088794926e-05, |
| "loss": 0.3657, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.1266400456360526, |
| "grad_norm": 0.3124938302800057, |
| "learning_rate": 3.4672304439746304e-05, |
| "loss": 0.3831, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.127780946948089, |
| "grad_norm": 0.31952256373453464, |
| "learning_rate": 3.465116279069768e-05, |
| "loss": 0.3727, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.1289218482601255, |
| "grad_norm": 0.3063299430772017, |
| "learning_rate": 3.463002114164905e-05, |
| "loss": 0.3891, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.130062749572162, |
| "grad_norm": 0.31462507340453627, |
| "learning_rate": 3.460887949260043e-05, |
| "loss": 0.3541, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.1312036508841985, |
| "grad_norm": 0.2890799992114355, |
| "learning_rate": 3.45877378435518e-05, |
| "loss": 0.3833, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.132344552196235, |
| "grad_norm": 0.3615461671219346, |
| "learning_rate": 3.456659619450317e-05, |
| "loss": 0.3704, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.1334854535082715, |
| "grad_norm": 0.3249056891163027, |
| "learning_rate": 3.454545454545455e-05, |
| "loss": 0.3745, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.1346263548203082, |
| "grad_norm": 0.3137775773273121, |
| "learning_rate": 3.452431289640592e-05, |
| "loss": 0.3851, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.1357672561323446, |
| "grad_norm": 0.29114940686367663, |
| "learning_rate": 3.4503171247357296e-05, |
| "loss": 0.3771, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.1369081574443811, |
| "grad_norm": 0.37624711275686223, |
| "learning_rate": 3.448202959830867e-05, |
| "loss": 0.3526, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.1380490587564176, |
| "grad_norm": 0.31260376745882296, |
| "learning_rate": 3.4460887949260044e-05, |
| "loss": 0.3741, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.139189960068454, |
| "grad_norm": 0.33182211780658555, |
| "learning_rate": 3.4439746300211415e-05, |
| "loss": 0.3672, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.1403308613804906, |
| "grad_norm": 0.25997411455628916, |
| "learning_rate": 3.441860465116279e-05, |
| "loss": 0.364, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.1426126640045635, |
| "grad_norm": 0.33589476782435423, |
| "learning_rate": 3.439746300211416e-05, |
| "loss": 0.3859, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.1437535653166, |
| "grad_norm": 0.323028968776872, |
| "learning_rate": 3.437632135306554e-05, |
| "loss": 0.3721, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.1448944666286367, |
| "grad_norm": 0.34849839129010746, |
| "learning_rate": 3.435517970401692e-05, |
| "loss": 0.3605, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.1460353679406732, |
| "grad_norm": 0.3593724612628344, |
| "learning_rate": 3.433403805496829e-05, |
| "loss": 0.3712, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.1471762692527097, |
| "grad_norm": 0.3840174852626447, |
| "learning_rate": 3.4312896405919666e-05, |
| "loss": 0.3756, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.1483171705647461, |
| "grad_norm": 0.3786901323232798, |
| "learning_rate": 3.429175475687104e-05, |
| "loss": 0.3897, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.1494580718767826, |
| "grad_norm": 0.4736250302570152, |
| "learning_rate": 3.4270613107822414e-05, |
| "loss": 0.3835, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.150598973188819, |
| "grad_norm": 0.3205148314531088, |
| "learning_rate": 3.4249471458773785e-05, |
| "loss": 0.373, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.1517398745008558, |
| "grad_norm": 0.3674962877821635, |
| "learning_rate": 3.422832980972516e-05, |
| "loss": 0.3809, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.1528807758128923, |
| "grad_norm": 0.38716940285294676, |
| "learning_rate": 3.420718816067653e-05, |
| "loss": 0.3718, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.1540216771249288, |
| "grad_norm": 0.34612933762885156, |
| "learning_rate": 3.4186046511627904e-05, |
| "loss": 0.3958, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.1551625784369652, |
| "grad_norm": 0.3261036228115381, |
| "learning_rate": 3.416490486257928e-05, |
| "loss": 0.3663, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.1563034797490017, |
| "grad_norm": 0.3798365061694364, |
| "learning_rate": 3.414376321353065e-05, |
| "loss": 0.371, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.1574443810610382, |
| "grad_norm": 0.36082089990308985, |
| "learning_rate": 3.412262156448203e-05, |
| "loss": 0.3807, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.1585852823730747, |
| "grad_norm": 0.3809948230684839, |
| "learning_rate": 3.410147991543341e-05, |
| "loss": 0.3696, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.1597261836851112, |
| "grad_norm": 0.3946308893177107, |
| "learning_rate": 3.4080338266384784e-05, |
| "loss": 0.3773, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.1608670849971476, |
| "grad_norm": 0.364442606139928, |
| "learning_rate": 3.4059196617336155e-05, |
| "loss": 0.361, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.1620079863091843, |
| "grad_norm": 0.3609258874429056, |
| "learning_rate": 3.403805496828753e-05, |
| "loss": 0.3856, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.1631488876212208, |
| "grad_norm": 1.1416343579165587, |
| "learning_rate": 3.40169133192389e-05, |
| "loss": 0.375, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.1642897889332573, |
| "grad_norm": 0.45946054420108273, |
| "learning_rate": 3.3995771670190274e-05, |
| "loss": 0.3431, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.1654306902452938, |
| "grad_norm": 0.3364729235961614, |
| "learning_rate": 3.397463002114165e-05, |
| "loss": 0.3501, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.1665715915573303, |
| "grad_norm": 0.4080943198746895, |
| "learning_rate": 3.395348837209302e-05, |
| "loss": 0.3789, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.1677124928693667, |
| "grad_norm": 0.44524984811221446, |
| "learning_rate": 3.39323467230444e-05, |
| "loss": 0.3677, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.1688533941814032, |
| "grad_norm": 0.41099661440760105, |
| "learning_rate": 3.391120507399577e-05, |
| "loss": 0.3556, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.16999429549344, |
| "grad_norm": 0.3574833952927246, |
| "learning_rate": 3.389006342494715e-05, |
| "loss": 0.3618, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.1711351968054764, |
| "grad_norm": 0.49459507464416336, |
| "learning_rate": 3.386892177589852e-05, |
| "loss": 0.3549, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.1722760981175129, |
| "grad_norm": 0.322252465481915, |
| "learning_rate": 3.3847780126849896e-05, |
| "loss": 0.3683, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.1734169994295494, |
| "grad_norm": 0.3327748277535711, |
| "learning_rate": 3.382663847780127e-05, |
| "loss": 0.3587, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.1745579007415858, |
| "grad_norm": 0.30790560462013256, |
| "learning_rate": 3.3805496828752644e-05, |
| "loss": 0.3778, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.1756988020536223, |
| "grad_norm": 0.4146437969607725, |
| "learning_rate": 3.378435517970402e-05, |
| "loss": 0.3831, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.1768397033656588, |
| "grad_norm": 0.3029308530687607, |
| "learning_rate": 3.376321353065539e-05, |
| "loss": 0.376, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.1779806046776953, |
| "grad_norm": 0.3883708207491139, |
| "learning_rate": 3.374207188160677e-05, |
| "loss": 0.3827, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.179121505989732, |
| "grad_norm": 0.3264973115537315, |
| "learning_rate": 3.372093023255814e-05, |
| "loss": 0.3709, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.1802624073017685, |
| "grad_norm": 0.3224818782407752, |
| "learning_rate": 3.369978858350952e-05, |
| "loss": 0.3479, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.181403308613805, |
| "grad_norm": 0.35837329807660184, |
| "learning_rate": 3.367864693446089e-05, |
| "loss": 0.3537, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.1825442099258414, |
| "grad_norm": 0.32978113024365624, |
| "learning_rate": 3.3657505285412266e-05, |
| "loss": 0.3828, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.183685111237878, |
| "grad_norm": 0.30478082030156534, |
| "learning_rate": 3.3636363636363636e-05, |
| "loss": 0.3719, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.1848260125499144, |
| "grad_norm": 0.4039017489716658, |
| "learning_rate": 3.361522198731501e-05, |
| "loss": 0.3802, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.1859669138619509, |
| "grad_norm": 0.3137696235218554, |
| "learning_rate": 3.3594080338266385e-05, |
| "loss": 0.3497, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.1871078151739876, |
| "grad_norm": 0.3770758080250968, |
| "learning_rate": 3.357293868921776e-05, |
| "loss": 0.3857, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.188248716486024, |
| "grad_norm": 0.378178032731245, |
| "learning_rate": 3.355179704016913e-05, |
| "loss": 0.3825, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.1893896177980605, |
| "grad_norm": 0.3339076735915448, |
| "learning_rate": 3.353065539112051e-05, |
| "loss": 0.3554, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.190530519110097, |
| "grad_norm": 0.32202843560236183, |
| "learning_rate": 3.350951374207189e-05, |
| "loss": 0.385, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.1916714204221335, |
| "grad_norm": 0.39205899390410504, |
| "learning_rate": 3.348837209302326e-05, |
| "loss": 0.3939, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.19281232173417, |
| "grad_norm": 0.31585241143330434, |
| "learning_rate": 3.3467230443974636e-05, |
| "loss": 0.3697, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.1939532230462064, |
| "grad_norm": 0.34721707553982895, |
| "learning_rate": 3.3446088794926007e-05, |
| "loss": 0.3725, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.195094124358243, |
| "grad_norm": 0.3224483535503891, |
| "learning_rate": 3.342494714587738e-05, |
| "loss": 0.4002, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.1962350256702796, |
| "grad_norm": 0.34957765959120296, |
| "learning_rate": 3.3403805496828755e-05, |
| "loss": 0.3627, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.197375926982316, |
| "grad_norm": 0.38328812141174423, |
| "learning_rate": 3.3382663847780125e-05, |
| "loss": 0.4014, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.1985168282943526, |
| "grad_norm": 0.3758584671491964, |
| "learning_rate": 3.33615221987315e-05, |
| "loss": 0.3696, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.199657729606389, |
| "grad_norm": 0.39611799283306404, |
| "learning_rate": 3.3340380549682874e-05, |
| "loss": 0.387, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.2007986309184255, |
| "grad_norm": 0.2987773907456679, |
| "learning_rate": 3.331923890063425e-05, |
| "loss": 0.3879, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.201939532230462, |
| "grad_norm": 0.36426120265256484, |
| "learning_rate": 3.329809725158562e-05, |
| "loss": 0.362, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.2030804335424985, |
| "grad_norm": 0.36560634288948834, |
| "learning_rate": 3.3276955602537e-05, |
| "loss": 0.3509, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.2042213348545352, |
| "grad_norm": 0.3035450394151478, |
| "learning_rate": 3.3255813953488377e-05, |
| "loss": 0.376, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.2053622361665717, |
| "grad_norm": 0.39302963075707603, |
| "learning_rate": 3.323467230443975e-05, |
| "loss": 0.3699, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.2065031374786082, |
| "grad_norm": 0.35343270894560463, |
| "learning_rate": 3.3213530655391125e-05, |
| "loss": 0.3707, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.2076440387906446, |
| "grad_norm": 0.32922752073268385, |
| "learning_rate": 3.3192389006342495e-05, |
| "loss": 0.362, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.2087849401026811, |
| "grad_norm": 0.3497263627038085, |
| "learning_rate": 3.317124735729387e-05, |
| "loss": 0.3544, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.2099258414147176, |
| "grad_norm": 0.38176942411154147, |
| "learning_rate": 3.3150105708245244e-05, |
| "loss": 0.3786, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.211066742726754, |
| "grad_norm": 0.3655333255212552, |
| "learning_rate": 3.312896405919662e-05, |
| "loss": 0.3844, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.2122076440387906, |
| "grad_norm": 0.3562621090561445, |
| "learning_rate": 3.310782241014799e-05, |
| "loss": 0.3634, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.213348545350827, |
| "grad_norm": 0.3900734684566537, |
| "learning_rate": 3.308668076109937e-05, |
| "loss": 0.3837, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.2144894466628637, |
| "grad_norm": 0.3140299737074897, |
| "learning_rate": 3.306553911205074e-05, |
| "loss": 0.381, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.2156303479749002, |
| "grad_norm": 0.3167356658778117, |
| "learning_rate": 3.304439746300211e-05, |
| "loss": 0.3751, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.2167712492869367, |
| "grad_norm": 0.32986942860860147, |
| "learning_rate": 3.302325581395349e-05, |
| "loss": 0.3671, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.2179121505989732, |
| "grad_norm": 0.3980484794765463, |
| "learning_rate": 3.3002114164904865e-05, |
| "loss": 0.3922, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.2190530519110097, |
| "grad_norm": 0.31025070985969755, |
| "learning_rate": 3.298097251585624e-05, |
| "loss": 0.3612, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.2201939532230461, |
| "grad_norm": 0.3954022677035698, |
| "learning_rate": 3.2959830866807614e-05, |
| "loss": 0.3775, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.2213348545350828, |
| "grad_norm": 0.3398950566588672, |
| "learning_rate": 3.293868921775899e-05, |
| "loss": 0.3922, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.2224757558471193, |
| "grad_norm": 0.35576853153937077, |
| "learning_rate": 3.291754756871036e-05, |
| "loss": 0.3746, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.2236166571591558, |
| "grad_norm": 0.30802379229978166, |
| "learning_rate": 3.289640591966173e-05, |
| "loss": 0.3823, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.2247575584711923, |
| "grad_norm": 0.3493515239803427, |
| "learning_rate": 3.287526427061311e-05, |
| "loss": 0.3541, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.2258984597832288, |
| "grad_norm": 0.27413733268524487, |
| "learning_rate": 3.285412262156448e-05, |
| "loss": 0.3654, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.2270393610952652, |
| "grad_norm": 0.30282716452139746, |
| "learning_rate": 3.283298097251586e-05, |
| "loss": 0.3711, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.2281802624073017, |
| "grad_norm": 0.31145240791005063, |
| "learning_rate": 3.281183932346723e-05, |
| "loss": 0.3583, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.2293211637193382, |
| "grad_norm": 0.27129448563700514, |
| "learning_rate": 3.2790697674418606e-05, |
| "loss": 0.3643, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.2304620650313747, |
| "grad_norm": 0.3295699887395353, |
| "learning_rate": 3.276955602536998e-05, |
| "loss": 0.347, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.2316029663434114, |
| "grad_norm": 0.3276186811143977, |
| "learning_rate": 3.2748414376321354e-05, |
| "loss": 0.3712, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.2327438676554479, |
| "grad_norm": 0.34011289116421417, |
| "learning_rate": 3.272727272727273e-05, |
| "loss": 0.3789, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.2338847689674843, |
| "grad_norm": 0.3430954560333754, |
| "learning_rate": 3.27061310782241e-05, |
| "loss": 0.388, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.2350256702795208, |
| "grad_norm": 0.28511422528620944, |
| "learning_rate": 3.268498942917548e-05, |
| "loss": 0.3712, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.2361665715915573, |
| "grad_norm": 0.3469153679842275, |
| "learning_rate": 3.266384778012685e-05, |
| "loss": 0.3479, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.2373074729035938, |
| "grad_norm": 0.31225044583573147, |
| "learning_rate": 3.264270613107823e-05, |
| "loss": 0.366, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.2384483742156303, |
| "grad_norm": 0.3205379010988631, |
| "learning_rate": 3.26215644820296e-05, |
| "loss": 0.3818, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.239589275527667, |
| "grad_norm": 0.2840480563572048, |
| "learning_rate": 3.2600422832980976e-05, |
| "loss": 0.3624, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.2407301768397034, |
| "grad_norm": 0.3526143479765244, |
| "learning_rate": 3.257928118393235e-05, |
| "loss": 0.3703, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.24187107815174, |
| "grad_norm": 0.2684134070992963, |
| "learning_rate": 3.2558139534883724e-05, |
| "loss": 0.3577, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.2430119794637764, |
| "grad_norm": 0.32212969469475267, |
| "learning_rate": 3.2536997885835095e-05, |
| "loss": 0.3402, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.2441528807758129, |
| "grad_norm": 0.3240375629745611, |
| "learning_rate": 3.2515856236786466e-05, |
| "loss": 0.394, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.2452937820878494, |
| "grad_norm": 0.2829430362775589, |
| "learning_rate": 3.249471458773784e-05, |
| "loss": 0.3738, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.2464346833998858, |
| "grad_norm": 0.3415069305615634, |
| "learning_rate": 3.247357293868922e-05, |
| "loss": 0.3772, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.2475755847119223, |
| "grad_norm": 0.35519434270985334, |
| "learning_rate": 3.245243128964059e-05, |
| "loss": 0.3958, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.248716486023959, |
| "grad_norm": 0.2887851058517861, |
| "learning_rate": 3.243128964059197e-05, |
| "loss": 0.3746, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.2498573873359955, |
| "grad_norm": 0.3456876409406876, |
| "learning_rate": 3.2410147991543346e-05, |
| "loss": 0.3647, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.250998288648032, |
| "grad_norm": 0.2923005180020809, |
| "learning_rate": 3.238900634249472e-05, |
| "loss": 0.3703, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.2521391899600685, |
| "grad_norm": 0.33057352412548374, |
| "learning_rate": 3.2367864693446094e-05, |
| "loss": 0.3643, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.253280091272105, |
| "grad_norm": 0.36994392272110005, |
| "learning_rate": 3.2346723044397465e-05, |
| "loss": 0.3662, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.2544209925841414, |
| "grad_norm": 0.2935627570419225, |
| "learning_rate": 3.2325581395348836e-05, |
| "loss": 0.3835, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.2555618938961781, |
| "grad_norm": 0.35305608593147453, |
| "learning_rate": 3.230443974630021e-05, |
| "loss": 0.3741, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.2567027952082146, |
| "grad_norm": 0.30256830350387487, |
| "learning_rate": 3.2283298097251584e-05, |
| "loss": 0.3688, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.257843696520251, |
| "grad_norm": 0.3460982440766267, |
| "learning_rate": 3.226215644820296e-05, |
| "loss": 0.3653, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.2589845978322876, |
| "grad_norm": 0.34403905347729535, |
| "learning_rate": 3.224101479915433e-05, |
| "loss": 0.3857, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.260125499144324, |
| "grad_norm": 0.3530369430889585, |
| "learning_rate": 3.221987315010571e-05, |
| "loss": 0.372, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.2612664004563605, |
| "grad_norm": 0.3342161669182045, |
| "learning_rate": 3.219873150105708e-05, |
| "loss": 0.3725, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.262407301768397, |
| "grad_norm": 0.36362820655955347, |
| "learning_rate": 3.217758985200846e-05, |
| "loss": 0.3684, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.2635482030804335, |
| "grad_norm": 0.3241577589711961, |
| "learning_rate": 3.2156448202959835e-05, |
| "loss": 0.3756, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.26468910439247, |
| "grad_norm": 0.29636070346273397, |
| "learning_rate": 3.2135306553911206e-05, |
| "loss": 0.3747, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.2658300057045064, |
| "grad_norm": 0.3752679637351401, |
| "learning_rate": 3.211416490486258e-05, |
| "loss": 0.3628, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.2669709070165431, |
| "grad_norm": 0.36063049502355077, |
| "learning_rate": 3.2093023255813954e-05, |
| "loss": 0.3788, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.2681118083285796, |
| "grad_norm": 0.36063293655342027, |
| "learning_rate": 3.207188160676533e-05, |
| "loss": 0.3647, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.269252709640616, |
| "grad_norm": 0.3876467932971257, |
| "learning_rate": 3.20507399577167e-05, |
| "loss": 0.3588, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.2703936109526526, |
| "grad_norm": 0.3877619528273815, |
| "learning_rate": 3.202959830866808e-05, |
| "loss": 0.3611, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.271534512264689, |
| "grad_norm": 0.3317695650930697, |
| "learning_rate": 3.200845665961945e-05, |
| "loss": 0.3613, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.2726754135767255, |
| "grad_norm": 0.3586599244171131, |
| "learning_rate": 3.198731501057083e-05, |
| "loss": 0.3581, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.2738163148887622, |
| "grad_norm": 0.32768476193965773, |
| "learning_rate": 3.19661733615222e-05, |
| "loss": 0.3624, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.2749572162007987, |
| "grad_norm": 0.342066085799505, |
| "learning_rate": 3.194503171247357e-05, |
| "loss": 0.3931, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.2760981175128352, |
| "grad_norm": 0.3212269324643488, |
| "learning_rate": 3.1923890063424947e-05, |
| "loss": 0.3824, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.2772390188248717, |
| "grad_norm": 0.34149971973938353, |
| "learning_rate": 3.1902748414376324e-05, |
| "loss": 0.3761, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.2783799201369082, |
| "grad_norm": 0.3708195020111092, |
| "learning_rate": 3.18816067653277e-05, |
| "loss": 0.3832, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.2795208214489446, |
| "grad_norm": 0.3662946332428756, |
| "learning_rate": 3.186046511627907e-05, |
| "loss": 0.3788, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.2806617227609811, |
| "grad_norm": 0.2718084249660657, |
| "learning_rate": 3.183932346723045e-05, |
| "loss": 0.3762, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.2818026240730176, |
| "grad_norm": 0.3549508122271783, |
| "learning_rate": 3.181818181818182e-05, |
| "loss": 0.3507, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.282943525385054, |
| "grad_norm": 0.3456469078705619, |
| "learning_rate": 3.17970401691332e-05, |
| "loss": 0.3788, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.2840844266970908, |
| "grad_norm": 0.29275976639218304, |
| "learning_rate": 3.177589852008457e-05, |
| "loss": 0.3637, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.2852253280091273, |
| "grad_norm": 0.35290128309001717, |
| "learning_rate": 3.175475687103594e-05, |
| "loss": 0.3851, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.2863662293211637, |
| "grad_norm": 0.31844354937645575, |
| "learning_rate": 3.173361522198732e-05, |
| "loss": 0.3786, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.2875071306332002, |
| "grad_norm": 0.3607279985421439, |
| "learning_rate": 3.171247357293869e-05, |
| "loss": 0.3584, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.2886480319452367, |
| "grad_norm": 0.3258151642831157, |
| "learning_rate": 3.1691331923890065e-05, |
| "loss": 0.3807, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.2897889332572732, |
| "grad_norm": 0.3584686282000858, |
| "learning_rate": 3.1670190274841435e-05, |
| "loss": 0.4076, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.2909298345693099, |
| "grad_norm": 0.34153478084703925, |
| "learning_rate": 3.164904862579281e-05, |
| "loss": 0.3538, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.2920707358813464, |
| "grad_norm": 0.3176502770934002, |
| "learning_rate": 3.162790697674419e-05, |
| "loss": 0.3794, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.2932116371933828, |
| "grad_norm": 0.3577524872996408, |
| "learning_rate": 3.160676532769556e-05, |
| "loss": 0.365, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.2943525385054193, |
| "grad_norm": 0.33076084058529676, |
| "learning_rate": 3.158562367864694e-05, |
| "loss": 0.3514, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.2954934398174558, |
| "grad_norm": 0.38879916007923476, |
| "learning_rate": 3.156448202959831e-05, |
| "loss": 0.3743, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.2966343411294923, |
| "grad_norm": 0.3635936827843266, |
| "learning_rate": 3.154334038054969e-05, |
| "loss": 0.3655, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.2977752424415288, |
| "grad_norm": 0.3476006034419538, |
| "learning_rate": 3.152219873150106e-05, |
| "loss": 0.3889, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.2989161437535652, |
| "grad_norm": 0.3667463941248487, |
| "learning_rate": 3.1501057082452435e-05, |
| "loss": 0.3971, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.3000570450656017, |
| "grad_norm": 0.310954947386153, |
| "learning_rate": 3.1479915433403805e-05, |
| "loss": 0.3753, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.3011979463776384, |
| "grad_norm": 0.36203472318239655, |
| "learning_rate": 3.145877378435518e-05, |
| "loss": 0.3613, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.302338847689675, |
| "grad_norm": 0.3494822072314133, |
| "learning_rate": 3.1437632135306554e-05, |
| "loss": 0.3851, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.3034797490017114, |
| "grad_norm": 0.3516976871249764, |
| "learning_rate": 3.141649048625793e-05, |
| "loss": 0.3574, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.3046206503137479, |
| "grad_norm": 0.26973236707033366, |
| "learning_rate": 3.13953488372093e-05, |
| "loss": 0.3791, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.3057615516257843, |
| "grad_norm": 0.3429365439599929, |
| "learning_rate": 3.137420718816067e-05, |
| "loss": 0.366, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.3069024529378208, |
| "grad_norm": 0.3165521363453188, |
| "learning_rate": 3.135306553911205e-05, |
| "loss": 0.3884, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.3080433542498575, |
| "grad_norm": 0.297830837920527, |
| "learning_rate": 3.133192389006343e-05, |
| "loss": 0.3801, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.309184255561894, |
| "grad_norm": 0.2876870489632023, |
| "learning_rate": 3.1310782241014805e-05, |
| "loss": 0.3721, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.3103251568739305, |
| "grad_norm": 0.3037731699134318, |
| "learning_rate": 3.1289640591966176e-05, |
| "loss": 0.3752, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.311466058185967, |
| "grad_norm": 0.32611078798800086, |
| "learning_rate": 3.126849894291755e-05, |
| "loss": 0.3733, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.3126069594980034, |
| "grad_norm": 0.2895325920295833, |
| "learning_rate": 3.1247357293868924e-05, |
| "loss": 0.3544, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.31374786081004, |
| "grad_norm": 0.3654499418349549, |
| "learning_rate": 3.1226215644820294e-05, |
| "loss": 0.3733, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.3148887621220764, |
| "grad_norm": 0.36567998053713596, |
| "learning_rate": 3.120507399577167e-05, |
| "loss": 0.3732, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.3160296634341129, |
| "grad_norm": 0.34435773909859874, |
| "learning_rate": 3.118393234672304e-05, |
| "loss": 0.3913, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.3171705647461494, |
| "grad_norm": 0.3957305592645215, |
| "learning_rate": 3.116279069767442e-05, |
| "loss": 0.3661, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.3183114660581858, |
| "grad_norm": 0.3374259528454154, |
| "learning_rate": 3.114164904862579e-05, |
| "loss": 0.3574, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.3194523673702225, |
| "grad_norm": 0.2937491447581557, |
| "learning_rate": 3.112050739957717e-05, |
| "loss": 0.3655, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.320593268682259, |
| "grad_norm": 0.36541534067539616, |
| "learning_rate": 3.109936575052854e-05, |
| "loss": 0.3967, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.3217341699942955, |
| "grad_norm": 0.32171994431469614, |
| "learning_rate": 3.1078224101479916e-05, |
| "loss": 0.3623, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.322875071306332, |
| "grad_norm": 0.32768929811903286, |
| "learning_rate": 3.1057082452431294e-05, |
| "loss": 0.381, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.3240159726183685, |
| "grad_norm": 0.28688520786573474, |
| "learning_rate": 3.1035940803382664e-05, |
| "loss": 0.3614, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.3251568739304052, |
| "grad_norm": 0.3447454760055943, |
| "learning_rate": 3.101479915433404e-05, |
| "loss": 0.3835, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.3262977752424416, |
| "grad_norm": 0.30834089409956156, |
| "learning_rate": 3.099365750528541e-05, |
| "loss": 0.3676, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.3274386765544781, |
| "grad_norm": 0.3769175020167887, |
| "learning_rate": 3.097251585623679e-05, |
| "loss": 0.3812, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.3285795778665146, |
| "grad_norm": 0.3603398260692364, |
| "learning_rate": 3.095137420718816e-05, |
| "loss": 0.3885, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.329720479178551, |
| "grad_norm": 0.3818452964334466, |
| "learning_rate": 3.093023255813954e-05, |
| "loss": 0.3666, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.3308613804905876, |
| "grad_norm": 0.341137726154606, |
| "learning_rate": 3.090909090909091e-05, |
| "loss": 0.3626, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.332002281802624, |
| "grad_norm": 0.3677916501891261, |
| "learning_rate": 3.0887949260042286e-05, |
| "loss": 0.3742, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.3331431831146605, |
| "grad_norm": 0.35238646235818044, |
| "learning_rate": 3.086680761099366e-05, |
| "loss": 0.3776, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.334284084426697, |
| "grad_norm": 0.3241309831420008, |
| "learning_rate": 3.084566596194503e-05, |
| "loss": 0.4018, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.3354249857387335, |
| "grad_norm": 0.4211931838361165, |
| "learning_rate": 3.0824524312896405e-05, |
| "loss": 0.3666, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.3365658870507702, |
| "grad_norm": 0.3432763006800476, |
| "learning_rate": 3.080338266384778e-05, |
| "loss": 0.3815, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.3377067883628067, |
| "grad_norm": 0.3527700767350808, |
| "learning_rate": 3.078224101479916e-05, |
| "loss": 0.3583, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.3388476896748431, |
| "grad_norm": 0.37964149901308397, |
| "learning_rate": 3.076109936575053e-05, |
| "loss": 0.3579, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.3399885909868796, |
| "grad_norm": 0.28730602914506537, |
| "learning_rate": 3.073995771670191e-05, |
| "loss": 0.3468, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.341129492298916, |
| "grad_norm": 0.3002903212997977, |
| "learning_rate": 3.071881606765328e-05, |
| "loss": 0.3559, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.3422703936109526, |
| "grad_norm": 0.3297601544213069, |
| "learning_rate": 3.0697674418604656e-05, |
| "loss": 0.3726, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.3434112949229893, |
| "grad_norm": 0.2746332939414886, |
| "learning_rate": 3.067653276955603e-05, |
| "loss": 0.3701, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.3445521962350258, |
| "grad_norm": 0.260651600604371, |
| "learning_rate": 3.06553911205074e-05, |
| "loss": 0.3519, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.3456930975470622, |
| "grad_norm": 0.30888849266481555, |
| "learning_rate": 3.0634249471458775e-05, |
| "loss": 0.4036, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.3468339988590987, |
| "grad_norm": 0.2709838548905517, |
| "learning_rate": 3.0613107822410146e-05, |
| "loss": 0.3742, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.3479749001711352, |
| "grad_norm": 0.28410459677729055, |
| "learning_rate": 3.059196617336152e-05, |
| "loss": 0.3694, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.3491158014831717, |
| "grad_norm": 0.27240207939222316, |
| "learning_rate": 3.0570824524312894e-05, |
| "loss": 0.3541, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.3502567027952082, |
| "grad_norm": 0.28022840245663483, |
| "learning_rate": 3.054968287526427e-05, |
| "loss": 0.3556, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.3513976041072446, |
| "grad_norm": 0.3026651192991682, |
| "learning_rate": 3.052854122621565e-05, |
| "loss": 0.3787, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.3525385054192811, |
| "grad_norm": 0.3221082586985108, |
| "learning_rate": 3.0507399577167023e-05, |
| "loss": 0.3677, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.3536794067313178, |
| "grad_norm": 0.2993724031412757, |
| "learning_rate": 3.0486257928118394e-05, |
| "loss": 0.3961, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.3548203080433543, |
| "grad_norm": 0.36575229654879793, |
| "learning_rate": 3.0465116279069768e-05, |
| "loss": 0.3806, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.3559612093553908, |
| "grad_norm": 0.2618840843711902, |
| "learning_rate": 3.0443974630021145e-05, |
| "loss": 0.3809, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.3571021106674273, |
| "grad_norm": 0.33259999621304476, |
| "learning_rate": 3.0422832980972516e-05, |
| "loss": 0.3664, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.3582430119794637, |
| "grad_norm": 0.2906615103162758, |
| "learning_rate": 3.0401691331923893e-05, |
| "loss": 0.3631, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.3593839132915002, |
| "grad_norm": 0.30230407067896337, |
| "learning_rate": 3.0380549682875264e-05, |
| "loss": 0.3699, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.360524814603537, |
| "grad_norm": 0.31210182393997005, |
| "learning_rate": 3.035940803382664e-05, |
| "loss": 0.3714, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.3616657159155734, |
| "grad_norm": 0.2808150657733752, |
| "learning_rate": 3.0338266384778012e-05, |
| "loss": 0.3594, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.3628066172276099, |
| "grad_norm": 0.3136491102076318, |
| "learning_rate": 3.031712473572939e-05, |
| "loss": 0.3671, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.3639475185396464, |
| "grad_norm": 0.32544208291676757, |
| "learning_rate": 3.0295983086680764e-05, |
| "loss": 0.3636, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.3650884198516828, |
| "grad_norm": 0.29240635927645753, |
| "learning_rate": 3.0274841437632134e-05, |
| "loss": 0.3688, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.3662293211637193, |
| "grad_norm": 0.3045047299850035, |
| "learning_rate": 3.0253699788583512e-05, |
| "loss": 0.3695, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.3673702224757558, |
| "grad_norm": 0.3038195498825899, |
| "learning_rate": 3.0232558139534883e-05, |
| "loss": 0.3655, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.3685111237877923, |
| "grad_norm": 0.34280384948848547, |
| "learning_rate": 3.021141649048626e-05, |
| "loss": 0.3787, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.3696520250998288, |
| "grad_norm": 0.29537008807280474, |
| "learning_rate": 3.019027484143763e-05, |
| "loss": 0.3649, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.3707929264118655, |
| "grad_norm": 0.2988844933069723, |
| "learning_rate": 3.0169133192389008e-05, |
| "loss": 0.3577, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.371933827723902, |
| "grad_norm": 0.3038068470192822, |
| "learning_rate": 3.0147991543340382e-05, |
| "loss": 0.3624, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.3730747290359384, |
| "grad_norm": 0.3195670739387878, |
| "learning_rate": 3.012684989429176e-05, |
| "loss": 0.3622, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.374215630347975, |
| "grad_norm": 0.33884359703682376, |
| "learning_rate": 3.010570824524313e-05, |
| "loss": 0.3712, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.3753565316600114, |
| "grad_norm": 0.30921551673694203, |
| "learning_rate": 3.00845665961945e-05, |
| "loss": 0.3871, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.3764974329720479, |
| "grad_norm": 0.38097121845936377, |
| "learning_rate": 3.006342494714588e-05, |
| "loss": 0.3861, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.3776383342840846, |
| "grad_norm": 0.2684259417323635, |
| "learning_rate": 3.0042283298097253e-05, |
| "loss": 0.3782, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.378779235596121, |
| "grad_norm": 0.2956290222982241, |
| "learning_rate": 3.002114164904863e-05, |
| "loss": 0.3713, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.3799201369081575, |
| "grad_norm": 0.370180759036463, |
| "learning_rate": 3e-05, |
| "loss": 0.3828, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.381061038220194, |
| "grad_norm": 0.25836075503652833, |
| "learning_rate": 2.9978858350951378e-05, |
| "loss": 0.3647, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.3822019395322305, |
| "grad_norm": 0.3468462978558045, |
| "learning_rate": 2.995771670190275e-05, |
| "loss": 0.3765, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.383342840844267, |
| "grad_norm": 0.2824173533303826, |
| "learning_rate": 2.9936575052854126e-05, |
| "loss": 0.3681, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.3844837421563034, |
| "grad_norm": 0.28736316984448956, |
| "learning_rate": 2.9915433403805497e-05, |
| "loss": 0.3625, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.38562464346834, |
| "grad_norm": 0.33703904092494497, |
| "learning_rate": 2.989429175475687e-05, |
| "loss": 0.3777, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.3867655447803764, |
| "grad_norm": 0.29634799772730275, |
| "learning_rate": 2.987315010570825e-05, |
| "loss": 0.3608, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.3879064460924129, |
| "grad_norm": 0.3384257392542635, |
| "learning_rate": 2.985200845665962e-05, |
| "loss": 0.3602, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.3890473474044496, |
| "grad_norm": 0.27944486323405615, |
| "learning_rate": 2.9830866807610997e-05, |
| "loss": 0.3464, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.390188248716486, |
| "grad_norm": 0.32014182507961697, |
| "learning_rate": 2.9809725158562367e-05, |
| "loss": 0.3739, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.3913291500285225, |
| "grad_norm": 0.33271962982884457, |
| "learning_rate": 2.9788583509513745e-05, |
| "loss": 0.3969, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.392470051340559, |
| "grad_norm": 0.30683561080527283, |
| "learning_rate": 2.976744186046512e-05, |
| "loss": 0.3896, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.3936109526525955, |
| "grad_norm": 0.31727387176204047, |
| "learning_rate": 2.9746300211416493e-05, |
| "loss": 0.3706, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.3947518539646322, |
| "grad_norm": 0.35232887418187936, |
| "learning_rate": 2.9725158562367867e-05, |
| "loss": 0.357, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.3958927552766687, |
| "grad_norm": 0.2941768931758356, |
| "learning_rate": 2.9704016913319238e-05, |
| "loss": 0.3916, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.3970336565887052, |
| "grad_norm": 0.38849939713177317, |
| "learning_rate": 2.9682875264270615e-05, |
| "loss": 0.3731, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.3981745579007416, |
| "grad_norm": 0.2733119638416779, |
| "learning_rate": 2.9661733615221986e-05, |
| "loss": 0.3508, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.3993154592127781, |
| "grad_norm": 0.3141094815641651, |
| "learning_rate": 2.9640591966173363e-05, |
| "loss": 0.3747, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.4004563605248146, |
| "grad_norm": 0.25985281656595854, |
| "learning_rate": 2.9619450317124737e-05, |
| "loss": 0.3561, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.401597261836851, |
| "grad_norm": 0.30023051357215597, |
| "learning_rate": 2.9598308668076115e-05, |
| "loss": 0.3632, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.4027381631488876, |
| "grad_norm": 0.3000718719373125, |
| "learning_rate": 2.9577167019027486e-05, |
| "loss": 0.3793, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.403879064460924, |
| "grad_norm": 0.2784816189918396, |
| "learning_rate": 2.9556025369978863e-05, |
| "loss": 0.3622, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.4050199657729605, |
| "grad_norm": 0.33685900817290626, |
| "learning_rate": 2.9534883720930234e-05, |
| "loss": 0.3782, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.4061608670849972, |
| "grad_norm": 0.27572068061345933, |
| "learning_rate": 2.9513742071881604e-05, |
| "loss": 0.3622, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.4073017683970337, |
| "grad_norm": 0.31338924633780607, |
| "learning_rate": 2.9492600422832982e-05, |
| "loss": 0.381, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.4084426697090702, |
| "grad_norm": 0.2734014953931813, |
| "learning_rate": 2.9471458773784356e-05, |
| "loss": 0.3546, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.4095835710211067, |
| "grad_norm": 0.3026690395161937, |
| "learning_rate": 2.9450317124735733e-05, |
| "loss": 0.3628, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.4107244723331431, |
| "grad_norm": 0.3562181622146391, |
| "learning_rate": 2.9429175475687104e-05, |
| "loss": 0.3561, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.4118653736451796, |
| "grad_norm": 0.2768764956360295, |
| "learning_rate": 2.940803382663848e-05, |
| "loss": 0.3779, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.4130062749572163, |
| "grad_norm": 0.3441377334550278, |
| "learning_rate": 2.9386892177589852e-05, |
| "loss": 0.3666, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.4141471762692528, |
| "grad_norm": 0.3387001258999047, |
| "learning_rate": 2.9365750528541226e-05, |
| "loss": 0.3789, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.4152880775812893, |
| "grad_norm": 0.30476509136714863, |
| "learning_rate": 2.9344608879492604e-05, |
| "loss": 0.3645, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.4164289788933258, |
| "grad_norm": 0.37908310081130964, |
| "learning_rate": 2.9323467230443974e-05, |
| "loss": 0.3774, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.4175698802053622, |
| "grad_norm": 0.32793422438669423, |
| "learning_rate": 2.9302325581395352e-05, |
| "loss": 0.3734, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.4187107815173987, |
| "grad_norm": 0.3277132324333076, |
| "learning_rate": 2.9281183932346723e-05, |
| "loss": 0.3904, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.4198516828294352, |
| "grad_norm": 0.3312953337286729, |
| "learning_rate": 2.92600422832981e-05, |
| "loss": 0.3692, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.4209925841414717, |
| "grad_norm": 0.2715055972763672, |
| "learning_rate": 2.923890063424947e-05, |
| "loss": 0.3655, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.4221334854535082, |
| "grad_norm": 0.2953587172579261, |
| "learning_rate": 2.9217758985200848e-05, |
| "loss": 0.3766, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.4232743867655449, |
| "grad_norm": 0.31388222966291707, |
| "learning_rate": 2.9196617336152222e-05, |
| "loss": 0.3674, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.4244152880775813, |
| "grad_norm": 0.35965122052908766, |
| "learning_rate": 2.9175475687103593e-05, |
| "loss": 0.3782, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.4255561893896178, |
| "grad_norm": 0.30486150383607696, |
| "learning_rate": 2.915433403805497e-05, |
| "loss": 0.368, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.4266970907016543, |
| "grad_norm": 0.39677093495998783, |
| "learning_rate": 2.913319238900634e-05, |
| "loss": 0.3683, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.4278379920136908, |
| "grad_norm": 0.3223810641459737, |
| "learning_rate": 2.911205073995772e-05, |
| "loss": 0.3689, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.4289788933257273, |
| "grad_norm": 0.30507032725118605, |
| "learning_rate": 2.909090909090909e-05, |
| "loss": 0.3705, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.430119794637764, |
| "grad_norm": 0.4194926217364012, |
| "learning_rate": 2.9069767441860467e-05, |
| "loss": 0.3687, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.4312606959498004, |
| "grad_norm": 0.2954199664136386, |
| "learning_rate": 2.904862579281184e-05, |
| "loss": 0.3728, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.432401597261837, |
| "grad_norm": 0.41519643808435247, |
| "learning_rate": 2.9027484143763218e-05, |
| "loss": 0.3616, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.4335424985738734, |
| "grad_norm": 0.37013820480001497, |
| "learning_rate": 2.900634249471459e-05, |
| "loss": 0.3631, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.4346833998859099, |
| "grad_norm": 0.3637646288174245, |
| "learning_rate": 2.898520084566596e-05, |
| "loss": 0.3705, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.4358243011979464, |
| "grad_norm": 0.3994429680732055, |
| "learning_rate": 2.8964059196617337e-05, |
| "loss": 0.3603, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.4369652025099828, |
| "grad_norm": 0.3261584175677973, |
| "learning_rate": 2.894291754756871e-05, |
| "loss": 0.3783, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.4381061038220193, |
| "grad_norm": 0.3535660656201677, |
| "learning_rate": 2.892177589852009e-05, |
| "loss": 0.3722, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.4392470051340558, |
| "grad_norm": 0.3547559486950115, |
| "learning_rate": 2.890063424947146e-05, |
| "loss": 0.385, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.4403879064460925, |
| "grad_norm": 0.27982112384758195, |
| "learning_rate": 2.8879492600422837e-05, |
| "loss": 0.3651, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.441528807758129, |
| "grad_norm": 0.33283939892409153, |
| "learning_rate": 2.8858350951374207e-05, |
| "loss": 0.3657, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.4426697090701655, |
| "grad_norm": 0.32733079528541736, |
| "learning_rate": 2.8837209302325585e-05, |
| "loss": 0.3787, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.443810610382202, |
| "grad_norm": 0.23586854044772235, |
| "learning_rate": 2.8816067653276956e-05, |
| "loss": 0.3585, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.4449515116942384, |
| "grad_norm": 0.3178604473656048, |
| "learning_rate": 2.879492600422833e-05, |
| "loss": 0.3617, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.446092413006275, |
| "grad_norm": 0.340401195862735, |
| "learning_rate": 2.8773784355179707e-05, |
| "loss": 0.3569, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.4472333143183116, |
| "grad_norm": 0.2832755699487134, |
| "learning_rate": 2.8752642706131078e-05, |
| "loss": 0.3752, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.448374215630348, |
| "grad_norm": 0.2816782125944015, |
| "learning_rate": 2.8731501057082455e-05, |
| "loss": 0.3694, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.4495151169423846, |
| "grad_norm": 0.2962764604229308, |
| "learning_rate": 2.8710359408033826e-05, |
| "loss": 0.3631, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.450656018254421, |
| "grad_norm": 0.3123088681993934, |
| "learning_rate": 2.8689217758985203e-05, |
| "loss": 0.3982, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.4517969195664575, |
| "grad_norm": 0.2589384687492925, |
| "learning_rate": 2.8668076109936574e-05, |
| "loss": 0.3663, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.452937820878494, |
| "grad_norm": 0.39661680089050527, |
| "learning_rate": 2.864693446088795e-05, |
| "loss": 0.3854, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.4540787221905305, |
| "grad_norm": 0.25225760585377377, |
| "learning_rate": 2.8625792811839326e-05, |
| "loss": 0.346, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.455219623502567, |
| "grad_norm": 0.3721747355722219, |
| "learning_rate": 2.8604651162790696e-05, |
| "loss": 0.3704, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.4563605248146034, |
| "grad_norm": 0.36112828697634103, |
| "learning_rate": 2.8583509513742074e-05, |
| "loss": 0.3779, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.45750142612664, |
| "grad_norm": 0.3074224938411391, |
| "learning_rate": 2.8562367864693444e-05, |
| "loss": 0.376, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.4586423274386766, |
| "grad_norm": 0.30091086011362805, |
| "learning_rate": 2.8541226215644822e-05, |
| "loss": 0.3551, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.459783228750713, |
| "grad_norm": 0.29332910598868295, |
| "learning_rate": 2.8520084566596196e-05, |
| "loss": 0.3588, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.4609241300627496, |
| "grad_norm": 0.27911592405959207, |
| "learning_rate": 2.8498942917547573e-05, |
| "loss": 0.3878, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.462065031374786, |
| "grad_norm": 0.26555424161809305, |
| "learning_rate": 2.8477801268498944e-05, |
| "loss": 0.3643, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.4632059326868225, |
| "grad_norm": 0.3114698661350436, |
| "learning_rate": 2.845665961945032e-05, |
| "loss": 0.3891, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.4643468339988592, |
| "grad_norm": 0.3417032679196954, |
| "learning_rate": 2.8435517970401692e-05, |
| "loss": 0.378, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.4654877353108957, |
| "grad_norm": 0.31269026412846357, |
| "learning_rate": 2.8414376321353063e-05, |
| "loss": 0.3632, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.4666286366229322, |
| "grad_norm": 0.2818282401954472, |
| "learning_rate": 2.839323467230444e-05, |
| "loss": 0.3857, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.4677695379349687, |
| "grad_norm": 0.3242138101510427, |
| "learning_rate": 2.8372093023255815e-05, |
| "loss": 0.3671, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.4689104392470052, |
| "grad_norm": 0.2899834506946572, |
| "learning_rate": 2.8350951374207192e-05, |
| "loss": 0.3672, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.4700513405590416, |
| "grad_norm": 0.3605761691098186, |
| "learning_rate": 2.8329809725158563e-05, |
| "loss": 0.3763, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.471192241871078, |
| "grad_norm": 0.33147697016869365, |
| "learning_rate": 2.830866807610994e-05, |
| "loss": 0.3525, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.4723331431831146, |
| "grad_norm": 0.325133970011816, |
| "learning_rate": 2.828752642706131e-05, |
| "loss": 0.3796, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.473474044495151, |
| "grad_norm": 0.33180923891881353, |
| "learning_rate": 2.8266384778012688e-05, |
| "loss": 0.3526, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.4746149458071875, |
| "grad_norm": 0.30830437851470766, |
| "learning_rate": 2.824524312896406e-05, |
| "loss": 0.3619, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.4757558471192243, |
| "grad_norm": 0.34255116892939635, |
| "learning_rate": 2.8224101479915433e-05, |
| "loss": 0.3557, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.4768967484312607, |
| "grad_norm": 0.3676335419437175, |
| "learning_rate": 2.820295983086681e-05, |
| "loss": 0.3826, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.4780376497432972, |
| "grad_norm": 0.3473952158913241, |
| "learning_rate": 2.818181818181818e-05, |
| "loss": 0.3696, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.4791785510553337, |
| "grad_norm": 0.3437354048268917, |
| "learning_rate": 2.816067653276956e-05, |
| "loss": 0.3697, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.4803194523673702, |
| "grad_norm": 0.34931348276289553, |
| "learning_rate": 2.813953488372093e-05, |
| "loss": 0.3867, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.4814603536794066, |
| "grad_norm": 0.3516920235276441, |
| "learning_rate": 2.8118393234672307e-05, |
| "loss": 0.3883, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.4826012549914434, |
| "grad_norm": 0.3369724056209684, |
| "learning_rate": 2.809725158562368e-05, |
| "loss": 0.3651, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.4837421563034798, |
| "grad_norm": 0.40382857751436074, |
| "learning_rate": 2.807610993657506e-05, |
| "loss": 0.3782, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.4848830576155163, |
| "grad_norm": 0.3095489263656417, |
| "learning_rate": 2.805496828752643e-05, |
| "loss": 0.3676, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.4860239589275528, |
| "grad_norm": 0.3503334326036625, |
| "learning_rate": 2.80338266384778e-05, |
| "loss": 0.3537, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.4871648602395893, |
| "grad_norm": 0.3311850238618065, |
| "learning_rate": 2.8012684989429177e-05, |
| "loss": 0.3624, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.4883057615516257, |
| "grad_norm": 0.3844061940542019, |
| "learning_rate": 2.7991543340380548e-05, |
| "loss": 0.365, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.4894466628636622, |
| "grad_norm": 0.30280777632485556, |
| "learning_rate": 2.7970401691331925e-05, |
| "loss": 0.3782, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.4905875641756987, |
| "grad_norm": 0.38759228805496665, |
| "learning_rate": 2.79492600422833e-05, |
| "loss": 0.3681, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.4917284654877352, |
| "grad_norm": 0.30175396840705576, |
| "learning_rate": 2.7928118393234677e-05, |
| "loss": 0.3587, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.492869366799772, |
| "grad_norm": 0.38149939268945615, |
| "learning_rate": 2.7906976744186048e-05, |
| "loss": 0.3876, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.4940102681118084, |
| "grad_norm": 0.3287914417483383, |
| "learning_rate": 2.7885835095137425e-05, |
| "loss": 0.3665, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.4951511694238449, |
| "grad_norm": 0.34723730837713296, |
| "learning_rate": 2.7864693446088796e-05, |
| "loss": 0.3799, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.4962920707358813, |
| "grad_norm": 0.3538733140564447, |
| "learning_rate": 2.784355179704017e-05, |
| "loss": 0.3705, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.4974329720479178, |
| "grad_norm": 0.28754610115885065, |
| "learning_rate": 2.7822410147991547e-05, |
| "loss": 0.3685, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.4985738733599543, |
| "grad_norm": 0.35941620116530376, |
| "learning_rate": 2.7801268498942918e-05, |
| "loss": 0.357, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.499714774671991, |
| "grad_norm": 0.28384788457688803, |
| "learning_rate": 2.7780126849894295e-05, |
| "loss": 0.3873, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.5008556759840275, |
| "grad_norm": 0.2921901422746972, |
| "learning_rate": 2.7758985200845666e-05, |
| "loss": 0.3576, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.501996577296064, |
| "grad_norm": 0.3069192169786753, |
| "learning_rate": 2.7737843551797044e-05, |
| "loss": 0.3912, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.5031374786081004, |
| "grad_norm": 0.28324765644412975, |
| "learning_rate": 2.7716701902748414e-05, |
| "loss": 0.3649, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.504278379920137, |
| "grad_norm": 0.2961812995454121, |
| "learning_rate": 2.769556025369979e-05, |
| "loss": 0.3482, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.5054192812321734, |
| "grad_norm": 0.32061195189305464, |
| "learning_rate": 2.7674418604651166e-05, |
| "loss": 0.3935, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.5065601825442099, |
| "grad_norm": 0.32516542840350554, |
| "learning_rate": 2.7653276955602536e-05, |
| "loss": 0.3589, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.5077010838562463, |
| "grad_norm": 0.3089964135811039, |
| "learning_rate": 2.7632135306553914e-05, |
| "loss": 0.3827, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.5088419851682828, |
| "grad_norm": 0.32133331637060614, |
| "learning_rate": 2.7610993657505285e-05, |
| "loss": 0.4043, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.5099828864803193, |
| "grad_norm": 0.31101837387755843, |
| "learning_rate": 2.7589852008456662e-05, |
| "loss": 0.3734, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.511123787792356, |
| "grad_norm": 0.3158550217117038, |
| "learning_rate": 2.7568710359408033e-05, |
| "loss": 0.3665, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.5122646891043925, |
| "grad_norm": 0.31029750240919934, |
| "learning_rate": 2.754756871035941e-05, |
| "loss": 0.3787, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.513405590416429, |
| "grad_norm": 0.3044823379693363, |
| "learning_rate": 2.7526427061310784e-05, |
| "loss": 0.3621, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.5145464917284654, |
| "grad_norm": 0.31325599526226944, |
| "learning_rate": 2.7505285412262155e-05, |
| "loss": 0.382, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.5156873930405022, |
| "grad_norm": 0.3126617606799847, |
| "learning_rate": 2.7484143763213532e-05, |
| "loss": 0.3404, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.5168282943525386, |
| "grad_norm": 0.3676906021498789, |
| "learning_rate": 2.7463002114164903e-05, |
| "loss": 0.3776, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.517969195664575, |
| "grad_norm": 0.32710198071826563, |
| "learning_rate": 2.744186046511628e-05, |
| "loss": 0.3789, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.5191100969766116, |
| "grad_norm": 0.29538279939957807, |
| "learning_rate": 2.7420718816067655e-05, |
| "loss": 0.3805, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.520250998288648, |
| "grad_norm": 0.4323552211327581, |
| "learning_rate": 2.7399577167019032e-05, |
| "loss": 0.3979, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.5213918996006845, |
| "grad_norm": 0.30159103255087294, |
| "learning_rate": 2.7378435517970403e-05, |
| "loss": 0.3609, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.522532800912721, |
| "grad_norm": 0.34952476702805724, |
| "learning_rate": 2.735729386892178e-05, |
| "loss": 0.392, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.5236737022247575, |
| "grad_norm": 0.4043924554170926, |
| "learning_rate": 2.733615221987315e-05, |
| "loss": 0.3681, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.524814603536794, |
| "grad_norm": 0.33704014908802854, |
| "learning_rate": 2.731501057082452e-05, |
| "loss": 0.3544, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.5259555048488305, |
| "grad_norm": 0.3811213990480285, |
| "learning_rate": 2.72938689217759e-05, |
| "loss": 0.3867, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.527096406160867, |
| "grad_norm": 0.3717796810852278, |
| "learning_rate": 2.7272727272727273e-05, |
| "loss": 0.3931, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.5282373074729034, |
| "grad_norm": 0.3356101639570807, |
| "learning_rate": 2.725158562367865e-05, |
| "loss": 0.3809, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.5293782087849401, |
| "grad_norm": 0.3543267000421066, |
| "learning_rate": 2.723044397463002e-05, |
| "loss": 0.3603, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.5305191100969766, |
| "grad_norm": 0.41995732245076695, |
| "learning_rate": 2.72093023255814e-05, |
| "loss": 0.3655, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.531660011409013, |
| "grad_norm": 0.30788330372687395, |
| "learning_rate": 2.718816067653277e-05, |
| "loss": 0.353, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.5328009127210498, |
| "grad_norm": 0.4323021854865316, |
| "learning_rate": 2.7167019027484147e-05, |
| "loss": 0.3737, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.5339418140330863, |
| "grad_norm": 0.2932504556713375, |
| "learning_rate": 2.7145877378435518e-05, |
| "loss": 0.3725, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.5350827153451228, |
| "grad_norm": 0.38646332196884464, |
| "learning_rate": 2.712473572938689e-05, |
| "loss": 0.3836, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.5362236166571592, |
| "grad_norm": 0.3318875060177627, |
| "learning_rate": 2.710359408033827e-05, |
| "loss": 0.3587, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.5373645179691957, |
| "grad_norm": 0.39425050106556736, |
| "learning_rate": 2.708245243128964e-05, |
| "loss": 0.356, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.5385054192812322, |
| "grad_norm": 0.3312786798058442, |
| "learning_rate": 2.7061310782241017e-05, |
| "loss": 0.3819, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.5396463205932687, |
| "grad_norm": 0.3751189043301931, |
| "learning_rate": 2.7040169133192388e-05, |
| "loss": 0.358, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.5407872219053051, |
| "grad_norm": 0.45866033683350615, |
| "learning_rate": 2.7019027484143765e-05, |
| "loss": 0.3623, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.5419281232173416, |
| "grad_norm": 0.293269475978877, |
| "learning_rate": 2.699788583509514e-05, |
| "loss": 0.3675, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.543069024529378, |
| "grad_norm": 0.5264603659706322, |
| "learning_rate": 2.6976744186046517e-05, |
| "loss": 0.3801, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.5442099258414146, |
| "grad_norm": 0.3169556373264047, |
| "learning_rate": 2.6955602536997888e-05, |
| "loss": 0.3632, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.545350827153451, |
| "grad_norm": 0.49559534834221575, |
| "learning_rate": 2.6934460887949258e-05, |
| "loss": 0.3695, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.5464917284654878, |
| "grad_norm": 0.2853680856246618, |
| "learning_rate": 2.6913319238900636e-05, |
| "loss": 0.3505, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.5476326297775242, |
| "grad_norm": 0.35237363707507235, |
| "learning_rate": 2.6892177589852006e-05, |
| "loss": 0.363, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.5487735310895607, |
| "grad_norm": 0.34812240762886904, |
| "learning_rate": 2.6871035940803384e-05, |
| "loss": 0.356, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.5499144324015972, |
| "grad_norm": 0.3249639412396184, |
| "learning_rate": 2.6849894291754758e-05, |
| "loss": 0.3676, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.551055333713634, |
| "grad_norm": 0.34991069473657654, |
| "learning_rate": 2.6828752642706135e-05, |
| "loss": 0.359, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.5521962350256704, |
| "grad_norm": 0.29576634258933143, |
| "learning_rate": 2.6807610993657506e-05, |
| "loss": 0.3671, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.5533371363377069, |
| "grad_norm": 0.34606104122898584, |
| "learning_rate": 2.6786469344608884e-05, |
| "loss": 0.371, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.5544780376497433, |
| "grad_norm": 0.2775582193569763, |
| "learning_rate": 2.6765327695560254e-05, |
| "loss": 0.3629, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.5556189389617798, |
| "grad_norm": 0.3247050074902207, |
| "learning_rate": 2.674418604651163e-05, |
| "loss": 0.3842, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.5567598402738163, |
| "grad_norm": 0.2723391641667623, |
| "learning_rate": 2.6723044397463002e-05, |
| "loss": 0.3604, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.5579007415858528, |
| "grad_norm": 0.36316336391236725, |
| "learning_rate": 2.6701902748414376e-05, |
| "loss": 0.3947, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.5590416428978893, |
| "grad_norm": 0.275443180804818, |
| "learning_rate": 2.6680761099365754e-05, |
| "loss": 0.3961, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.5601825442099257, |
| "grad_norm": 0.35579725890937663, |
| "learning_rate": 2.6659619450317125e-05, |
| "loss": 0.3846, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.5613234455219622, |
| "grad_norm": 0.26830941897973276, |
| "learning_rate": 2.6638477801268502e-05, |
| "loss": 0.3835, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.5624643468339987, |
| "grad_norm": 0.30243736502540497, |
| "learning_rate": 2.6617336152219873e-05, |
| "loss": 0.3481, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.5636052481460354, |
| "grad_norm": 0.31682892993854983, |
| "learning_rate": 2.659619450317125e-05, |
| "loss": 0.3684, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.5647461494580719, |
| "grad_norm": 0.27334699163010084, |
| "learning_rate": 2.6575052854122624e-05, |
| "loss": 0.358, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.5658870507701084, |
| "grad_norm": 0.2895943256047113, |
| "learning_rate": 2.6553911205073995e-05, |
| "loss": 0.3601, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.5670279520821448, |
| "grad_norm": 0.28755144042185043, |
| "learning_rate": 2.6532769556025372e-05, |
| "loss": 0.3717, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.5681688533941815, |
| "grad_norm": 0.25968518258420775, |
| "learning_rate": 2.6511627906976743e-05, |
| "loss": 0.3626, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.569309754706218, |
| "grad_norm": 0.32179811901000466, |
| "learning_rate": 2.649048625792812e-05, |
| "loss": 0.3673, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.5704506560182545, |
| "grad_norm": 0.2766533809177961, |
| "learning_rate": 2.646934460887949e-05, |
| "loss": 0.3691, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.571591557330291, |
| "grad_norm": 0.302638177000013, |
| "learning_rate": 2.644820295983087e-05, |
| "loss": 0.3672, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.5727324586423275, |
| "grad_norm": 0.296654847118796, |
| "learning_rate": 2.6427061310782243e-05, |
| "loss": 0.3812, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.573873359954364, |
| "grad_norm": 0.33824246630089, |
| "learning_rate": 2.640591966173362e-05, |
| "loss": 0.3756, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.5750142612664004, |
| "grad_norm": 0.3351383592040746, |
| "learning_rate": 2.638477801268499e-05, |
| "loss": 0.3766, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.576155162578437, |
| "grad_norm": 0.3024600320447683, |
| "learning_rate": 2.636363636363636e-05, |
| "loss": 0.3662, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.5772960638904734, |
| "grad_norm": 0.32753567856330584, |
| "learning_rate": 2.634249471458774e-05, |
| "loss": 0.3565, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.5784369652025099, |
| "grad_norm": 0.345187119181163, |
| "learning_rate": 2.6321353065539113e-05, |
| "loss": 0.3799, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.5795778665145463, |
| "grad_norm": 0.3331299603253503, |
| "learning_rate": 2.6300211416490487e-05, |
| "loss": 0.3987, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.580718767826583, |
| "grad_norm": 0.28725692061029895, |
| "learning_rate": 2.627906976744186e-05, |
| "loss": 0.3383, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.5818596691386195, |
| "grad_norm": 0.3111425790157666, |
| "learning_rate": 2.625792811839324e-05, |
| "loss": 0.3617, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.583000570450656, |
| "grad_norm": 0.299296343009948, |
| "learning_rate": 2.623678646934461e-05, |
| "loss": 0.3644, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.5841414717626925, |
| "grad_norm": 0.3347632168210099, |
| "learning_rate": 2.6215644820295987e-05, |
| "loss": 0.3723, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.5852823730747292, |
| "grad_norm": 0.31376561832951744, |
| "learning_rate": 2.6194503171247358e-05, |
| "loss": 0.3658, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.5864232743867657, |
| "grad_norm": 0.31027997164647236, |
| "learning_rate": 2.617336152219873e-05, |
| "loss": 0.3743, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.5875641756988021, |
| "grad_norm": 0.3526552435594915, |
| "learning_rate": 2.615221987315011e-05, |
| "loss": 0.3599, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.5887050770108386, |
| "grad_norm": 0.29310310962923514, |
| "learning_rate": 2.613107822410148e-05, |
| "loss": 0.3459, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.589845978322875, |
| "grad_norm": 0.300313890699162, |
| "learning_rate": 2.6109936575052857e-05, |
| "loss": 0.3638, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.5909868796349116, |
| "grad_norm": 0.35597737339719526, |
| "learning_rate": 2.6088794926004228e-05, |
| "loss": 0.3629, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.592127780946948, |
| "grad_norm": 0.29485050155881004, |
| "learning_rate": 2.6067653276955605e-05, |
| "loss": 0.3495, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.5932686822589845, |
| "grad_norm": 0.3083586629060851, |
| "learning_rate": 2.6046511627906976e-05, |
| "loss": 0.3682, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.594409583571021, |
| "grad_norm": 0.3087204856725598, |
| "learning_rate": 2.6025369978858354e-05, |
| "loss": 0.3516, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.5955504848830575, |
| "grad_norm": 0.28096277736373027, |
| "learning_rate": 2.6004228329809728e-05, |
| "loss": 0.365, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.596691386195094, |
| "grad_norm": 0.30757276234849873, |
| "learning_rate": 2.59830866807611e-05, |
| "loss": 0.3732, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.5978322875071307, |
| "grad_norm": 0.3333668786223208, |
| "learning_rate": 2.5961945031712476e-05, |
| "loss": 0.375, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.5989731888191672, |
| "grad_norm": 0.2651214513170331, |
| "learning_rate": 2.5940803382663846e-05, |
| "loss": 0.3599, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.6001140901312036, |
| "grad_norm": 0.2973829631907778, |
| "learning_rate": 2.5919661733615224e-05, |
| "loss": 0.3605, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.6012549914432401, |
| "grad_norm": 0.2831085318391084, |
| "learning_rate": 2.5898520084566598e-05, |
| "loss": 0.3491, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.6023958927552768, |
| "grad_norm": 0.32151978953955745, |
| "learning_rate": 2.5877378435517975e-05, |
| "loss": 0.3752, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.6035367940673133, |
| "grad_norm": 0.2821353356916035, |
| "learning_rate": 2.5856236786469346e-05, |
| "loss": 0.3588, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.6046776953793498, |
| "grad_norm": 0.3125896600678469, |
| "learning_rate": 2.5835095137420717e-05, |
| "loss": 0.3708, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.6058185966913863, |
| "grad_norm": 0.29262459892442727, |
| "learning_rate": 2.5813953488372094e-05, |
| "loss": 0.3893, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.6069594980034227, |
| "grad_norm": 0.3339011655139542, |
| "learning_rate": 2.5792811839323465e-05, |
| "loss": 0.3824, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.6081003993154592, |
| "grad_norm": 0.3015512370604447, |
| "learning_rate": 2.5771670190274842e-05, |
| "loss": 0.361, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.6092413006274957, |
| "grad_norm": 0.2682503512944308, |
| "learning_rate": 2.5750528541226217e-05, |
| "loss": 0.3606, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.6103822019395322, |
| "grad_norm": 0.2939224257467947, |
| "learning_rate": 2.5729386892177594e-05, |
| "loss": 0.3706, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.6115231032515687, |
| "grad_norm": 0.29144618672695827, |
| "learning_rate": 2.5708245243128965e-05, |
| "loss": 0.364, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.6126640045636051, |
| "grad_norm": 0.2905815562354008, |
| "learning_rate": 2.5687103594080342e-05, |
| "loss": 0.3565, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.6138049058756416, |
| "grad_norm": 0.30246721937468807, |
| "learning_rate": 2.5665961945031713e-05, |
| "loss": 0.3718, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.614945807187678, |
| "grad_norm": 0.2714015002455204, |
| "learning_rate": 2.5644820295983087e-05, |
| "loss": 0.358, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.6160867084997148, |
| "grad_norm": 0.29518888476520655, |
| "learning_rate": 2.562367864693446e-05, |
| "loss": 0.3659, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.6172276098117513, |
| "grad_norm": 0.32113379666355385, |
| "learning_rate": 2.5602536997885835e-05, |
| "loss": 0.3523, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.6183685111237878, |
| "grad_norm": 0.3084244799881773, |
| "learning_rate": 2.5581395348837212e-05, |
| "loss": 0.3632, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.6195094124358242, |
| "grad_norm": 0.3112848716857277, |
| "learning_rate": 2.5560253699788583e-05, |
| "loss": 0.3604, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.620650313747861, |
| "grad_norm": 0.297659167096412, |
| "learning_rate": 2.553911205073996e-05, |
| "loss": 0.372, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.6217912150598974, |
| "grad_norm": 0.38804953050492647, |
| "learning_rate": 2.551797040169133e-05, |
| "loss": 0.3778, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.622932116371934, |
| "grad_norm": 0.3422019172941039, |
| "learning_rate": 2.549682875264271e-05, |
| "loss": 0.378, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.6240730176839704, |
| "grad_norm": 0.291456523422963, |
| "learning_rate": 2.5475687103594083e-05, |
| "loss": 0.3592, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.6252139189960069, |
| "grad_norm": 0.3890506166879392, |
| "learning_rate": 2.5454545454545454e-05, |
| "loss": 0.3775, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.6263548203080433, |
| "grad_norm": 0.32715486375630815, |
| "learning_rate": 2.543340380549683e-05, |
| "loss": 0.3721, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.6274957216200798, |
| "grad_norm": 0.3095339401874632, |
| "learning_rate": 2.54122621564482e-05, |
| "loss": 0.3543, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.6286366229321163, |
| "grad_norm": 0.2968567670795314, |
| "learning_rate": 2.539112050739958e-05, |
| "loss": 0.368, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.6297775242441528, |
| "grad_norm": 0.3009801689582668, |
| "learning_rate": 2.536997885835095e-05, |
| "loss": 0.3443, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.6309184255561893, |
| "grad_norm": 0.25882579033711073, |
| "learning_rate": 2.5348837209302327e-05, |
| "loss": 0.3706, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.6320593268682257, |
| "grad_norm": 0.27518164377492776, |
| "learning_rate": 2.53276955602537e-05, |
| "loss": 0.3717, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.6332002281802624, |
| "grad_norm": 0.32265443241190234, |
| "learning_rate": 2.530655391120508e-05, |
| "loss": 0.3933, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.634341129492299, |
| "grad_norm": 0.30698302763604135, |
| "learning_rate": 2.528541226215645e-05, |
| "loss": 0.362, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.6354820308043354, |
| "grad_norm": 0.2715331709115899, |
| "learning_rate": 2.526427061310782e-05, |
| "loss": 0.3749, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.6366229321163719, |
| "grad_norm": 0.3091602334662984, |
| "learning_rate": 2.5243128964059198e-05, |
| "loss": 0.3524, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.6377638334284086, |
| "grad_norm": 0.34223110608310403, |
| "learning_rate": 2.5221987315010572e-05, |
| "loss": 0.3891, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.638904734740445, |
| "grad_norm": 0.28760443573042516, |
| "learning_rate": 2.5200845665961946e-05, |
| "loss": 0.3681, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.6400456360524815, |
| "grad_norm": 0.30154305625963906, |
| "learning_rate": 2.517970401691332e-05, |
| "loss": 0.3641, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.641186537364518, |
| "grad_norm": 0.3720716239663011, |
| "learning_rate": 2.5158562367864697e-05, |
| "loss": 0.3991, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.6423274386765545, |
| "grad_norm": 0.28530367783404104, |
| "learning_rate": 2.5137420718816068e-05, |
| "loss": 0.3808, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.643468339988591, |
| "grad_norm": 0.27013626954015546, |
| "learning_rate": 2.5116279069767445e-05, |
| "loss": 0.3688, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.6446092413006275, |
| "grad_norm": 0.31682127904243484, |
| "learning_rate": 2.5095137420718816e-05, |
| "loss": 0.3541, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.645750142612664, |
| "grad_norm": 0.2841931208482495, |
| "learning_rate": 2.507399577167019e-05, |
| "loss": 0.3602, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.6468910439247004, |
| "grad_norm": 0.276074554952914, |
| "learning_rate": 2.5052854122621568e-05, |
| "loss": 0.3632, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.648031945236737, |
| "grad_norm": 0.3045129394868517, |
| "learning_rate": 2.503171247357294e-05, |
| "loss": 0.3612, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.6491728465487734, |
| "grad_norm": 0.2711753572169419, |
| "learning_rate": 2.5010570824524316e-05, |
| "loss": 0.3695, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.65031374786081, |
| "grad_norm": 0.31184353074715726, |
| "learning_rate": 2.4989429175475687e-05, |
| "loss": 0.3577, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.6514546491728466, |
| "grad_norm": 0.2736196705378409, |
| "learning_rate": 2.496828752642706e-05, |
| "loss": 0.3538, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.652595550484883, |
| "grad_norm": 0.2829200724593569, |
| "learning_rate": 2.4947145877378435e-05, |
| "loss": 0.3703, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.6537364517969195, |
| "grad_norm": 0.296167668333878, |
| "learning_rate": 2.4926004228329812e-05, |
| "loss": 0.3682, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.6548773531089562, |
| "grad_norm": 0.2624154269631874, |
| "learning_rate": 2.4904862579281186e-05, |
| "loss": 0.3637, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.6560182544209927, |
| "grad_norm": 0.29443772973012555, |
| "learning_rate": 2.488372093023256e-05, |
| "loss": 0.3474, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.6571591557330292, |
| "grad_norm": 0.308700169528517, |
| "learning_rate": 2.4862579281183934e-05, |
| "loss": 0.3827, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.6583000570450657, |
| "grad_norm": 0.29493785751514123, |
| "learning_rate": 2.484143763213531e-05, |
| "loss": 0.3421, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.6594409583571021, |
| "grad_norm": 0.2713838260766285, |
| "learning_rate": 2.4820295983086683e-05, |
| "loss": 0.351, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.6605818596691386, |
| "grad_norm": 0.28390750706850665, |
| "learning_rate": 2.4799154334038057e-05, |
| "loss": 0.3493, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.661722760981175, |
| "grad_norm": 0.2990257269654475, |
| "learning_rate": 2.477801268498943e-05, |
| "loss": 0.3612, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.6628636622932116, |
| "grad_norm": 0.2665546960167287, |
| "learning_rate": 2.4756871035940805e-05, |
| "loss": 0.3576, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.664004563605248, |
| "grad_norm": 0.30439565309196187, |
| "learning_rate": 2.473572938689218e-05, |
| "loss": 0.3576, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.6651454649172845, |
| "grad_norm": 0.33814380783466985, |
| "learning_rate": 2.4714587737843553e-05, |
| "loss": 0.384, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.666286366229321, |
| "grad_norm": 0.30098003633111947, |
| "learning_rate": 2.4693446088794927e-05, |
| "loss": 0.3613, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.6674272675413577, |
| "grad_norm": 0.294974905406365, |
| "learning_rate": 2.46723044397463e-05, |
| "loss": 0.3707, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.6685681688533942, |
| "grad_norm": 0.3708465734189445, |
| "learning_rate": 2.4651162790697675e-05, |
| "loss": 0.3733, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.6697090701654307, |
| "grad_norm": 0.3843774740323064, |
| "learning_rate": 2.4630021141649053e-05, |
| "loss": 0.3677, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.6708499714774672, |
| "grad_norm": 0.2862491424432163, |
| "learning_rate": 2.4608879492600423e-05, |
| "loss": 0.3644, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.6719908727895039, |
| "grad_norm": 0.3820186373619291, |
| "learning_rate": 2.4587737843551797e-05, |
| "loss": 0.3683, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.6731317741015403, |
| "grad_norm": 0.3605477948643615, |
| "learning_rate": 2.456659619450317e-05, |
| "loss": 0.3643, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.6742726754135768, |
| "grad_norm": 0.3745644891120593, |
| "learning_rate": 2.4545454545454545e-05, |
| "loss": 0.3963, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.6754135767256133, |
| "grad_norm": 0.2843263524287354, |
| "learning_rate": 2.452431289640592e-05, |
| "loss": 0.3573, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.6765544780376498, |
| "grad_norm": 0.3095584589918876, |
| "learning_rate": 2.4503171247357297e-05, |
| "loss": 0.3672, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.6776953793496863, |
| "grad_norm": 0.33775878021951095, |
| "learning_rate": 2.448202959830867e-05, |
| "loss": 0.3644, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.6788362806617227, |
| "grad_norm": 0.2798731836487535, |
| "learning_rate": 2.4460887949260045e-05, |
| "loss": 0.3904, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.6799771819737592, |
| "grad_norm": 0.3176793770462291, |
| "learning_rate": 2.4439746300211416e-05, |
| "loss": 0.3503, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.6811180832857957, |
| "grad_norm": 0.2817311269242639, |
| "learning_rate": 2.441860465116279e-05, |
| "loss": 0.3507, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.6822589845978322, |
| "grad_norm": 0.332496204883691, |
| "learning_rate": 2.4397463002114164e-05, |
| "loss": 0.3701, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.6833998859098687, |
| "grad_norm": 0.31023078517004654, |
| "learning_rate": 2.437632135306554e-05, |
| "loss": 0.3793, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.6845407872219051, |
| "grad_norm": 0.3074921350102049, |
| "learning_rate": 2.4355179704016916e-05, |
| "loss": 0.3587, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.6856816885339418, |
| "grad_norm": 0.31490984904000224, |
| "learning_rate": 2.433403805496829e-05, |
| "loss": 0.366, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.6868225898459783, |
| "grad_norm": 0.3316060402389124, |
| "learning_rate": 2.4312896405919664e-05, |
| "loss": 0.3741, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.6879634911580148, |
| "grad_norm": 0.28937031810952313, |
| "learning_rate": 2.4291754756871038e-05, |
| "loss": 0.3583, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.6891043924700513, |
| "grad_norm": 0.29230363513203816, |
| "learning_rate": 2.4270613107822412e-05, |
| "loss": 0.3444, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.690245293782088, |
| "grad_norm": 0.3506573605117981, |
| "learning_rate": 2.4249471458773786e-05, |
| "loss": 0.3626, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.6913861950941245, |
| "grad_norm": 0.26141179784062163, |
| "learning_rate": 2.422832980972516e-05, |
| "loss": 0.3528, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.692527096406161, |
| "grad_norm": 0.3330228413197761, |
| "learning_rate": 2.4207188160676534e-05, |
| "loss": 0.379, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.6936679977181974, |
| "grad_norm": 0.29655254496962724, |
| "learning_rate": 2.4186046511627908e-05, |
| "loss": 0.3869, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.694808899030234, |
| "grad_norm": 0.2732954777447921, |
| "learning_rate": 2.4164904862579282e-05, |
| "loss": 0.3745, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.6959498003422704, |
| "grad_norm": 0.325475009094081, |
| "learning_rate": 2.4143763213530656e-05, |
| "loss": 0.3748, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.6970907016543069, |
| "grad_norm": 0.2993471282332693, |
| "learning_rate": 2.412262156448203e-05, |
| "loss": 0.3668, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.6982316029663433, |
| "grad_norm": 0.31603236693685227, |
| "learning_rate": 2.4101479915433404e-05, |
| "loss": 0.3727, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.6993725042783798, |
| "grad_norm": 0.2817814174465588, |
| "learning_rate": 2.4080338266384782e-05, |
| "loss": 0.3554, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.7005134055904163, |
| "grad_norm": 0.32120059999814593, |
| "learning_rate": 2.4059196617336153e-05, |
| "loss": 0.3659, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.7016543069024528, |
| "grad_norm": 0.28924927945350043, |
| "learning_rate": 2.4038054968287527e-05, |
| "loss": 0.3731, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.7027952082144895, |
| "grad_norm": 0.284643173875121, |
| "learning_rate": 2.40169133192389e-05, |
| "loss": 0.3884, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.703936109526526, |
| "grad_norm": 0.2665022764695233, |
| "learning_rate": 2.3995771670190275e-05, |
| "loss": 0.3547, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.7050770108385624, |
| "grad_norm": 0.33643098630695556, |
| "learning_rate": 2.397463002114165e-05, |
| "loss": 0.3753, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.706217912150599, |
| "grad_norm": 0.25812980136170705, |
| "learning_rate": 2.3953488372093026e-05, |
| "loss": 0.3671, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.7073588134626356, |
| "grad_norm": 0.2876613549689138, |
| "learning_rate": 2.39323467230444e-05, |
| "loss": 0.3714, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.708499714774672, |
| "grad_norm": 0.31197661099028084, |
| "learning_rate": 2.3911205073995774e-05, |
| "loss": 0.3718, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.7096406160867086, |
| "grad_norm": 0.29188522368743014, |
| "learning_rate": 2.389006342494715e-05, |
| "loss": 0.3757, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.710781517398745, |
| "grad_norm": 0.2592921193768068, |
| "learning_rate": 2.386892177589852e-05, |
| "loss": 0.361, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.7119224187107815, |
| "grad_norm": 0.28717400651849345, |
| "learning_rate": 2.3847780126849893e-05, |
| "loss": 0.3923, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.713063320022818, |
| "grad_norm": 0.2837339672529112, |
| "learning_rate": 2.382663847780127e-05, |
| "loss": 0.3788, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.7142042213348545, |
| "grad_norm": 0.25399925608301915, |
| "learning_rate": 2.3805496828752645e-05, |
| "loss": 0.3634, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.715345122646891, |
| "grad_norm": 0.2505460201463145, |
| "learning_rate": 2.378435517970402e-05, |
| "loss": 0.3614, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.7164860239589275, |
| "grad_norm": 0.25947337418651406, |
| "learning_rate": 2.3763213530655393e-05, |
| "loss": 0.3512, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.717626925270964, |
| "grad_norm": 0.30519573039102393, |
| "learning_rate": 2.3742071881606767e-05, |
| "loss": 0.3713, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.7187678265830004, |
| "grad_norm": 0.23254546868546908, |
| "learning_rate": 2.372093023255814e-05, |
| "loss": 0.3413, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.7199087278950371, |
| "grad_norm": 0.36817083989520405, |
| "learning_rate": 2.3699788583509515e-05, |
| "loss": 0.3749, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.7210496292070736, |
| "grad_norm": 0.3186865756330951, |
| "learning_rate": 2.367864693446089e-05, |
| "loss": 0.3495, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.72219053051911, |
| "grad_norm": 0.2808903990515687, |
| "learning_rate": 2.3657505285412263e-05, |
| "loss": 0.3558, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.7233314318311466, |
| "grad_norm": 0.2972003145721708, |
| "learning_rate": 2.3636363636363637e-05, |
| "loss": 0.3684, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.7244723331431833, |
| "grad_norm": 0.32992144660872674, |
| "learning_rate": 2.361522198731501e-05, |
| "loss": 0.3901, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.7256132344552197, |
| "grad_norm": 0.304645651405577, |
| "learning_rate": 2.3594080338266386e-05, |
| "loss": 0.3643, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.7267541357672562, |
| "grad_norm": 0.40363172426881777, |
| "learning_rate": 2.357293868921776e-05, |
| "loss": 0.3692, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.7278950370792927, |
| "grad_norm": 0.2735019850601522, |
| "learning_rate": 2.3551797040169134e-05, |
| "loss": 0.3787, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.7290359383913292, |
| "grad_norm": 0.3442056801706256, |
| "learning_rate": 2.353065539112051e-05, |
| "loss": 0.3945, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.7301768397033657, |
| "grad_norm": 0.3955918249097106, |
| "learning_rate": 2.3509513742071882e-05, |
| "loss": 0.3833, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.7313177410154021, |
| "grad_norm": 0.31123018585787954, |
| "learning_rate": 2.3488372093023256e-05, |
| "loss": 0.3535, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.7324586423274386, |
| "grad_norm": 0.33153086678958815, |
| "learning_rate": 2.346723044397463e-05, |
| "loss": 0.3763, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.733599543639475, |
| "grad_norm": 0.3480404455336306, |
| "learning_rate": 2.3446088794926004e-05, |
| "loss": 0.3673, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.7347404449515116, |
| "grad_norm": 0.3149346024288447, |
| "learning_rate": 2.3424947145877378e-05, |
| "loss": 0.3595, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.735881346263548, |
| "grad_norm": 0.30056431949640394, |
| "learning_rate": 2.3403805496828756e-05, |
| "loss": 0.3656, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.7370222475755848, |
| "grad_norm": 0.3041892875716995, |
| "learning_rate": 2.338266384778013e-05, |
| "loss": 0.3599, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.7381631488876212, |
| "grad_norm": 0.33025855308799845, |
| "learning_rate": 2.3361522198731504e-05, |
| "loss": 0.3674, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.7393040501996577, |
| "grad_norm": 0.33179918502041716, |
| "learning_rate": 2.3340380549682878e-05, |
| "loss": 0.3786, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.7404449515116942, |
| "grad_norm": 0.2757882558131415, |
| "learning_rate": 2.331923890063425e-05, |
| "loss": 0.3621, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.741585852823731, |
| "grad_norm": 0.3240423708410654, |
| "learning_rate": 2.3298097251585623e-05, |
| "loss": 0.3502, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.7427267541357674, |
| "grad_norm": 0.3355411788126555, |
| "learning_rate": 2.3276955602537e-05, |
| "loss": 0.3823, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.7438676554478039, |
| "grad_norm": 0.29410966517590625, |
| "learning_rate": 2.3255813953488374e-05, |
| "loss": 0.3612, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.7450085567598403, |
| "grad_norm": 0.3129468551760911, |
| "learning_rate": 2.3234672304439748e-05, |
| "loss": 0.3811, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.7461494580718768, |
| "grad_norm": 0.37196565677390886, |
| "learning_rate": 2.3213530655391122e-05, |
| "loss": 0.3649, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.7472903593839133, |
| "grad_norm": 0.27482339581092213, |
| "learning_rate": 2.3192389006342496e-05, |
| "loss": 0.3702, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.7484312606959498, |
| "grad_norm": 0.32354120499237055, |
| "learning_rate": 2.317124735729387e-05, |
| "loss": 0.3696, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.7495721620079863, |
| "grad_norm": 0.344333434322942, |
| "learning_rate": 2.3150105708245244e-05, |
| "loss": 0.3775, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.7507130633200227, |
| "grad_norm": 0.2501858892242347, |
| "learning_rate": 2.312896405919662e-05, |
| "loss": 0.3673, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.7518539646320592, |
| "grad_norm": 0.2619318350318475, |
| "learning_rate": 2.3107822410147993e-05, |
| "loss": 0.371, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.7529948659440957, |
| "grad_norm": 0.28267590669718795, |
| "learning_rate": 2.3086680761099367e-05, |
| "loss": 0.346, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.7541357672561322, |
| "grad_norm": 0.2822911644558842, |
| "learning_rate": 2.306553911205074e-05, |
| "loss": 0.3611, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.7552766685681689, |
| "grad_norm": 0.24913100106359873, |
| "learning_rate": 2.3044397463002115e-05, |
| "loss": 0.3587, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.7564175698802054, |
| "grad_norm": 0.2782277920002323, |
| "learning_rate": 2.302325581395349e-05, |
| "loss": 0.3589, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.7575584711922418, |
| "grad_norm": 0.29310960832535715, |
| "learning_rate": 2.3002114164904863e-05, |
| "loss": 0.3534, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.7586993725042785, |
| "grad_norm": 0.26490888346701547, |
| "learning_rate": 2.298097251585624e-05, |
| "loss": 0.3625, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.759840273816315, |
| "grad_norm": 0.3124532199065016, |
| "learning_rate": 2.2959830866807614e-05, |
| "loss": 0.3533, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.7609811751283515, |
| "grad_norm": 0.28873529134829373, |
| "learning_rate": 2.2938689217758985e-05, |
| "loss": 0.3846, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.762122076440388, |
| "grad_norm": 0.2907772323127595, |
| "learning_rate": 2.291754756871036e-05, |
| "loss": 0.3279, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.7632629777524245, |
| "grad_norm": 0.29886052812500113, |
| "learning_rate": 2.2896405919661733e-05, |
| "loss": 0.3585, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.764403879064461, |
| "grad_norm": 0.29662624280244543, |
| "learning_rate": 2.2875264270613107e-05, |
| "loss": 0.3646, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.7655447803764974, |
| "grad_norm": 0.3141679404141358, |
| "learning_rate": 2.2854122621564485e-05, |
| "loss": 0.374, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.766685681688534, |
| "grad_norm": 0.2951349996054108, |
| "learning_rate": 2.283298097251586e-05, |
| "loss": 0.3668, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.7678265830005704, |
| "grad_norm": 0.29342310695343443, |
| "learning_rate": 2.2811839323467233e-05, |
| "loss": 0.3495, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.7689674843126069, |
| "grad_norm": 0.25105234676118393, |
| "learning_rate": 2.2790697674418607e-05, |
| "loss": 0.3439, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.7701083856246433, |
| "grad_norm": 0.28202846143657684, |
| "learning_rate": 2.276955602536998e-05, |
| "loss": 0.3622, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.7712492869366798, |
| "grad_norm": 0.2577324776105485, |
| "learning_rate": 2.2748414376321352e-05, |
| "loss": 0.3674, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.7723901882487165, |
| "grad_norm": 0.26427649352790966, |
| "learning_rate": 2.272727272727273e-05, |
| "loss": 0.3915, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.773531089560753, |
| "grad_norm": 0.2975077391703283, |
| "learning_rate": 2.2706131078224103e-05, |
| "loss": 0.3719, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.7746719908727895, |
| "grad_norm": 0.30371902180036175, |
| "learning_rate": 2.2684989429175477e-05, |
| "loss": 0.3625, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.775812892184826, |
| "grad_norm": 0.2978704874115724, |
| "learning_rate": 2.266384778012685e-05, |
| "loss": 0.3492, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.7769537934968627, |
| "grad_norm": 0.3424710072192493, |
| "learning_rate": 2.2642706131078226e-05, |
| "loss": 0.376, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.7780946948088991, |
| "grad_norm": 0.4304707541450273, |
| "learning_rate": 2.26215644820296e-05, |
| "loss": 0.3716, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.7792355961209356, |
| "grad_norm": 0.2888383768672417, |
| "learning_rate": 2.2600422832980974e-05, |
| "loss": 0.3657, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.780376497432972, |
| "grad_norm": 0.3424825882345746, |
| "learning_rate": 2.2579281183932348e-05, |
| "loss": 0.3886, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.7815173987450086, |
| "grad_norm": 0.3321929869237414, |
| "learning_rate": 2.2558139534883722e-05, |
| "loss": 0.3675, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.782658300057045, |
| "grad_norm": 0.3593851391701366, |
| "learning_rate": 2.2536997885835096e-05, |
| "loss": 0.3589, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.7837992013690815, |
| "grad_norm": 0.3095036515987366, |
| "learning_rate": 2.251585623678647e-05, |
| "loss": 0.3628, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.784940102681118, |
| "grad_norm": 0.31974279496437447, |
| "learning_rate": 2.2494714587737844e-05, |
| "loss": 0.3642, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.7860810039931545, |
| "grad_norm": 0.356612501455481, |
| "learning_rate": 2.2473572938689218e-05, |
| "loss": 0.3601, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.787221905305191, |
| "grad_norm": 0.3569612897898255, |
| "learning_rate": 2.2452431289640592e-05, |
| "loss": 0.3815, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.7883628066172275, |
| "grad_norm": 0.30994755489776965, |
| "learning_rate": 2.243128964059197e-05, |
| "loss": 0.3647, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.7895037079292642, |
| "grad_norm": 0.3737275403511739, |
| "learning_rate": 2.2410147991543344e-05, |
| "loss": 0.3712, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.7906446092413006, |
| "grad_norm": 0.407808772953075, |
| "learning_rate": 2.2389006342494714e-05, |
| "loss": 0.3809, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.7917855105533371, |
| "grad_norm": 0.3169286511807649, |
| "learning_rate": 2.236786469344609e-05, |
| "loss": 0.3699, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.7929264118653736, |
| "grad_norm": 0.3650292862468508, |
| "learning_rate": 2.2346723044397463e-05, |
| "loss": 0.3661, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.7940673131774103, |
| "grad_norm": 0.3566495908380766, |
| "learning_rate": 2.2325581395348837e-05, |
| "loss": 0.3502, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.7952082144894468, |
| "grad_norm": 0.2875990542081684, |
| "learning_rate": 2.2304439746300214e-05, |
| "loss": 0.3495, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.7963491158014833, |
| "grad_norm": 0.32014895991533876, |
| "learning_rate": 2.2283298097251588e-05, |
| "loss": 0.3836, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.7974900171135197, |
| "grad_norm": 0.4105886211909479, |
| "learning_rate": 2.2262156448202962e-05, |
| "loss": 0.3775, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.7986309184255562, |
| "grad_norm": 0.3737630383761046, |
| "learning_rate": 2.2241014799154336e-05, |
| "loss": 0.3892, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.7997718197375927, |
| "grad_norm": 0.3247924558161578, |
| "learning_rate": 2.221987315010571e-05, |
| "loss": 0.3536, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.8009127210496292, |
| "grad_norm": 0.3077737827513184, |
| "learning_rate": 2.219873150105708e-05, |
| "loss": 0.3513, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.8020536223616657, |
| "grad_norm": 0.30397617493723933, |
| "learning_rate": 2.2177589852008455e-05, |
| "loss": 0.367, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.8031945236737021, |
| "grad_norm": 0.2899719233093763, |
| "learning_rate": 2.2156448202959833e-05, |
| "loss": 0.371, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.8043354249857386, |
| "grad_norm": 0.250887784452979, |
| "learning_rate": 2.2135306553911207e-05, |
| "loss": 0.3731, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.805476326297775, |
| "grad_norm": 0.2988653720689675, |
| "learning_rate": 2.211416490486258e-05, |
| "loss": 0.355, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.8066172276098118, |
| "grad_norm": 0.27970342221002986, |
| "learning_rate": 2.2093023255813955e-05, |
| "loss": 0.379, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.8077581289218483, |
| "grad_norm": 0.2685564185888628, |
| "learning_rate": 2.207188160676533e-05, |
| "loss": 0.3785, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.8088990302338848, |
| "grad_norm": 0.3217391026492225, |
| "learning_rate": 2.2050739957716703e-05, |
| "loss": 0.387, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.8100399315459212, |
| "grad_norm": 0.2922819981265088, |
| "learning_rate": 2.2029598308668077e-05, |
| "loss": 0.3792, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.811180832857958, |
| "grad_norm": 0.27230409663403116, |
| "learning_rate": 2.200845665961945e-05, |
| "loss": 0.3661, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.8123217341699944, |
| "grad_norm": 0.32291579528376907, |
| "learning_rate": 2.1987315010570825e-05, |
| "loss": 0.3796, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.813462635482031, |
| "grad_norm": 0.25901863105280115, |
| "learning_rate": 2.19661733615222e-05, |
| "loss": 0.3605, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.8146035367940674, |
| "grad_norm": 0.31352888990371003, |
| "learning_rate": 2.1945031712473573e-05, |
| "loss": 0.3766, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.8157444381061039, |
| "grad_norm": 0.28862685464242693, |
| "learning_rate": 2.1923890063424947e-05, |
| "loss": 0.3574, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.8168853394181403, |
| "grad_norm": 0.2753939853521965, |
| "learning_rate": 2.190274841437632e-05, |
| "loss": 0.3627, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.8180262407301768, |
| "grad_norm": 0.27928625456853756, |
| "learning_rate": 2.18816067653277e-05, |
| "loss": 0.3828, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.8191671420422133, |
| "grad_norm": 0.25294551894757467, |
| "learning_rate": 2.1860465116279073e-05, |
| "loss": 0.3374, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.8203080433542498, |
| "grad_norm": 0.2616026047197691, |
| "learning_rate": 2.1839323467230444e-05, |
| "loss": 0.3416, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.8214489446662863, |
| "grad_norm": 0.2409961331311988, |
| "learning_rate": 2.1818181818181818e-05, |
| "loss": 0.3494, |
| "step": 1596 |
| }, |
| { |
| "epoch": 1.8225898459783227, |
| "grad_norm": 0.26493946117653555, |
| "learning_rate": 2.1797040169133192e-05, |
| "loss": 0.3644, |
| "step": 1597 |
| }, |
| { |
| "epoch": 1.8237307472903592, |
| "grad_norm": 0.28798554747908567, |
| "learning_rate": 2.1775898520084566e-05, |
| "loss": 0.3624, |
| "step": 1598 |
| }, |
| { |
| "epoch": 1.824871648602396, |
| "grad_norm": 0.26437027686258674, |
| "learning_rate": 2.1754756871035943e-05, |
| "loss": 0.3572, |
| "step": 1599 |
| }, |
| { |
| "epoch": 1.8260125499144324, |
| "grad_norm": 0.24691020921569914, |
| "learning_rate": 2.1733615221987317e-05, |
| "loss": 0.3862, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.8271534512264689, |
| "grad_norm": 0.2649171577055507, |
| "learning_rate": 2.171247357293869e-05, |
| "loss": 0.3527, |
| "step": 1601 |
| }, |
| { |
| "epoch": 1.8282943525385056, |
| "grad_norm": 0.28647906553531793, |
| "learning_rate": 2.1691331923890066e-05, |
| "loss": 0.379, |
| "step": 1602 |
| }, |
| { |
| "epoch": 1.829435253850542, |
| "grad_norm": 0.27888222041894295, |
| "learning_rate": 2.167019027484144e-05, |
| "loss": 0.3841, |
| "step": 1603 |
| }, |
| { |
| "epoch": 1.8305761551625785, |
| "grad_norm": 0.25803288173345323, |
| "learning_rate": 2.164904862579281e-05, |
| "loss": 0.3496, |
| "step": 1604 |
| }, |
| { |
| "epoch": 1.831717056474615, |
| "grad_norm": 0.3152294889154458, |
| "learning_rate": 2.1627906976744184e-05, |
| "loss": 0.3796, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.8328579577866515, |
| "grad_norm": 0.2837028459205526, |
| "learning_rate": 2.1606765327695562e-05, |
| "loss": 0.3718, |
| "step": 1606 |
| }, |
| { |
| "epoch": 1.833998859098688, |
| "grad_norm": 0.2942487206676281, |
| "learning_rate": 2.1585623678646936e-05, |
| "loss": 0.3672, |
| "step": 1607 |
| }, |
| { |
| "epoch": 1.8351397604107245, |
| "grad_norm": 0.27094002473416, |
| "learning_rate": 2.156448202959831e-05, |
| "loss": 0.3825, |
| "step": 1608 |
| }, |
| { |
| "epoch": 1.836280661722761, |
| "grad_norm": 0.3080124085299322, |
| "learning_rate": 2.1543340380549684e-05, |
| "loss": 0.3768, |
| "step": 1609 |
| }, |
| { |
| "epoch": 1.8374215630347974, |
| "grad_norm": 0.29458146796584034, |
| "learning_rate": 2.1522198731501058e-05, |
| "loss": 0.3826, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.838562464346834, |
| "grad_norm": 0.2709545289583168, |
| "learning_rate": 2.1501057082452432e-05, |
| "loss": 0.3492, |
| "step": 1611 |
| }, |
| { |
| "epoch": 1.8397033656588704, |
| "grad_norm": 0.2626488042697663, |
| "learning_rate": 2.1479915433403806e-05, |
| "loss": 0.3576, |
| "step": 1612 |
| }, |
| { |
| "epoch": 1.8408442669709069, |
| "grad_norm": 0.29505118436293093, |
| "learning_rate": 2.145877378435518e-05, |
| "loss": 0.3679, |
| "step": 1613 |
| }, |
| { |
| "epoch": 1.8419851682829436, |
| "grad_norm": 0.2811956966393874, |
| "learning_rate": 2.1437632135306555e-05, |
| "loss": 0.3706, |
| "step": 1614 |
| }, |
| { |
| "epoch": 1.84312606959498, |
| "grad_norm": 0.2857476313553613, |
| "learning_rate": 2.141649048625793e-05, |
| "loss": 0.363, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.8442669709070165, |
| "grad_norm": 0.26379976558838814, |
| "learning_rate": 2.1395348837209303e-05, |
| "loss": 0.3656, |
| "step": 1616 |
| }, |
| { |
| "epoch": 1.845407872219053, |
| "grad_norm": 0.30310468777418886, |
| "learning_rate": 2.1374207188160677e-05, |
| "loss": 0.3766, |
| "step": 1617 |
| }, |
| { |
| "epoch": 1.8465487735310897, |
| "grad_norm": 0.29055054780933476, |
| "learning_rate": 2.135306553911205e-05, |
| "loss": 0.3768, |
| "step": 1618 |
| }, |
| { |
| "epoch": 1.8476896748431262, |
| "grad_norm": 0.26275911199095003, |
| "learning_rate": 2.1331923890063428e-05, |
| "loss": 0.3655, |
| "step": 1619 |
| }, |
| { |
| "epoch": 1.8488305761551627, |
| "grad_norm": 0.3075089578022422, |
| "learning_rate": 2.1310782241014802e-05, |
| "loss": 0.3671, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.8499714774671991, |
| "grad_norm": 0.24011215531163518, |
| "learning_rate": 2.1289640591966176e-05, |
| "loss": 0.3392, |
| "step": 1621 |
| }, |
| { |
| "epoch": 1.8511123787792356, |
| "grad_norm": 0.3003670940065247, |
| "learning_rate": 2.1268498942917547e-05, |
| "loss": 0.3745, |
| "step": 1622 |
| }, |
| { |
| "epoch": 1.852253280091272, |
| "grad_norm": 0.32867835263049694, |
| "learning_rate": 2.124735729386892e-05, |
| "loss": 0.374, |
| "step": 1623 |
| }, |
| { |
| "epoch": 1.8533941814033086, |
| "grad_norm": 0.2736149864201345, |
| "learning_rate": 2.1226215644820295e-05, |
| "loss": 0.3866, |
| "step": 1624 |
| }, |
| { |
| "epoch": 1.854535082715345, |
| "grad_norm": 0.32004051514876714, |
| "learning_rate": 2.120507399577167e-05, |
| "loss": 0.3499, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.8556759840273815, |
| "grad_norm": 0.3104696099218829, |
| "learning_rate": 2.1183932346723047e-05, |
| "loss": 0.3821, |
| "step": 1626 |
| }, |
| { |
| "epoch": 1.856816885339418, |
| "grad_norm": 0.3296864222385294, |
| "learning_rate": 2.116279069767442e-05, |
| "loss": 0.3764, |
| "step": 1627 |
| }, |
| { |
| "epoch": 1.8579577866514545, |
| "grad_norm": 0.35311586083947655, |
| "learning_rate": 2.1141649048625795e-05, |
| "loss": 0.3784, |
| "step": 1628 |
| }, |
| { |
| "epoch": 1.8590986879634912, |
| "grad_norm": 0.2645700372496905, |
| "learning_rate": 2.112050739957717e-05, |
| "loss": 0.3465, |
| "step": 1629 |
| }, |
| { |
| "epoch": 1.8602395892755277, |
| "grad_norm": 0.34980987571760414, |
| "learning_rate": 2.1099365750528543e-05, |
| "loss": 0.3638, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.8613804905875642, |
| "grad_norm": 0.279728784438425, |
| "learning_rate": 2.1078224101479914e-05, |
| "loss": 0.3711, |
| "step": 1631 |
| }, |
| { |
| "epoch": 1.8625213918996006, |
| "grad_norm": 0.2753861016019802, |
| "learning_rate": 2.105708245243129e-05, |
| "loss": 0.3683, |
| "step": 1632 |
| }, |
| { |
| "epoch": 1.8636622932116373, |
| "grad_norm": 0.2979160433487785, |
| "learning_rate": 2.1035940803382665e-05, |
| "loss": 0.3649, |
| "step": 1633 |
| }, |
| { |
| "epoch": 1.8648031945236738, |
| "grad_norm": 0.28213788188185157, |
| "learning_rate": 2.101479915433404e-05, |
| "loss": 0.3551, |
| "step": 1634 |
| }, |
| { |
| "epoch": 1.8659440958357103, |
| "grad_norm": 0.29960423038143713, |
| "learning_rate": 2.0993657505285413e-05, |
| "loss": 0.3468, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.8670849971477468, |
| "grad_norm": 0.27966434696237047, |
| "learning_rate": 2.0972515856236788e-05, |
| "loss": 0.3631, |
| "step": 1636 |
| }, |
| { |
| "epoch": 1.8682258984597833, |
| "grad_norm": 0.35759260629909057, |
| "learning_rate": 2.095137420718816e-05, |
| "loss": 0.3776, |
| "step": 1637 |
| }, |
| { |
| "epoch": 1.8693667997718197, |
| "grad_norm": 0.24413963286231924, |
| "learning_rate": 2.0930232558139536e-05, |
| "loss": 0.357, |
| "step": 1638 |
| }, |
| { |
| "epoch": 1.8705077010838562, |
| "grad_norm": 0.2942994597300611, |
| "learning_rate": 2.090909090909091e-05, |
| "loss": 0.3606, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.8716486023958927, |
| "grad_norm": 0.36190249258733775, |
| "learning_rate": 2.0887949260042284e-05, |
| "loss": 0.3752, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.8727895037079292, |
| "grad_norm": 0.2608033194653284, |
| "learning_rate": 2.0866807610993658e-05, |
| "loss": 0.3452, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.8739304050199657, |
| "grad_norm": 0.26918936097256435, |
| "learning_rate": 2.0845665961945032e-05, |
| "loss": 0.3739, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.8750713063320021, |
| "grad_norm": 0.31655255219302714, |
| "learning_rate": 2.0824524312896406e-05, |
| "loss": 0.3906, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.8762122076440388, |
| "grad_norm": 0.22432722104783256, |
| "learning_rate": 2.080338266384778e-05, |
| "loss": 0.3452, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.8773531089560753, |
| "grad_norm": 0.27099992542216245, |
| "learning_rate": 2.0782241014799158e-05, |
| "loss": 0.3562, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.8784940102681118, |
| "grad_norm": 0.2687749493001164, |
| "learning_rate": 2.076109936575053e-05, |
| "loss": 0.3839, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.8796349115801483, |
| "grad_norm": 0.2745458632822797, |
| "learning_rate": 2.0739957716701906e-05, |
| "loss": 0.3585, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.880775812892185, |
| "grad_norm": 0.2649532173440124, |
| "learning_rate": 2.0718816067653276e-05, |
| "loss": 0.3643, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.8819167142042215, |
| "grad_norm": 0.2926959708581756, |
| "learning_rate": 2.069767441860465e-05, |
| "loss": 0.362, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.883057615516258, |
| "grad_norm": 0.28133283588264074, |
| "learning_rate": 2.0676532769556025e-05, |
| "loss": 0.3672, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.8841985168282944, |
| "grad_norm": 0.26954237691342353, |
| "learning_rate": 2.06553911205074e-05, |
| "loss": 0.3632, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.885339418140331, |
| "grad_norm": 0.28736812482531, |
| "learning_rate": 2.0634249471458776e-05, |
| "loss": 0.3413, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.8864803194523674, |
| "grad_norm": 0.2743819590826279, |
| "learning_rate": 2.061310782241015e-05, |
| "loss": 0.378, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.8876212207644039, |
| "grad_norm": 0.30624176993565166, |
| "learning_rate": 2.0591966173361524e-05, |
| "loss": 0.3663, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.8887621220764403, |
| "grad_norm": 0.2589219913447823, |
| "learning_rate": 2.0570824524312898e-05, |
| "loss": 0.3492, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.8899030233884768, |
| "grad_norm": 0.2768282482738376, |
| "learning_rate": 2.0549682875264272e-05, |
| "loss": 0.3618, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.8910439247005133, |
| "grad_norm": 0.2739420056242106, |
| "learning_rate": 2.0528541226215643e-05, |
| "loss": 0.3555, |
| "step": 1657 |
| }, |
| { |
| "epoch": 1.8921848260125498, |
| "grad_norm": 0.26928054226917636, |
| "learning_rate": 2.050739957716702e-05, |
| "loss": 0.3558, |
| "step": 1658 |
| }, |
| { |
| "epoch": 1.8933257273245863, |
| "grad_norm": 0.3207377769023388, |
| "learning_rate": 2.0486257928118395e-05, |
| "loss": 0.3798, |
| "step": 1659 |
| }, |
| { |
| "epoch": 1.894466628636623, |
| "grad_norm": 0.24989405415599858, |
| "learning_rate": 2.046511627906977e-05, |
| "loss": 0.3613, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.8956075299486594, |
| "grad_norm": 0.2607195645529972, |
| "learning_rate": 2.0443974630021143e-05, |
| "loss": 0.3709, |
| "step": 1661 |
| }, |
| { |
| "epoch": 1.896748431260696, |
| "grad_norm": 0.2733555160670365, |
| "learning_rate": 2.0422832980972517e-05, |
| "loss": 0.3614, |
| "step": 1662 |
| }, |
| { |
| "epoch": 1.8978893325727326, |
| "grad_norm": 0.27871241601853286, |
| "learning_rate": 2.040169133192389e-05, |
| "loss": 0.3648, |
| "step": 1663 |
| }, |
| { |
| "epoch": 1.899030233884769, |
| "grad_norm": 0.31234592073077655, |
| "learning_rate": 2.0380549682875265e-05, |
| "loss": 0.3683, |
| "step": 1664 |
| }, |
| { |
| "epoch": 1.9001711351968056, |
| "grad_norm": 0.27741519189889535, |
| "learning_rate": 2.0359408033826642e-05, |
| "loss": 0.373, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.901312036508842, |
| "grad_norm": 0.28054435993083715, |
| "learning_rate": 2.0338266384778013e-05, |
| "loss": 0.3557, |
| "step": 1666 |
| }, |
| { |
| "epoch": 1.9024529378208785, |
| "grad_norm": 0.27814457755739297, |
| "learning_rate": 2.0317124735729387e-05, |
| "loss": 0.3858, |
| "step": 1667 |
| }, |
| { |
| "epoch": 1.903593839132915, |
| "grad_norm": 0.2884539606763188, |
| "learning_rate": 2.029598308668076e-05, |
| "loss": 0.3741, |
| "step": 1668 |
| }, |
| { |
| "epoch": 1.9047347404449515, |
| "grad_norm": 0.2618620359150727, |
| "learning_rate": 2.0274841437632135e-05, |
| "loss": 0.3649, |
| "step": 1669 |
| }, |
| { |
| "epoch": 1.905875641756988, |
| "grad_norm": 0.25113408676713106, |
| "learning_rate": 2.025369978858351e-05, |
| "loss": 0.3622, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.9070165430690245, |
| "grad_norm": 0.2784177351306522, |
| "learning_rate": 2.0232558139534883e-05, |
| "loss": 0.3517, |
| "step": 1671 |
| }, |
| { |
| "epoch": 1.908157444381061, |
| "grad_norm": 0.2856685467357065, |
| "learning_rate": 2.021141649048626e-05, |
| "loss": 0.3683, |
| "step": 1672 |
| }, |
| { |
| "epoch": 1.9092983456930974, |
| "grad_norm": 0.26172116445181576, |
| "learning_rate": 2.0190274841437635e-05, |
| "loss": 0.354, |
| "step": 1673 |
| }, |
| { |
| "epoch": 1.910439247005134, |
| "grad_norm": 0.26335022190661506, |
| "learning_rate": 2.016913319238901e-05, |
| "loss": 0.3507, |
| "step": 1674 |
| }, |
| { |
| "epoch": 1.9115801483171706, |
| "grad_norm": 0.25622287485822953, |
| "learning_rate": 2.014799154334038e-05, |
| "loss": 0.3731, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.912721049629207, |
| "grad_norm": 0.2601225492512362, |
| "learning_rate": 2.0126849894291754e-05, |
| "loss": 0.3632, |
| "step": 1676 |
| }, |
| { |
| "epoch": 1.9138619509412436, |
| "grad_norm": 0.2604903931429644, |
| "learning_rate": 2.0105708245243128e-05, |
| "loss": 0.3519, |
| "step": 1677 |
| }, |
| { |
| "epoch": 1.91500285225328, |
| "grad_norm": 0.24755000365197305, |
| "learning_rate": 2.0084566596194505e-05, |
| "loss": 0.3564, |
| "step": 1678 |
| }, |
| { |
| "epoch": 1.9161437535653167, |
| "grad_norm": 0.2638138653479009, |
| "learning_rate": 2.006342494714588e-05, |
| "loss": 0.3626, |
| "step": 1679 |
| }, |
| { |
| "epoch": 1.9172846548773532, |
| "grad_norm": 0.25681513482848295, |
| "learning_rate": 2.0042283298097253e-05, |
| "loss": 0.3581, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.9184255561893897, |
| "grad_norm": 0.2557089013629714, |
| "learning_rate": 2.0021141649048628e-05, |
| "loss": 0.3369, |
| "step": 1681 |
| }, |
| { |
| "epoch": 1.9195664575014262, |
| "grad_norm": 0.30474490842943003, |
| "learning_rate": 2e-05, |
| "loss": 0.3586, |
| "step": 1682 |
| }, |
| { |
| "epoch": 1.9207073588134627, |
| "grad_norm": 0.23362258404659542, |
| "learning_rate": 1.9978858350951372e-05, |
| "loss": 0.3548, |
| "step": 1683 |
| }, |
| { |
| "epoch": 1.9218482601254991, |
| "grad_norm": 0.282854268486239, |
| "learning_rate": 1.995771670190275e-05, |
| "loss": 0.3833, |
| "step": 1684 |
| }, |
| { |
| "epoch": 1.9229891614375356, |
| "grad_norm": 0.28664344992691926, |
| "learning_rate": 1.9936575052854124e-05, |
| "loss": 0.3568, |
| "step": 1685 |
| }, |
| { |
| "epoch": 1.924130062749572, |
| "grad_norm": 0.2671099649115389, |
| "learning_rate": 1.9915433403805498e-05, |
| "loss": 0.3646, |
| "step": 1686 |
| }, |
| { |
| "epoch": 1.9252709640616086, |
| "grad_norm": 0.2954713254197816, |
| "learning_rate": 1.9894291754756872e-05, |
| "loss": 0.3643, |
| "step": 1687 |
| }, |
| { |
| "epoch": 1.926411865373645, |
| "grad_norm": 0.33930978902768366, |
| "learning_rate": 1.9873150105708246e-05, |
| "loss": 0.3769, |
| "step": 1688 |
| }, |
| { |
| "epoch": 1.9275527666856815, |
| "grad_norm": 0.29824577630447163, |
| "learning_rate": 1.985200845665962e-05, |
| "loss": 0.3768, |
| "step": 1689 |
| }, |
| { |
| "epoch": 1.9286936679977182, |
| "grad_norm": 0.29176022977395993, |
| "learning_rate": 1.9830866807610994e-05, |
| "loss": 0.3507, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.9298345693097547, |
| "grad_norm": 0.24390654056177216, |
| "learning_rate": 1.980972515856237e-05, |
| "loss": 0.3506, |
| "step": 1691 |
| }, |
| { |
| "epoch": 1.9309754706217912, |
| "grad_norm": 0.2590795747683378, |
| "learning_rate": 1.9788583509513742e-05, |
| "loss": 0.3501, |
| "step": 1692 |
| }, |
| { |
| "epoch": 1.9321163719338277, |
| "grad_norm": 0.28164134893382536, |
| "learning_rate": 1.9767441860465116e-05, |
| "loss": 0.3608, |
| "step": 1693 |
| }, |
| { |
| "epoch": 1.9332572732458644, |
| "grad_norm": 0.2647748061214524, |
| "learning_rate": 1.974630021141649e-05, |
| "loss": 0.3606, |
| "step": 1694 |
| }, |
| { |
| "epoch": 1.9343981745579009, |
| "grad_norm": 0.28010445382267146, |
| "learning_rate": 1.9725158562367865e-05, |
| "loss": 0.3581, |
| "step": 1695 |
| }, |
| { |
| "epoch": 1.9355390758699373, |
| "grad_norm": 0.2533929878448776, |
| "learning_rate": 1.970401691331924e-05, |
| "loss": 0.3706, |
| "step": 1696 |
| }, |
| { |
| "epoch": 1.9366799771819738, |
| "grad_norm": 0.28611878053557527, |
| "learning_rate": 1.9682875264270613e-05, |
| "loss": 0.3728, |
| "step": 1697 |
| }, |
| { |
| "epoch": 1.9378208784940103, |
| "grad_norm": 0.28041130653252294, |
| "learning_rate": 1.966173361522199e-05, |
| "loss": 0.3622, |
| "step": 1698 |
| }, |
| { |
| "epoch": 1.9389617798060468, |
| "grad_norm": 0.28497047972994977, |
| "learning_rate": 1.9640591966173364e-05, |
| "loss": 0.3563, |
| "step": 1699 |
| }, |
| { |
| "epoch": 1.9401026811180833, |
| "grad_norm": 0.2918310757848588, |
| "learning_rate": 1.961945031712474e-05, |
| "loss": 0.3766, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.9412435824301197, |
| "grad_norm": 0.2369182789719308, |
| "learning_rate": 1.959830866807611e-05, |
| "loss": 0.3478, |
| "step": 1701 |
| }, |
| { |
| "epoch": 1.9423844837421562, |
| "grad_norm": 0.2647060419296417, |
| "learning_rate": 1.9577167019027483e-05, |
| "loss": 0.3582, |
| "step": 1702 |
| }, |
| { |
| "epoch": 1.9435253850541927, |
| "grad_norm": 0.2920307858064205, |
| "learning_rate": 1.9556025369978857e-05, |
| "loss": 0.369, |
| "step": 1703 |
| }, |
| { |
| "epoch": 1.9446662863662292, |
| "grad_norm": 0.3227097667935105, |
| "learning_rate": 1.9534883720930235e-05, |
| "loss": 0.368, |
| "step": 1704 |
| }, |
| { |
| "epoch": 1.9458071876782659, |
| "grad_norm": 0.2667720664080837, |
| "learning_rate": 1.951374207188161e-05, |
| "loss": 0.3637, |
| "step": 1705 |
| }, |
| { |
| "epoch": 1.9469480889903024, |
| "grad_norm": 0.3062658322537471, |
| "learning_rate": 1.9492600422832983e-05, |
| "loss": 0.3663, |
| "step": 1706 |
| }, |
| { |
| "epoch": 1.9480889903023388, |
| "grad_norm": 0.27305673090240135, |
| "learning_rate": 1.9471458773784357e-05, |
| "loss": 0.3622, |
| "step": 1707 |
| }, |
| { |
| "epoch": 1.9492298916143753, |
| "grad_norm": 0.2641687358716487, |
| "learning_rate": 1.945031712473573e-05, |
| "loss": 0.355, |
| "step": 1708 |
| }, |
| { |
| "epoch": 1.950370792926412, |
| "grad_norm": 0.2580431501007941, |
| "learning_rate": 1.9429175475687105e-05, |
| "loss": 0.3652, |
| "step": 1709 |
| }, |
| { |
| "epoch": 1.9515116942384485, |
| "grad_norm": 0.2344794936808685, |
| "learning_rate": 1.940803382663848e-05, |
| "loss": 0.3468, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.952652595550485, |
| "grad_norm": 0.2886415993544298, |
| "learning_rate": 1.9386892177589853e-05, |
| "loss": 0.3757, |
| "step": 1711 |
| }, |
| { |
| "epoch": 1.9537934968625215, |
| "grad_norm": 0.3049085901486973, |
| "learning_rate": 1.9365750528541227e-05, |
| "loss": 0.3737, |
| "step": 1712 |
| }, |
| { |
| "epoch": 1.954934398174558, |
| "grad_norm": 0.26596397633339325, |
| "learning_rate": 1.93446088794926e-05, |
| "loss": 0.3642, |
| "step": 1713 |
| }, |
| { |
| "epoch": 1.9560752994865944, |
| "grad_norm": 0.30294710633352423, |
| "learning_rate": 1.9323467230443975e-05, |
| "loss": 0.3668, |
| "step": 1714 |
| }, |
| { |
| "epoch": 1.957216200798631, |
| "grad_norm": 0.26349235851430564, |
| "learning_rate": 1.930232558139535e-05, |
| "loss": 0.3559, |
| "step": 1715 |
| }, |
| { |
| "epoch": 1.9583571021106674, |
| "grad_norm": 0.2685783037927963, |
| "learning_rate": 1.9281183932346724e-05, |
| "loss": 0.3774, |
| "step": 1716 |
| }, |
| { |
| "epoch": 1.9594980034227039, |
| "grad_norm": 0.264292192361419, |
| "learning_rate": 1.9260042283298098e-05, |
| "loss": 0.3735, |
| "step": 1717 |
| }, |
| { |
| "epoch": 1.9606389047347403, |
| "grad_norm": 0.3554897307494108, |
| "learning_rate": 1.9238900634249475e-05, |
| "loss": 0.371, |
| "step": 1718 |
| }, |
| { |
| "epoch": 1.9617798060467768, |
| "grad_norm": 0.31087400630355955, |
| "learning_rate": 1.9217758985200846e-05, |
| "loss": 0.3661, |
| "step": 1719 |
| }, |
| { |
| "epoch": 1.9629207073588133, |
| "grad_norm": 0.3095791509449235, |
| "learning_rate": 1.919661733615222e-05, |
| "loss": 0.3706, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.96406160867085, |
| "grad_norm": 0.2738969954461718, |
| "learning_rate": 1.9175475687103594e-05, |
| "loss": 0.3876, |
| "step": 1721 |
| }, |
| { |
| "epoch": 1.9652025099828865, |
| "grad_norm": 0.34003420604038476, |
| "learning_rate": 1.9154334038054968e-05, |
| "loss": 0.3956, |
| "step": 1722 |
| }, |
| { |
| "epoch": 1.966343411294923, |
| "grad_norm": 0.28565166124104413, |
| "learning_rate": 1.9133192389006342e-05, |
| "loss": 0.3478, |
| "step": 1723 |
| }, |
| { |
| "epoch": 1.9674843126069597, |
| "grad_norm": 0.2632293762228427, |
| "learning_rate": 1.911205073995772e-05, |
| "loss": 0.3672, |
| "step": 1724 |
| }, |
| { |
| "epoch": 1.9686252139189961, |
| "grad_norm": 0.2973825583942189, |
| "learning_rate": 1.9090909090909094e-05, |
| "loss": 0.3726, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.9697661152310326, |
| "grad_norm": 0.2525267995321032, |
| "learning_rate": 1.9069767441860468e-05, |
| "loss": 0.374, |
| "step": 1726 |
| }, |
| { |
| "epoch": 1.970907016543069, |
| "grad_norm": 0.24811016517447812, |
| "learning_rate": 1.904862579281184e-05, |
| "loss": 0.3956, |
| "step": 1727 |
| }, |
| { |
| "epoch": 1.9720479178551056, |
| "grad_norm": 0.3270553538748556, |
| "learning_rate": 1.9027484143763212e-05, |
| "loss": 0.3662, |
| "step": 1728 |
| }, |
| { |
| "epoch": 1.973188819167142, |
| "grad_norm": 0.2541293268170106, |
| "learning_rate": 1.9006342494714586e-05, |
| "loss": 0.3403, |
| "step": 1729 |
| }, |
| { |
| "epoch": 1.9743297204791785, |
| "grad_norm": 0.27007055492054866, |
| "learning_rate": 1.8985200845665964e-05, |
| "loss": 0.3598, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.975470621791215, |
| "grad_norm": 0.2789281069327813, |
| "learning_rate": 1.8964059196617338e-05, |
| "loss": 0.3708, |
| "step": 1731 |
| }, |
| { |
| "epoch": 1.9766115231032515, |
| "grad_norm": 0.335240345320772, |
| "learning_rate": 1.8942917547568712e-05, |
| "loss": 0.3759, |
| "step": 1732 |
| }, |
| { |
| "epoch": 1.977752424415288, |
| "grad_norm": 0.3014749329262505, |
| "learning_rate": 1.8921775898520086e-05, |
| "loss": 0.3743, |
| "step": 1733 |
| }, |
| { |
| "epoch": 1.9788933257273245, |
| "grad_norm": 0.2642588744547289, |
| "learning_rate": 1.890063424947146e-05, |
| "loss": 0.3742, |
| "step": 1734 |
| }, |
| { |
| "epoch": 1.980034227039361, |
| "grad_norm": 0.3913575419827154, |
| "learning_rate": 1.8879492600422834e-05, |
| "loss": 0.3467, |
| "step": 1735 |
| }, |
| { |
| "epoch": 1.9811751283513976, |
| "grad_norm": 0.3105592595689725, |
| "learning_rate": 1.885835095137421e-05, |
| "loss": 0.3998, |
| "step": 1736 |
| }, |
| { |
| "epoch": 1.9823160296634341, |
| "grad_norm": 0.294507885586603, |
| "learning_rate": 1.8837209302325582e-05, |
| "loss": 0.3636, |
| "step": 1737 |
| }, |
| { |
| "epoch": 1.9834569309754706, |
| "grad_norm": 0.32337663354893215, |
| "learning_rate": 1.8816067653276956e-05, |
| "loss": 0.3502, |
| "step": 1738 |
| }, |
| { |
| "epoch": 1.984597832287507, |
| "grad_norm": 0.3285424012955404, |
| "learning_rate": 1.879492600422833e-05, |
| "loss": 0.3856, |
| "step": 1739 |
| }, |
| { |
| "epoch": 1.9857387335995438, |
| "grad_norm": 0.2904265186501213, |
| "learning_rate": 1.8773784355179705e-05, |
| "loss": 0.371, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.9868796349115803, |
| "grad_norm": 0.2837758477482779, |
| "learning_rate": 1.875264270613108e-05, |
| "loss": 0.3556, |
| "step": 1741 |
| }, |
| { |
| "epoch": 1.9880205362236167, |
| "grad_norm": 0.3199499304677922, |
| "learning_rate": 1.8731501057082453e-05, |
| "loss": 0.3781, |
| "step": 1742 |
| }, |
| { |
| "epoch": 1.9891614375356532, |
| "grad_norm": 0.2620483932749284, |
| "learning_rate": 1.8710359408033827e-05, |
| "loss": 0.347, |
| "step": 1743 |
| }, |
| { |
| "epoch": 1.9903023388476897, |
| "grad_norm": 0.31940723204059734, |
| "learning_rate": 1.8689217758985204e-05, |
| "loss": 0.3531, |
| "step": 1744 |
| }, |
| { |
| "epoch": 1.9914432401597262, |
| "grad_norm": 0.2623270110349358, |
| "learning_rate": 1.8668076109936575e-05, |
| "loss": 0.3779, |
| "step": 1745 |
| }, |
| { |
| "epoch": 1.9925841414717627, |
| "grad_norm": 0.2877974818385608, |
| "learning_rate": 1.864693446088795e-05, |
| "loss": 0.3796, |
| "step": 1746 |
| }, |
| { |
| "epoch": 1.9937250427837991, |
| "grad_norm": 0.30045186550520864, |
| "learning_rate": 1.8625792811839323e-05, |
| "loss": 0.3884, |
| "step": 1747 |
| }, |
| { |
| "epoch": 1.9948659440958356, |
| "grad_norm": 0.2869930913760931, |
| "learning_rate": 1.8604651162790697e-05, |
| "loss": 0.3359, |
| "step": 1748 |
| }, |
| { |
| "epoch": 1.996006845407872, |
| "grad_norm": 0.2916651153608028, |
| "learning_rate": 1.858350951374207e-05, |
| "loss": 0.359, |
| "step": 1749 |
| }, |
| { |
| "epoch": 1.9971477467199086, |
| "grad_norm": 0.32125796964460684, |
| "learning_rate": 1.856236786469345e-05, |
| "loss": 0.391, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.9982886480319453, |
| "grad_norm": 0.28157904329195294, |
| "learning_rate": 1.8541226215644823e-05, |
| "loss": 0.3531, |
| "step": 1751 |
| }, |
| { |
| "epoch": 1.9994295493439818, |
| "grad_norm": 0.28627744809790545, |
| "learning_rate": 1.8520084566596197e-05, |
| "loss": 0.3655, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.0011409013120365, |
| "grad_norm": 0.4487577079137177, |
| "learning_rate": 1.849894291754757e-05, |
| "loss": 0.6555, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.002281802624073, |
| "grad_norm": 0.28088289953032464, |
| "learning_rate": 1.847780126849894e-05, |
| "loss": 0.2958, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.0034227039361094, |
| "grad_norm": 0.2891272992850902, |
| "learning_rate": 1.8456659619450316e-05, |
| "loss": 0.3185, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.004563605248146, |
| "grad_norm": 0.4112554650528418, |
| "learning_rate": 1.8435517970401693e-05, |
| "loss": 0.3052, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.0057045065601824, |
| "grad_norm": 0.26946760277582954, |
| "learning_rate": 1.8414376321353067e-05, |
| "loss": 0.3075, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.006845407872219, |
| "grad_norm": 0.3043281070992764, |
| "learning_rate": 1.839323467230444e-05, |
| "loss": 0.2972, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.0079863091842554, |
| "grad_norm": 0.32883104084724435, |
| "learning_rate": 1.8372093023255815e-05, |
| "loss": 0.2994, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.0091272104962923, |
| "grad_norm": 0.30200499224952815, |
| "learning_rate": 1.835095137420719e-05, |
| "loss": 0.3, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.0102681118083288, |
| "grad_norm": 0.2828662075516682, |
| "learning_rate": 1.8329809725158564e-05, |
| "loss": 0.2955, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.0114090131203652, |
| "grad_norm": 0.2992366159862029, |
| "learning_rate": 1.8308668076109938e-05, |
| "loss": 0.3035, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.0125499144324017, |
| "grad_norm": 0.3371880577990774, |
| "learning_rate": 1.8287526427061312e-05, |
| "loss": 0.2949, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.013690815744438, |
| "grad_norm": 0.28344316901737787, |
| "learning_rate": 1.8266384778012686e-05, |
| "loss": 0.3048, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.0148317170564747, |
| "grad_norm": 0.27179491325048666, |
| "learning_rate": 1.824524312896406e-05, |
| "loss": 0.2888, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.015972618368511, |
| "grad_norm": 0.2727511224813464, |
| "learning_rate": 1.8224101479915434e-05, |
| "loss": 0.2915, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.0171135196805476, |
| "grad_norm": 0.35663638818735105, |
| "learning_rate": 1.8202959830866808e-05, |
| "loss": 0.2928, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.018254420992584, |
| "grad_norm": 0.28095383665244567, |
| "learning_rate": 1.8181818181818182e-05, |
| "loss": 0.2927, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.0193953223046206, |
| "grad_norm": 0.2711800724825952, |
| "learning_rate": 1.8160676532769556e-05, |
| "loss": 0.3033, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.020536223616657, |
| "grad_norm": 0.30322673551396767, |
| "learning_rate": 1.8139534883720934e-05, |
| "loss": 0.2953, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.0216771249286936, |
| "grad_norm": 0.2750325414334585, |
| "learning_rate": 1.8118393234672304e-05, |
| "loss": 0.2815, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.02281802624073, |
| "grad_norm": 0.2472532660983106, |
| "learning_rate": 1.809725158562368e-05, |
| "loss": 0.2921, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.0239589275527665, |
| "grad_norm": 0.2927848622819586, |
| "learning_rate": 1.8076109936575052e-05, |
| "loss": 0.3057, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.025099828864803, |
| "grad_norm": 0.2294021321585527, |
| "learning_rate": 1.8054968287526427e-05, |
| "loss": 0.2974, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.02624073017684, |
| "grad_norm": 0.26484271651685803, |
| "learning_rate": 1.80338266384778e-05, |
| "loss": 0.2848, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.0273816314888764, |
| "grad_norm": 0.2538624793787531, |
| "learning_rate": 1.8012684989429178e-05, |
| "loss": 0.2997, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.028522532800913, |
| "grad_norm": 0.24532737131358523, |
| "learning_rate": 1.7991543340380552e-05, |
| "loss": 0.2934, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.0296634341129494, |
| "grad_norm": 0.33354789343265456, |
| "learning_rate": 1.7970401691331926e-05, |
| "loss": 0.2949, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.030804335424986, |
| "grad_norm": 0.2463122343168031, |
| "learning_rate": 1.79492600422833e-05, |
| "loss": 0.2914, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.0319452367370223, |
| "grad_norm": 0.2723099135506961, |
| "learning_rate": 1.792811839323467e-05, |
| "loss": 0.3127, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.033086138049059, |
| "grad_norm": 0.2641672662067301, |
| "learning_rate": 1.7906976744186045e-05, |
| "loss": 0.2877, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.0342270393610953, |
| "grad_norm": 0.2590296004276685, |
| "learning_rate": 1.7885835095137422e-05, |
| "loss": 0.2825, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.0353679406731318, |
| "grad_norm": 0.23856790203525635, |
| "learning_rate": 1.7864693446088797e-05, |
| "loss": 0.2809, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.0365088419851682, |
| "grad_norm": 0.25679769430683064, |
| "learning_rate": 1.784355179704017e-05, |
| "loss": 0.2668, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.0376497432972047, |
| "grad_norm": 0.23357636555828296, |
| "learning_rate": 1.7822410147991545e-05, |
| "loss": 0.2934, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.038790644609241, |
| "grad_norm": 0.2635889031071584, |
| "learning_rate": 1.780126849894292e-05, |
| "loss": 0.3032, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.0399315459212777, |
| "grad_norm": 0.24980277535986198, |
| "learning_rate": 1.7780126849894293e-05, |
| "loss": 0.2977, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.041072447233314, |
| "grad_norm": 0.295692136350144, |
| "learning_rate": 1.7758985200845667e-05, |
| "loss": 0.2952, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.0422133485453506, |
| "grad_norm": 0.23935600190392897, |
| "learning_rate": 1.773784355179704e-05, |
| "loss": 0.2964, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.043354249857387, |
| "grad_norm": 0.30965446159470705, |
| "learning_rate": 1.7716701902748415e-05, |
| "loss": 0.2739, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.044495151169424, |
| "grad_norm": 0.24093476355063664, |
| "learning_rate": 1.769556025369979e-05, |
| "loss": 0.303, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.0456360524814605, |
| "grad_norm": 0.25136517438617395, |
| "learning_rate": 1.7674418604651163e-05, |
| "loss": 0.2871, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.046776953793497, |
| "grad_norm": 0.2751162964138566, |
| "learning_rate": 1.7653276955602537e-05, |
| "loss": 0.2935, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.0479178551055335, |
| "grad_norm": 0.26971281309270306, |
| "learning_rate": 1.763213530655391e-05, |
| "loss": 0.2928, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.04905875641757, |
| "grad_norm": 0.2312877390629995, |
| "learning_rate": 1.7610993657505285e-05, |
| "loss": 0.2898, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.0501996577296064, |
| "grad_norm": 0.2712479419818717, |
| "learning_rate": 1.7589852008456663e-05, |
| "loss": 0.2859, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.051340559041643, |
| "grad_norm": 0.310067258301682, |
| "learning_rate": 1.7568710359408037e-05, |
| "loss": 0.326, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.0524814603536794, |
| "grad_norm": 0.2526105961518339, |
| "learning_rate": 1.7547568710359408e-05, |
| "loss": 0.3056, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.053622361665716, |
| "grad_norm": 0.2579468875301229, |
| "learning_rate": 1.7526427061310782e-05, |
| "loss": 0.3039, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.0547632629777524, |
| "grad_norm": 0.24865148191408976, |
| "learning_rate": 1.7505285412262156e-05, |
| "loss": 0.299, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.055904164289789, |
| "grad_norm": 0.27482713881750265, |
| "learning_rate": 1.748414376321353e-05, |
| "loss": 0.2828, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.0570450656018253, |
| "grad_norm": 0.23944885286193957, |
| "learning_rate": 1.7463002114164907e-05, |
| "loss": 0.2802, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.058185966913862, |
| "grad_norm": 0.23387539270157587, |
| "learning_rate": 1.744186046511628e-05, |
| "loss": 0.2833, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.0593268682258983, |
| "grad_norm": 0.2507361486702264, |
| "learning_rate": 1.7420718816067655e-05, |
| "loss": 0.2912, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.0604677695379348, |
| "grad_norm": 0.27028591396300616, |
| "learning_rate": 1.739957716701903e-05, |
| "loss": 0.2753, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.0616086708499717, |
| "grad_norm": 0.2619452920223977, |
| "learning_rate": 1.73784355179704e-05, |
| "loss": 0.2842, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.062749572162008, |
| "grad_norm": 0.2607113990408732, |
| "learning_rate": 1.7357293868921774e-05, |
| "loss": 0.292, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.0638904734740446, |
| "grad_norm": 0.30100822829080276, |
| "learning_rate": 1.7336152219873152e-05, |
| "loss": 0.3064, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.065031374786081, |
| "grad_norm": 0.2568094886960586, |
| "learning_rate": 1.7315010570824526e-05, |
| "loss": 0.2769, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.0661722760981176, |
| "grad_norm": 0.2668816139835237, |
| "learning_rate": 1.72938689217759e-05, |
| "loss": 0.2893, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.067313177410154, |
| "grad_norm": 0.275437215316109, |
| "learning_rate": 1.7272727272727274e-05, |
| "loss": 0.2889, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.0684540787221906, |
| "grad_norm": 0.2416657979758825, |
| "learning_rate": 1.7251585623678648e-05, |
| "loss": 0.2943, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.069594980034227, |
| "grad_norm": 0.28532369815909503, |
| "learning_rate": 1.7230443974630022e-05, |
| "loss": 0.2953, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.0707358813462635, |
| "grad_norm": 0.23963135477526373, |
| "learning_rate": 1.7209302325581396e-05, |
| "loss": 0.2967, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.0718767826583, |
| "grad_norm": 0.27869751780119484, |
| "learning_rate": 1.718816067653277e-05, |
| "loss": 0.2963, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.0730176839703365, |
| "grad_norm": 0.2662336852675231, |
| "learning_rate": 1.7167019027484144e-05, |
| "loss": 0.2972, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.074158585282373, |
| "grad_norm": 0.2588271856116877, |
| "learning_rate": 1.714587737843552e-05, |
| "loss": 0.2945, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.0752994865944094, |
| "grad_norm": 0.2862172344229437, |
| "learning_rate": 1.7124735729386892e-05, |
| "loss": 0.2994, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.076440387906446, |
| "grad_norm": 0.24831537941992662, |
| "learning_rate": 1.7103594080338267e-05, |
| "loss": 0.2834, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.0775812892184824, |
| "grad_norm": 0.22516742916788882, |
| "learning_rate": 1.708245243128964e-05, |
| "loss": 0.2971, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.0787221905305193, |
| "grad_norm": 0.28169017677827185, |
| "learning_rate": 1.7061310782241015e-05, |
| "loss": 0.2861, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.079863091842556, |
| "grad_norm": 0.23806396697312238, |
| "learning_rate": 1.7040169133192392e-05, |
| "loss": 0.2834, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.0810039931545923, |
| "grad_norm": 0.2555243399385284, |
| "learning_rate": 1.7019027484143766e-05, |
| "loss": 0.304, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.0821448944666288, |
| "grad_norm": 0.2578938428402628, |
| "learning_rate": 1.6997885835095137e-05, |
| "loss": 0.2895, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.0832857957786652, |
| "grad_norm": 0.23149122222114324, |
| "learning_rate": 1.697674418604651e-05, |
| "loss": 0.2806, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.0844266970907017, |
| "grad_norm": 0.25122516172954884, |
| "learning_rate": 1.6955602536997885e-05, |
| "loss": 0.2906, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.085567598402738, |
| "grad_norm": 0.22496719907018478, |
| "learning_rate": 1.693446088794926e-05, |
| "loss": 0.268, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.0867084997147747, |
| "grad_norm": 0.24917040442115526, |
| "learning_rate": 1.6913319238900637e-05, |
| "loss": 0.3072, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.087849401026811, |
| "grad_norm": 0.2461073133402563, |
| "learning_rate": 1.689217758985201e-05, |
| "loss": 0.3023, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.0889903023388476, |
| "grad_norm": 0.24067405395346075, |
| "learning_rate": 1.6871035940803385e-05, |
| "loss": 0.2873, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.090131203650884, |
| "grad_norm": 0.23945458238615078, |
| "learning_rate": 1.684989429175476e-05, |
| "loss": 0.2902, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.0912721049629206, |
| "grad_norm": 0.2382053179878838, |
| "learning_rate": 1.6828752642706133e-05, |
| "loss": 0.3078, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.092413006274957, |
| "grad_norm": 0.23429092759412315, |
| "learning_rate": 1.6807610993657504e-05, |
| "loss": 0.3189, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.0935539075869936, |
| "grad_norm": 0.24756030196541584, |
| "learning_rate": 1.678646934460888e-05, |
| "loss": 0.3011, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.09469480889903, |
| "grad_norm": 0.25175437537430273, |
| "learning_rate": 1.6765327695560255e-05, |
| "loss": 0.2838, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.095835710211067, |
| "grad_norm": 0.23741898688224317, |
| "learning_rate": 1.674418604651163e-05, |
| "loss": 0.2942, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.0969766115231034, |
| "grad_norm": 0.24003294023247418, |
| "learning_rate": 1.6723044397463003e-05, |
| "loss": 0.2993, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.09811751283514, |
| "grad_norm": 0.23478472744981252, |
| "learning_rate": 1.6701902748414377e-05, |
| "loss": 0.2907, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.0992584141471764, |
| "grad_norm": 0.23365403996013454, |
| "learning_rate": 1.668076109936575e-05, |
| "loss": 0.2772, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.100399315459213, |
| "grad_norm": 0.24999253956880316, |
| "learning_rate": 1.6659619450317125e-05, |
| "loss": 0.3015, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.1015402167712494, |
| "grad_norm": 0.23580829687884494, |
| "learning_rate": 1.66384778012685e-05, |
| "loss": 0.289, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.102681118083286, |
| "grad_norm": 0.2605491108678695, |
| "learning_rate": 1.6617336152219874e-05, |
| "loss": 0.2987, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.1038220193953223, |
| "grad_norm": 0.24526608405712047, |
| "learning_rate": 1.6596194503171248e-05, |
| "loss": 0.2911, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.104962920707359, |
| "grad_norm": 0.23507549939657288, |
| "learning_rate": 1.6575052854122622e-05, |
| "loss": 0.2935, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.1061038220193953, |
| "grad_norm": 0.23017170444286497, |
| "learning_rate": 1.6553911205073996e-05, |
| "loss": 0.293, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.1072447233314318, |
| "grad_norm": 0.23209560845918595, |
| "learning_rate": 1.653276955602537e-05, |
| "loss": 0.2893, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.1083856246434682, |
| "grad_norm": 0.23595690639390743, |
| "learning_rate": 1.6511627906976744e-05, |
| "loss": 0.2805, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.1095265259555047, |
| "grad_norm": 0.21983931290432804, |
| "learning_rate": 1.649048625792812e-05, |
| "loss": 0.2902, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.110667427267541, |
| "grad_norm": 0.21796041933539737, |
| "learning_rate": 1.6469344608879496e-05, |
| "loss": 0.2823, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.1118083285795777, |
| "grad_norm": 0.24121642344375474, |
| "learning_rate": 1.6448202959830866e-05, |
| "loss": 0.2942, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.1129492298916146, |
| "grad_norm": 0.24186049324723294, |
| "learning_rate": 1.642706131078224e-05, |
| "loss": 0.2868, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.114090131203651, |
| "grad_norm": 0.22393730692732144, |
| "learning_rate": 1.6405919661733614e-05, |
| "loss": 0.3086, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.1152310325156876, |
| "grad_norm": 0.2617315914853435, |
| "learning_rate": 1.638477801268499e-05, |
| "loss": 0.2875, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.116371933827724, |
| "grad_norm": 0.23203734158676664, |
| "learning_rate": 1.6363636363636366e-05, |
| "loss": 0.2716, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.1175128351397605, |
| "grad_norm": 0.286581008935328, |
| "learning_rate": 1.634249471458774e-05, |
| "loss": 0.2991, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.118653736451797, |
| "grad_norm": 0.24041373152409004, |
| "learning_rate": 1.6321353065539114e-05, |
| "loss": 0.292, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.1197946377638335, |
| "grad_norm": 0.29656734889505126, |
| "learning_rate": 1.6300211416490488e-05, |
| "loss": 0.2864, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.12093553907587, |
| "grad_norm": 0.25003488021762793, |
| "learning_rate": 1.6279069767441862e-05, |
| "loss": 0.3022, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.1220764403879064, |
| "grad_norm": 0.22864083847140418, |
| "learning_rate": 1.6257928118393233e-05, |
| "loss": 0.2952, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.123217341699943, |
| "grad_norm": 0.26581686925779857, |
| "learning_rate": 1.623678646934461e-05, |
| "loss": 0.2856, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.1243582430119794, |
| "grad_norm": 0.26086638331564543, |
| "learning_rate": 1.6215644820295984e-05, |
| "loss": 0.2935, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.125499144324016, |
| "grad_norm": 0.23337841700522194, |
| "learning_rate": 1.619450317124736e-05, |
| "loss": 0.2852, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.1266400456360524, |
| "grad_norm": 0.2769523730953264, |
| "learning_rate": 1.6173361522198733e-05, |
| "loss": 0.2781, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.127780946948089, |
| "grad_norm": 0.2718124892758348, |
| "learning_rate": 1.6152219873150107e-05, |
| "loss": 0.2962, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.1289218482601253, |
| "grad_norm": 0.22143722786026926, |
| "learning_rate": 1.613107822410148e-05, |
| "loss": 0.3111, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.1300627495721622, |
| "grad_norm": 0.27102920563649024, |
| "learning_rate": 1.6109936575052855e-05, |
| "loss": 0.2935, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.1312036508841987, |
| "grad_norm": 0.27336499438269957, |
| "learning_rate": 1.608879492600423e-05, |
| "loss": 0.2993, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.132344552196235, |
| "grad_norm": 0.2178279407621219, |
| "learning_rate": 1.6067653276955603e-05, |
| "loss": 0.2765, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.1334854535082717, |
| "grad_norm": 0.23210951727420406, |
| "learning_rate": 1.6046511627906977e-05, |
| "loss": 0.3076, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.134626354820308, |
| "grad_norm": 0.24938828827661355, |
| "learning_rate": 1.602536997885835e-05, |
| "loss": 0.2999, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.1357672561323446, |
| "grad_norm": 0.28143320894936363, |
| "learning_rate": 1.6004228329809725e-05, |
| "loss": 0.2875, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.136908157444381, |
| "grad_norm": 0.2363884317835549, |
| "learning_rate": 1.59830866807611e-05, |
| "loss": 0.2818, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.1380490587564176, |
| "grad_norm": 0.23142000312910235, |
| "learning_rate": 1.5961945031712473e-05, |
| "loss": 0.2886, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.139189960068454, |
| "grad_norm": 0.2967980449947715, |
| "learning_rate": 1.594080338266385e-05, |
| "loss": 0.287, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.1403308613804906, |
| "grad_norm": 0.2523233122910311, |
| "learning_rate": 1.5919661733615225e-05, |
| "loss": 0.3046, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.141471762692527, |
| "grad_norm": 0.253248683782715, |
| "learning_rate": 1.58985200845666e-05, |
| "loss": 0.299, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.1426126640045635, |
| "grad_norm": 0.25140721281143225, |
| "learning_rate": 1.587737843551797e-05, |
| "loss": 0.3008, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.1437535653166, |
| "grad_norm": 0.2661164691317505, |
| "learning_rate": 1.5856236786469344e-05, |
| "loss": 0.2956, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.1448944666286365, |
| "grad_norm": 0.23762095264944688, |
| "learning_rate": 1.5835095137420718e-05, |
| "loss": 0.2934, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.146035367940673, |
| "grad_norm": 0.2699960211507811, |
| "learning_rate": 1.5813953488372095e-05, |
| "loss": 0.3012, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.1471762692527094, |
| "grad_norm": 0.25416412797833793, |
| "learning_rate": 1.579281183932347e-05, |
| "loss": 0.2997, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.148317170564746, |
| "grad_norm": 0.2603044805803768, |
| "learning_rate": 1.5771670190274843e-05, |
| "loss": 0.311, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.149458071876783, |
| "grad_norm": 0.24861515965118985, |
| "learning_rate": 1.5750528541226217e-05, |
| "loss": 0.2749, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.1505989731888193, |
| "grad_norm": 0.2266673641657746, |
| "learning_rate": 1.572938689217759e-05, |
| "loss": 0.2851, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.151739874500856, |
| "grad_norm": 0.2542824263685328, |
| "learning_rate": 1.5708245243128966e-05, |
| "loss": 0.2778, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.1528807758128923, |
| "grad_norm": 0.23241437414135388, |
| "learning_rate": 1.5687103594080336e-05, |
| "loss": 0.3021, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.1540216771249288, |
| "grad_norm": 0.2439766144777705, |
| "learning_rate": 1.5665961945031714e-05, |
| "loss": 0.2918, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.1551625784369652, |
| "grad_norm": 0.22410145683828062, |
| "learning_rate": 1.5644820295983088e-05, |
| "loss": 0.2934, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.1563034797490017, |
| "grad_norm": 0.21422379309473735, |
| "learning_rate": 1.5623678646934462e-05, |
| "loss": 0.2804, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.157444381061038, |
| "grad_norm": 0.23352117964227453, |
| "learning_rate": 1.5602536997885836e-05, |
| "loss": 0.2767, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.1585852823730747, |
| "grad_norm": 0.2202691337690104, |
| "learning_rate": 1.558139534883721e-05, |
| "loss": 0.2938, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.159726183685111, |
| "grad_norm": 0.23851514096715734, |
| "learning_rate": 1.5560253699788584e-05, |
| "loss": 0.2825, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.1608670849971476, |
| "grad_norm": 0.24720474182268987, |
| "learning_rate": 1.5539112050739958e-05, |
| "loss": 0.2968, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.162007986309184, |
| "grad_norm": 0.23232992455363097, |
| "learning_rate": 1.5517970401691332e-05, |
| "loss": 0.2905, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.1631488876212206, |
| "grad_norm": 0.26117342795271153, |
| "learning_rate": 1.5496828752642706e-05, |
| "loss": 0.2905, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.164289788933257, |
| "grad_norm": 0.22523236320973763, |
| "learning_rate": 1.547568710359408e-05, |
| "loss": 0.2862, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.1654306902452936, |
| "grad_norm": 0.2516240346631564, |
| "learning_rate": 1.5454545454545454e-05, |
| "loss": 0.3104, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.1665715915573305, |
| "grad_norm": 0.23518703025428958, |
| "learning_rate": 1.543340380549683e-05, |
| "loss": 0.3012, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.167712492869367, |
| "grad_norm": 0.2373995553413098, |
| "learning_rate": 1.5412262156448203e-05, |
| "loss": 0.2817, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.1688533941814034, |
| "grad_norm": 0.2215128863227245, |
| "learning_rate": 1.539112050739958e-05, |
| "loss": 0.3211, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.16999429549344, |
| "grad_norm": 0.24566821893965865, |
| "learning_rate": 1.5369978858350954e-05, |
| "loss": 0.2903, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.1711351968054764, |
| "grad_norm": 0.23907763184389938, |
| "learning_rate": 1.5348837209302328e-05, |
| "loss": 0.2917, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.172276098117513, |
| "grad_norm": 0.24613813251538924, |
| "learning_rate": 1.53276955602537e-05, |
| "loss": 0.2983, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.1734169994295494, |
| "grad_norm": 0.27660756080669346, |
| "learning_rate": 1.5306553911205073e-05, |
| "loss": 0.3074, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.174557900741586, |
| "grad_norm": 0.24073203570701351, |
| "learning_rate": 1.5285412262156447e-05, |
| "loss": 0.3047, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.1756988020536223, |
| "grad_norm": 0.24942911547741597, |
| "learning_rate": 1.5264270613107824e-05, |
| "loss": 0.2895, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.176839703365659, |
| "grad_norm": 0.2634505861930869, |
| "learning_rate": 1.5243128964059197e-05, |
| "loss": 0.2874, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.1779806046776953, |
| "grad_norm": 0.25700128710560166, |
| "learning_rate": 1.5221987315010573e-05, |
| "loss": 0.2921, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.1791215059897318, |
| "grad_norm": 0.2504882028942424, |
| "learning_rate": 1.5200845665961947e-05, |
| "loss": 0.2945, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.1802624073017682, |
| "grad_norm": 0.25717922096384405, |
| "learning_rate": 1.517970401691332e-05, |
| "loss": 0.2996, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.1814033086138047, |
| "grad_norm": 0.24086404886837165, |
| "learning_rate": 1.5158562367864695e-05, |
| "loss": 0.3071, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.182544209925841, |
| "grad_norm": 0.25023284208061214, |
| "learning_rate": 1.5137420718816067e-05, |
| "loss": 0.2834, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.183685111237878, |
| "grad_norm": 0.24495771528295845, |
| "learning_rate": 1.5116279069767441e-05, |
| "loss": 0.2846, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.1848260125499146, |
| "grad_norm": 0.23837446613226718, |
| "learning_rate": 1.5095137420718815e-05, |
| "loss": 0.2985, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.185966913861951, |
| "grad_norm": 0.25371400523552884, |
| "learning_rate": 1.5073995771670191e-05, |
| "loss": 0.2935, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.1871078151739876, |
| "grad_norm": 0.22958447884607433, |
| "learning_rate": 1.5052854122621565e-05, |
| "loss": 0.3015, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.188248716486024, |
| "grad_norm": 0.24419362474562023, |
| "learning_rate": 1.503171247357294e-05, |
| "loss": 0.2794, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.1893896177980605, |
| "grad_norm": 0.2482930512404429, |
| "learning_rate": 1.5010570824524315e-05, |
| "loss": 0.2881, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.190530519110097, |
| "grad_norm": 0.2518793397866365, |
| "learning_rate": 1.4989429175475689e-05, |
| "loss": 0.3022, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.1916714204221335, |
| "grad_norm": 0.23310730322180817, |
| "learning_rate": 1.4968287526427063e-05, |
| "loss": 0.3002, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.19281232173417, |
| "grad_norm": 0.24918138942890583, |
| "learning_rate": 1.4947145877378436e-05, |
| "loss": 0.3062, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.1939532230462064, |
| "grad_norm": 0.24663388333295883, |
| "learning_rate": 1.492600422832981e-05, |
| "loss": 0.2856, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.195094124358243, |
| "grad_norm": 0.2513589833120337, |
| "learning_rate": 1.4904862579281184e-05, |
| "loss": 0.3004, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.1962350256702794, |
| "grad_norm": 0.26311683859831203, |
| "learning_rate": 1.488372093023256e-05, |
| "loss": 0.2826, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.197375926982316, |
| "grad_norm": 0.22385424880185895, |
| "learning_rate": 1.4862579281183934e-05, |
| "loss": 0.2839, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.1985168282943524, |
| "grad_norm": 0.2159732264542219, |
| "learning_rate": 1.4841437632135308e-05, |
| "loss": 0.2959, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.199657729606389, |
| "grad_norm": 0.2162278305808399, |
| "learning_rate": 1.4820295983086682e-05, |
| "loss": 0.2891, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.2007986309184258, |
| "grad_norm": 0.24339108189160494, |
| "learning_rate": 1.4799154334038057e-05, |
| "loss": 0.3227, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.2019395322304622, |
| "grad_norm": 0.23716291707294404, |
| "learning_rate": 1.4778012684989432e-05, |
| "loss": 0.3111, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.2030804335424987, |
| "grad_norm": 0.2224056361602292, |
| "learning_rate": 1.4756871035940802e-05, |
| "loss": 0.2855, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.204221334854535, |
| "grad_norm": 0.22391025870440956, |
| "learning_rate": 1.4735729386892178e-05, |
| "loss": 0.2788, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.2053622361665717, |
| "grad_norm": 0.2044482302165388, |
| "learning_rate": 1.4714587737843552e-05, |
| "loss": 0.2935, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.206503137478608, |
| "grad_norm": 0.21986998112087408, |
| "learning_rate": 1.4693446088794926e-05, |
| "loss": 0.2848, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.2076440387906446, |
| "grad_norm": 0.23802896070685578, |
| "learning_rate": 1.4672304439746302e-05, |
| "loss": 0.2836, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.208784940102681, |
| "grad_norm": 0.22630388090138429, |
| "learning_rate": 1.4651162790697676e-05, |
| "loss": 0.3023, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.2099258414147176, |
| "grad_norm": 0.24389924649877973, |
| "learning_rate": 1.463002114164905e-05, |
| "loss": 0.266, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.211066742726754, |
| "grad_norm": 0.23067707246778427, |
| "learning_rate": 1.4608879492600424e-05, |
| "loss": 0.2921, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.2122076440387906, |
| "grad_norm": 0.24602098091273714, |
| "learning_rate": 1.4587737843551796e-05, |
| "loss": 0.2675, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.213348545350827, |
| "grad_norm": 0.22413934497837779, |
| "learning_rate": 1.456659619450317e-05, |
| "loss": 0.307, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.2144894466628635, |
| "grad_norm": 0.235601927149484, |
| "learning_rate": 1.4545454545454545e-05, |
| "loss": 0.3009, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.2156303479749, |
| "grad_norm": 0.23907967781378392, |
| "learning_rate": 1.452431289640592e-05, |
| "loss": 0.3082, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.2167712492869365, |
| "grad_norm": 0.24293175224705252, |
| "learning_rate": 1.4503171247357294e-05, |
| "loss": 0.2992, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.2179121505989734, |
| "grad_norm": 0.2552656926793362, |
| "learning_rate": 1.4482029598308669e-05, |
| "loss": 0.2947, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.21905305191101, |
| "grad_norm": 0.22778782163505898, |
| "learning_rate": 1.4460887949260044e-05, |
| "loss": 0.2996, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.2201939532230464, |
| "grad_norm": 0.23183846405236744, |
| "learning_rate": 1.4439746300211418e-05, |
| "loss": 0.2862, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.221334854535083, |
| "grad_norm": 0.2566338595557142, |
| "learning_rate": 1.4418604651162792e-05, |
| "loss": 0.2971, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.2224757558471193, |
| "grad_norm": 0.25108060135481586, |
| "learning_rate": 1.4397463002114165e-05, |
| "loss": 0.3039, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.223616657159156, |
| "grad_norm": 0.21702286457482928, |
| "learning_rate": 1.4376321353065539e-05, |
| "loss": 0.291, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.2247575584711923, |
| "grad_norm": 0.2281526549309785, |
| "learning_rate": 1.4355179704016913e-05, |
| "loss": 0.2869, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.2258984597832288, |
| "grad_norm": 0.2297634774448516, |
| "learning_rate": 1.4334038054968287e-05, |
| "loss": 0.2862, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.2270393610952652, |
| "grad_norm": 0.224251286343074, |
| "learning_rate": 1.4312896405919663e-05, |
| "loss": 0.2978, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.2281802624073017, |
| "grad_norm": 0.22372187364380428, |
| "learning_rate": 1.4291754756871037e-05, |
| "loss": 0.2829, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.229321163719338, |
| "grad_norm": 0.2582197182895316, |
| "learning_rate": 1.4270613107822411e-05, |
| "loss": 0.3214, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.2304620650313747, |
| "grad_norm": 0.22771491528249513, |
| "learning_rate": 1.4249471458773787e-05, |
| "loss": 0.2929, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.231602966343411, |
| "grad_norm": 0.22136843625693173, |
| "learning_rate": 1.422832980972516e-05, |
| "loss": 0.295, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.2327438676554476, |
| "grad_norm": 0.26906645511646826, |
| "learning_rate": 1.4207188160676532e-05, |
| "loss": 0.2893, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.233884768967484, |
| "grad_norm": 0.2215278537540451, |
| "learning_rate": 1.4186046511627907e-05, |
| "loss": 0.288, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.235025670279521, |
| "grad_norm": 0.23908694956372106, |
| "learning_rate": 1.4164904862579281e-05, |
| "loss": 0.2938, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.2361665715915575, |
| "grad_norm": 0.229217654612187, |
| "learning_rate": 1.4143763213530655e-05, |
| "loss": 0.2942, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.237307472903594, |
| "grad_norm": 0.23519915486491552, |
| "learning_rate": 1.412262156448203e-05, |
| "loss": 0.2873, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.2384483742156305, |
| "grad_norm": 0.2403014255500897, |
| "learning_rate": 1.4101479915433405e-05, |
| "loss": 0.3018, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.239589275527667, |
| "grad_norm": 0.22939951535091463, |
| "learning_rate": 1.408033826638478e-05, |
| "loss": 0.2882, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.2407301768397034, |
| "grad_norm": 0.24287675445152404, |
| "learning_rate": 1.4059196617336153e-05, |
| "loss": 0.3037, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.24187107815174, |
| "grad_norm": 0.2345981608383373, |
| "learning_rate": 1.403805496828753e-05, |
| "loss": 0.3208, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.2430119794637764, |
| "grad_norm": 0.24968054421359923, |
| "learning_rate": 1.40169133192389e-05, |
| "loss": 0.2937, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.244152880775813, |
| "grad_norm": 0.21193313579929754, |
| "learning_rate": 1.3995771670190274e-05, |
| "loss": 0.2972, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.2452937820878494, |
| "grad_norm": 0.238901730891219, |
| "learning_rate": 1.397463002114165e-05, |
| "loss": 0.2956, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.246434683399886, |
| "grad_norm": 0.2189680066373367, |
| "learning_rate": 1.3953488372093024e-05, |
| "loss": 0.2849, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.2475755847119223, |
| "grad_norm": 0.2129342166114009, |
| "learning_rate": 1.3932346723044398e-05, |
| "loss": 0.2988, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.248716486023959, |
| "grad_norm": 0.260367276734894, |
| "learning_rate": 1.3911205073995774e-05, |
| "loss": 0.3065, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.2498573873359953, |
| "grad_norm": 0.23116646083828635, |
| "learning_rate": 1.3890063424947148e-05, |
| "loss": 0.2802, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.2509982886480318, |
| "grad_norm": 0.23575182654734847, |
| "learning_rate": 1.3868921775898522e-05, |
| "loss": 0.3006, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.2521391899600687, |
| "grad_norm": 0.23514500823576498, |
| "learning_rate": 1.3847780126849896e-05, |
| "loss": 0.286, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.253280091272105, |
| "grad_norm": 0.2455896325382261, |
| "learning_rate": 1.3826638477801268e-05, |
| "loss": 0.2873, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.2544209925841416, |
| "grad_norm": 0.23967956911885113, |
| "learning_rate": 1.3805496828752642e-05, |
| "loss": 0.3073, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.255561893896178, |
| "grad_norm": 0.2586497727165531, |
| "learning_rate": 1.3784355179704016e-05, |
| "loss": 0.2953, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.2567027952082146, |
| "grad_norm": 0.22560983749066454, |
| "learning_rate": 1.3763213530655392e-05, |
| "loss": 0.282, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.257843696520251, |
| "grad_norm": 0.24888255862010317, |
| "learning_rate": 1.3742071881606766e-05, |
| "loss": 0.3008, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.2589845978322876, |
| "grad_norm": 0.24641463696901852, |
| "learning_rate": 1.372093023255814e-05, |
| "loss": 0.2981, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.260125499144324, |
| "grad_norm": 0.2161155293814851, |
| "learning_rate": 1.3699788583509516e-05, |
| "loss": 0.2926, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.2612664004563605, |
| "grad_norm": 0.24532498098160077, |
| "learning_rate": 1.367864693446089e-05, |
| "loss": 0.2832, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.262407301768397, |
| "grad_norm": 0.2015557674816523, |
| "learning_rate": 1.365750528541226e-05, |
| "loss": 0.2889, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.2635482030804335, |
| "grad_norm": 0.2763485972223904, |
| "learning_rate": 1.3636363636363637e-05, |
| "loss": 0.3033, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.26468910439247, |
| "grad_norm": 0.23187776287773346, |
| "learning_rate": 1.361522198731501e-05, |
| "loss": 0.2805, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.2658300057045064, |
| "grad_norm": 0.21897157764795727, |
| "learning_rate": 1.3594080338266385e-05, |
| "loss": 0.2804, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.266970907016543, |
| "grad_norm": 0.2196787744861354, |
| "learning_rate": 1.3572938689217759e-05, |
| "loss": 0.2838, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.2681118083285794, |
| "grad_norm": 0.23196784986462152, |
| "learning_rate": 1.3551797040169135e-05, |
| "loss": 0.3006, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.2692527096406163, |
| "grad_norm": 0.2521901334329305, |
| "learning_rate": 1.3530655391120509e-05, |
| "loss": 0.2873, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.2703936109526524, |
| "grad_norm": 0.23247846103417055, |
| "learning_rate": 1.3509513742071883e-05, |
| "loss": 0.2866, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.2715345122646893, |
| "grad_norm": 0.269518441841022, |
| "learning_rate": 1.3488372093023258e-05, |
| "loss": 0.3135, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.2726754135767258, |
| "grad_norm": 0.23004727811310058, |
| "learning_rate": 1.3467230443974629e-05, |
| "loss": 0.2914, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.2738163148887622, |
| "grad_norm": 0.2483150678244816, |
| "learning_rate": 1.3446088794926003e-05, |
| "loss": 0.3105, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.2749572162007987, |
| "grad_norm": 0.24207883143871808, |
| "learning_rate": 1.3424947145877379e-05, |
| "loss": 0.292, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.276098117512835, |
| "grad_norm": 0.2292806033794932, |
| "learning_rate": 1.3403805496828753e-05, |
| "loss": 0.2896, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.2772390188248717, |
| "grad_norm": 0.21696103815825676, |
| "learning_rate": 1.3382663847780127e-05, |
| "loss": 0.3019, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.278379920136908, |
| "grad_norm": 0.24517163113794077, |
| "learning_rate": 1.3361522198731501e-05, |
| "loss": 0.3005, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.2795208214489446, |
| "grad_norm": 0.25085833283446013, |
| "learning_rate": 1.3340380549682877e-05, |
| "loss": 0.3081, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.280661722760981, |
| "grad_norm": 0.21047117649117664, |
| "learning_rate": 1.3319238900634251e-05, |
| "loss": 0.2961, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.2818026240730176, |
| "grad_norm": 0.23178127896840034, |
| "learning_rate": 1.3298097251585625e-05, |
| "loss": 0.2902, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.282943525385054, |
| "grad_norm": 0.23749065791689886, |
| "learning_rate": 1.3276955602536997e-05, |
| "loss": 0.2872, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.2840844266970906, |
| "grad_norm": 0.2075544823403873, |
| "learning_rate": 1.3255813953488372e-05, |
| "loss": 0.2819, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.285225328009127, |
| "grad_norm": 0.20462954299061928, |
| "learning_rate": 1.3234672304439746e-05, |
| "loss": 0.2948, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.286366229321164, |
| "grad_norm": 0.2365701652861174, |
| "learning_rate": 1.3213530655391121e-05, |
| "loss": 0.281, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.2875071306332, |
| "grad_norm": 0.23216020430008696, |
| "learning_rate": 1.3192389006342495e-05, |
| "loss": 0.2957, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.288648031945237, |
| "grad_norm": 0.22488722691116822, |
| "learning_rate": 1.317124735729387e-05, |
| "loss": 0.3038, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.2897889332572734, |
| "grad_norm": 0.2247997716859529, |
| "learning_rate": 1.3150105708245244e-05, |
| "loss": 0.2948, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.29092983456931, |
| "grad_norm": 0.23588924302825817, |
| "learning_rate": 1.312896405919662e-05, |
| "loss": 0.3015, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.2920707358813464, |
| "grad_norm": 0.203969359420377, |
| "learning_rate": 1.3107822410147993e-05, |
| "loss": 0.2833, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.293211637193383, |
| "grad_norm": 0.2235108319356959, |
| "learning_rate": 1.3086680761099366e-05, |
| "loss": 0.2984, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.2943525385054193, |
| "grad_norm": 0.23896468835409596, |
| "learning_rate": 1.306553911205074e-05, |
| "loss": 0.2864, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.295493439817456, |
| "grad_norm": 0.2212794132404757, |
| "learning_rate": 1.3044397463002114e-05, |
| "loss": 0.3006, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.2966343411294923, |
| "grad_norm": 0.23225833735132453, |
| "learning_rate": 1.3023255813953488e-05, |
| "loss": 0.2852, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.2977752424415288, |
| "grad_norm": 0.23082470670882826, |
| "learning_rate": 1.3002114164904864e-05, |
| "loss": 0.3054, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.2989161437535652, |
| "grad_norm": 0.2360677430344232, |
| "learning_rate": 1.2980972515856238e-05, |
| "loss": 0.2877, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.3000570450656017, |
| "grad_norm": 0.2404968090073572, |
| "learning_rate": 1.2959830866807612e-05, |
| "loss": 0.296, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.301197946377638, |
| "grad_norm": 0.24836315958275604, |
| "learning_rate": 1.2938689217758988e-05, |
| "loss": 0.2932, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.3023388476896747, |
| "grad_norm": 0.2337381742291952, |
| "learning_rate": 1.2917547568710358e-05, |
| "loss": 0.287, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.3034797490017116, |
| "grad_norm": 0.2232820057505206, |
| "learning_rate": 1.2896405919661732e-05, |
| "loss": 0.2992, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.3046206503137476, |
| "grad_norm": 0.21481193248947944, |
| "learning_rate": 1.2875264270613108e-05, |
| "loss": 0.2941, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.3057615516257846, |
| "grad_norm": 0.23287659442211797, |
| "learning_rate": 1.2854122621564482e-05, |
| "loss": 0.2888, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.306902452937821, |
| "grad_norm": 0.2441450826097555, |
| "learning_rate": 1.2832980972515856e-05, |
| "loss": 0.2881, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.3080433542498575, |
| "grad_norm": 0.21631900341301358, |
| "learning_rate": 1.281183932346723e-05, |
| "loss": 0.3031, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.309184255561894, |
| "grad_norm": 0.23685841465022145, |
| "learning_rate": 1.2790697674418606e-05, |
| "loss": 0.2889, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.3103251568739305, |
| "grad_norm": 0.24752960920348968, |
| "learning_rate": 1.276955602536998e-05, |
| "loss": 0.3048, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.311466058185967, |
| "grad_norm": 0.2141925809115063, |
| "learning_rate": 1.2748414376321354e-05, |
| "loss": 0.3126, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.3126069594980034, |
| "grad_norm": 0.23469096398654987, |
| "learning_rate": 1.2727272727272727e-05, |
| "loss": 0.3067, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.31374786081004, |
| "grad_norm": 0.24021289203570448, |
| "learning_rate": 1.27061310782241e-05, |
| "loss": 0.2849, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.3148887621220764, |
| "grad_norm": 0.2224644417413248, |
| "learning_rate": 1.2684989429175475e-05, |
| "loss": 0.2924, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.316029663434113, |
| "grad_norm": 0.23049113832054982, |
| "learning_rate": 1.266384778012685e-05, |
| "loss": 0.2971, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.3171705647461494, |
| "grad_norm": 0.2340970748955576, |
| "learning_rate": 1.2642706131078225e-05, |
| "loss": 0.2934, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.318311466058186, |
| "grad_norm": 0.22710680663083077, |
| "learning_rate": 1.2621564482029599e-05, |
| "loss": 0.3011, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.3194523673702223, |
| "grad_norm": 0.2214909866498558, |
| "learning_rate": 1.2600422832980973e-05, |
| "loss": 0.2922, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.320593268682259, |
| "grad_norm": 0.2370022891229573, |
| "learning_rate": 1.2579281183932349e-05, |
| "loss": 0.2952, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.3217341699942953, |
| "grad_norm": 0.23549458230786854, |
| "learning_rate": 1.2558139534883723e-05, |
| "loss": 0.3035, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.322875071306332, |
| "grad_norm": 0.21809602956352517, |
| "learning_rate": 1.2536997885835095e-05, |
| "loss": 0.3024, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.3240159726183687, |
| "grad_norm": 0.2166411089709946, |
| "learning_rate": 1.251585623678647e-05, |
| "loss": 0.296, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.325156873930405, |
| "grad_norm": 0.26463884719859104, |
| "learning_rate": 1.2494714587737843e-05, |
| "loss": 0.2671, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.3262977752424416, |
| "grad_norm": 0.22532754670856348, |
| "learning_rate": 1.2473572938689217e-05, |
| "loss": 0.3002, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.327438676554478, |
| "grad_norm": 0.2407040758208473, |
| "learning_rate": 1.2452431289640593e-05, |
| "loss": 0.2984, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.3285795778665146, |
| "grad_norm": 0.26048354489435793, |
| "learning_rate": 1.2431289640591967e-05, |
| "loss": 0.2936, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.329720479178551, |
| "grad_norm": 0.24546852772138006, |
| "learning_rate": 1.2410147991543341e-05, |
| "loss": 0.2935, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.3308613804905876, |
| "grad_norm": 0.2884947521367451, |
| "learning_rate": 1.2389006342494715e-05, |
| "loss": 0.2987, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.332002281802624, |
| "grad_norm": 0.2147628745332112, |
| "learning_rate": 1.236786469344609e-05, |
| "loss": 0.2902, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.3331431831146605, |
| "grad_norm": 0.25053785188898486, |
| "learning_rate": 1.2346723044397463e-05, |
| "loss": 0.2936, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.334284084426697, |
| "grad_norm": 0.2579239729203553, |
| "learning_rate": 1.2325581395348838e-05, |
| "loss": 0.2935, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.3354249857387335, |
| "grad_norm": 0.21423908107360926, |
| "learning_rate": 1.2304439746300212e-05, |
| "loss": 0.296, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.33656588705077, |
| "grad_norm": 0.25558415641280685, |
| "learning_rate": 1.2283298097251586e-05, |
| "loss": 0.293, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.3377067883628064, |
| "grad_norm": 0.243119900240472, |
| "learning_rate": 1.226215644820296e-05, |
| "loss": 0.2909, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.338847689674843, |
| "grad_norm": 0.2328624664465957, |
| "learning_rate": 1.2241014799154336e-05, |
| "loss": 0.2824, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.33998859098688, |
| "grad_norm": 0.2152655139207514, |
| "learning_rate": 1.2219873150105708e-05, |
| "loss": 0.294, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.3411294922989163, |
| "grad_norm": 0.2298995263704241, |
| "learning_rate": 1.2198731501057082e-05, |
| "loss": 0.3064, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.342270393610953, |
| "grad_norm": 0.2313471598713492, |
| "learning_rate": 1.2177589852008458e-05, |
| "loss": 0.2936, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.3434112949229893, |
| "grad_norm": 0.23453565544223756, |
| "learning_rate": 1.2156448202959832e-05, |
| "loss": 0.2889, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.3445521962350258, |
| "grad_norm": 0.21356838246444326, |
| "learning_rate": 1.2135306553911206e-05, |
| "loss": 0.2835, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.3456930975470622, |
| "grad_norm": 0.20309860095085247, |
| "learning_rate": 1.211416490486258e-05, |
| "loss": 0.2943, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.3468339988590987, |
| "grad_norm": 0.21615460920929327, |
| "learning_rate": 1.2093023255813954e-05, |
| "loss": 0.2864, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.347974900171135, |
| "grad_norm": 0.23818904219323894, |
| "learning_rate": 1.2071881606765328e-05, |
| "loss": 0.2899, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.3491158014831717, |
| "grad_norm": 0.2378427224755335, |
| "learning_rate": 1.2050739957716702e-05, |
| "loss": 0.2674, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.350256702795208, |
| "grad_norm": 0.22758941310375355, |
| "learning_rate": 1.2029598308668076e-05, |
| "loss": 0.306, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.3513976041072446, |
| "grad_norm": 0.25202783217641567, |
| "learning_rate": 1.200845665961945e-05, |
| "loss": 0.2889, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.352538505419281, |
| "grad_norm": 0.21174390435097373, |
| "learning_rate": 1.1987315010570824e-05, |
| "loss": 0.2912, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.3536794067313176, |
| "grad_norm": 0.22922473498775675, |
| "learning_rate": 1.19661733615222e-05, |
| "loss": 0.2954, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.354820308043354, |
| "grad_norm": 0.24159279897526093, |
| "learning_rate": 1.1945031712473574e-05, |
| "loss": 0.2958, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.3559612093553906, |
| "grad_norm": 0.25268059685668554, |
| "learning_rate": 1.1923890063424947e-05, |
| "loss": 0.2868, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.3571021106674275, |
| "grad_norm": 0.2263904802772943, |
| "learning_rate": 1.1902748414376322e-05, |
| "loss": 0.299, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.358243011979464, |
| "grad_norm": 0.2246350090323415, |
| "learning_rate": 1.1881606765327696e-05, |
| "loss": 0.2851, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.3593839132915004, |
| "grad_norm": 0.22816530324187517, |
| "learning_rate": 1.186046511627907e-05, |
| "loss": 0.2899, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.360524814603537, |
| "grad_norm": 0.2164548765906234, |
| "learning_rate": 1.1839323467230445e-05, |
| "loss": 0.2766, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.3616657159155734, |
| "grad_norm": 0.23513941860792906, |
| "learning_rate": 1.1818181818181819e-05, |
| "loss": 0.2841, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.36280661722761, |
| "grad_norm": 0.22235880511179723, |
| "learning_rate": 1.1797040169133193e-05, |
| "loss": 0.2732, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.3639475185396464, |
| "grad_norm": 0.2303519935227575, |
| "learning_rate": 1.1775898520084567e-05, |
| "loss": 0.3139, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.365088419851683, |
| "grad_norm": 0.20961595927626084, |
| "learning_rate": 1.1754756871035941e-05, |
| "loss": 0.2875, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.3662293211637193, |
| "grad_norm": 0.21603133983276732, |
| "learning_rate": 1.1733615221987315e-05, |
| "loss": 0.2884, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.367370222475756, |
| "grad_norm": 0.21596790812542116, |
| "learning_rate": 1.1712473572938689e-05, |
| "loss": 0.3083, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.3685111237877923, |
| "grad_norm": 0.25087015544580316, |
| "learning_rate": 1.1691331923890065e-05, |
| "loss": 0.2832, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.3696520250998288, |
| "grad_norm": 0.2349808227008684, |
| "learning_rate": 1.1670190274841439e-05, |
| "loss": 0.3084, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.3707929264118652, |
| "grad_norm": 0.21001164027904437, |
| "learning_rate": 1.1649048625792811e-05, |
| "loss": 0.2802, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.3719338277239017, |
| "grad_norm": 0.2474660750534766, |
| "learning_rate": 1.1627906976744187e-05, |
| "loss": 0.2921, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.373074729035938, |
| "grad_norm": 0.24463465462042744, |
| "learning_rate": 1.1606765327695561e-05, |
| "loss": 0.2937, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.374215630347975, |
| "grad_norm": 0.24342454656237167, |
| "learning_rate": 1.1585623678646935e-05, |
| "loss": 0.292, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.3753565316600116, |
| "grad_norm": 0.2463926019467057, |
| "learning_rate": 1.156448202959831e-05, |
| "loss": 0.2972, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.376497432972048, |
| "grad_norm": 0.23720071752094882, |
| "learning_rate": 1.1543340380549683e-05, |
| "loss": 0.2917, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.3776383342840846, |
| "grad_norm": 0.21546829355069966, |
| "learning_rate": 1.1522198731501057e-05, |
| "loss": 0.2934, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.378779235596121, |
| "grad_norm": 0.2518528409099341, |
| "learning_rate": 1.1501057082452431e-05, |
| "loss": 0.2957, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.3799201369081575, |
| "grad_norm": 0.23902483436916538, |
| "learning_rate": 1.1479915433403807e-05, |
| "loss": 0.2801, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.381061038220194, |
| "grad_norm": 0.22742879481333675, |
| "learning_rate": 1.145877378435518e-05, |
| "loss": 0.3089, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.3822019395322305, |
| "grad_norm": 0.2058956665211023, |
| "learning_rate": 1.1437632135306554e-05, |
| "loss": 0.2823, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.383342840844267, |
| "grad_norm": 0.22631377036694728, |
| "learning_rate": 1.141649048625793e-05, |
| "loss": 0.2775, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.3844837421563034, |
| "grad_norm": 0.2214324306878773, |
| "learning_rate": 1.1395348837209304e-05, |
| "loss": 0.2891, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.38562464346834, |
| "grad_norm": 0.23356628029155893, |
| "learning_rate": 1.1374207188160676e-05, |
| "loss": 0.2874, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.3867655447803764, |
| "grad_norm": 0.24243095398966297, |
| "learning_rate": 1.1353065539112052e-05, |
| "loss": 0.2934, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.387906446092413, |
| "grad_norm": 0.21508838940257163, |
| "learning_rate": 1.1331923890063426e-05, |
| "loss": 0.2913, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.3890473474044494, |
| "grad_norm": 0.24449508059101233, |
| "learning_rate": 1.13107822410148e-05, |
| "loss": 0.2932, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.390188248716486, |
| "grad_norm": 0.23075538723084565, |
| "learning_rate": 1.1289640591966174e-05, |
| "loss": 0.2946, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.3913291500285228, |
| "grad_norm": 0.21406924795031834, |
| "learning_rate": 1.1268498942917548e-05, |
| "loss": 0.2757, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.3924700513405592, |
| "grad_norm": 0.2137290364922405, |
| "learning_rate": 1.1247357293868922e-05, |
| "loss": 0.2976, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.3936109526525957, |
| "grad_norm": 0.23118716853112048, |
| "learning_rate": 1.1226215644820296e-05, |
| "loss": 0.2792, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.394751853964632, |
| "grad_norm": 0.23055275379491955, |
| "learning_rate": 1.1205073995771672e-05, |
| "loss": 0.2877, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.3958927552766687, |
| "grad_norm": 0.23762556799317847, |
| "learning_rate": 1.1183932346723044e-05, |
| "loss": 0.2909, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.397033656588705, |
| "grad_norm": 0.22760056957997504, |
| "learning_rate": 1.1162790697674418e-05, |
| "loss": 0.305, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.3981745579007416, |
| "grad_norm": 0.2164934127449452, |
| "learning_rate": 1.1141649048625794e-05, |
| "loss": 0.2836, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.399315459212778, |
| "grad_norm": 0.2369221361495161, |
| "learning_rate": 1.1120507399577168e-05, |
| "loss": 0.2883, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.4004563605248146, |
| "grad_norm": 0.21848646801955754, |
| "learning_rate": 1.109936575052854e-05, |
| "loss": 0.2785, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.401597261836851, |
| "grad_norm": 0.22794077248598318, |
| "learning_rate": 1.1078224101479916e-05, |
| "loss": 0.2817, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.4027381631488876, |
| "grad_norm": 0.23285629065646596, |
| "learning_rate": 1.105708245243129e-05, |
| "loss": 0.2886, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.403879064460924, |
| "grad_norm": 0.23290817753384455, |
| "learning_rate": 1.1035940803382664e-05, |
| "loss": 0.2806, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.4050199657729605, |
| "grad_norm": 0.23158661692280794, |
| "learning_rate": 1.1014799154334039e-05, |
| "loss": 0.3063, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.406160867084997, |
| "grad_norm": 0.20833408878949644, |
| "learning_rate": 1.0993657505285413e-05, |
| "loss": 0.2886, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.4073017683970335, |
| "grad_norm": 0.23083635889774368, |
| "learning_rate": 1.0972515856236787e-05, |
| "loss": 0.2869, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.4084426697090704, |
| "grad_norm": 0.22576281424305952, |
| "learning_rate": 1.095137420718816e-05, |
| "loss": 0.2854, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.4095835710211064, |
| "grad_norm": 0.22884347743104985, |
| "learning_rate": 1.0930232558139537e-05, |
| "loss": 0.2967, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.4107244723331434, |
| "grad_norm": 0.22415283034169417, |
| "learning_rate": 1.0909090909090909e-05, |
| "loss": 0.2913, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.41186537364518, |
| "grad_norm": 0.21200894004522153, |
| "learning_rate": 1.0887949260042283e-05, |
| "loss": 0.2888, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.4130062749572163, |
| "grad_norm": 0.20964684591494562, |
| "learning_rate": 1.0866807610993659e-05, |
| "loss": 0.2925, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.414147176269253, |
| "grad_norm": 0.24476217114724486, |
| "learning_rate": 1.0845665961945033e-05, |
| "loss": 0.2767, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.4152880775812893, |
| "grad_norm": 0.2505050259919476, |
| "learning_rate": 1.0824524312896405e-05, |
| "loss": 0.2974, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.4164289788933258, |
| "grad_norm": 0.22788028722264833, |
| "learning_rate": 1.0803382663847781e-05, |
| "loss": 0.2748, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.4175698802053622, |
| "grad_norm": 0.23594844937596657, |
| "learning_rate": 1.0782241014799155e-05, |
| "loss": 0.2773, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.4187107815173987, |
| "grad_norm": 0.22373608681920468, |
| "learning_rate": 1.0761099365750529e-05, |
| "loss": 0.2968, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.419851682829435, |
| "grad_norm": 0.20267836607609419, |
| "learning_rate": 1.0739957716701903e-05, |
| "loss": 0.2875, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.4209925841414717, |
| "grad_norm": 0.23304066668147824, |
| "learning_rate": 1.0718816067653277e-05, |
| "loss": 0.2784, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.422133485453508, |
| "grad_norm": 0.22386813115682758, |
| "learning_rate": 1.0697674418604651e-05, |
| "loss": 0.2895, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.4232743867655446, |
| "grad_norm": 0.23791481430326425, |
| "learning_rate": 1.0676532769556025e-05, |
| "loss": 0.2973, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.424415288077581, |
| "grad_norm": 0.22929508872335239, |
| "learning_rate": 1.0655391120507401e-05, |
| "loss": 0.2882, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.425556189389618, |
| "grad_norm": 0.2548891481570048, |
| "learning_rate": 1.0634249471458774e-05, |
| "loss": 0.2862, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.426697090701654, |
| "grad_norm": 0.21746741378702147, |
| "learning_rate": 1.0613107822410148e-05, |
| "loss": 0.2987, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.427837992013691, |
| "grad_norm": 0.21251661156113041, |
| "learning_rate": 1.0591966173361523e-05, |
| "loss": 0.2894, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.4289788933257275, |
| "grad_norm": 0.25780499036245796, |
| "learning_rate": 1.0570824524312897e-05, |
| "loss": 0.2856, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.430119794637764, |
| "grad_norm": 0.23654473246154903, |
| "learning_rate": 1.0549682875264272e-05, |
| "loss": 0.3007, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.4312606959498004, |
| "grad_norm": 0.23356461407264056, |
| "learning_rate": 1.0528541226215646e-05, |
| "loss": 0.3256, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.432401597261837, |
| "grad_norm": 0.24245974715235033, |
| "learning_rate": 1.050739957716702e-05, |
| "loss": 0.3106, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.4335424985738734, |
| "grad_norm": 0.24303489817253957, |
| "learning_rate": 1.0486257928118394e-05, |
| "loss": 0.2955, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.43468339988591, |
| "grad_norm": 0.24005732028686433, |
| "learning_rate": 1.0465116279069768e-05, |
| "loss": 0.2893, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.4358243011979464, |
| "grad_norm": 0.2121910038291667, |
| "learning_rate": 1.0443974630021142e-05, |
| "loss": 0.2945, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.436965202509983, |
| "grad_norm": 0.2387084030486226, |
| "learning_rate": 1.0422832980972516e-05, |
| "loss": 0.2903, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.4381061038220193, |
| "grad_norm": 0.23633958382284798, |
| "learning_rate": 1.040169133192389e-05, |
| "loss": 0.2996, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.439247005134056, |
| "grad_norm": 0.22037384346987707, |
| "learning_rate": 1.0380549682875266e-05, |
| "loss": 0.2839, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.4403879064460923, |
| "grad_norm": 0.21706680631361816, |
| "learning_rate": 1.0359408033826638e-05, |
| "loss": 0.2975, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.4415288077581287, |
| "grad_norm": 0.2111778175282013, |
| "learning_rate": 1.0338266384778012e-05, |
| "loss": 0.2729, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.4426697090701657, |
| "grad_norm": 0.2370242617846862, |
| "learning_rate": 1.0317124735729388e-05, |
| "loss": 0.2909, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.4438106103822017, |
| "grad_norm": 0.23853498865235603, |
| "learning_rate": 1.0295983086680762e-05, |
| "loss": 0.2932, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.4449515116942386, |
| "grad_norm": 0.2521182998898568, |
| "learning_rate": 1.0274841437632136e-05, |
| "loss": 0.3059, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.446092413006275, |
| "grad_norm": 0.23322546814866815, |
| "learning_rate": 1.025369978858351e-05, |
| "loss": 0.295, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.4472333143183116, |
| "grad_norm": 0.2321246083656867, |
| "learning_rate": 1.0232558139534884e-05, |
| "loss": 0.2916, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.448374215630348, |
| "grad_norm": 0.21203888608803423, |
| "learning_rate": 1.0211416490486258e-05, |
| "loss": 0.3057, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.4495151169423846, |
| "grad_norm": 0.24675500559447644, |
| "learning_rate": 1.0190274841437632e-05, |
| "loss": 0.3005, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.450656018254421, |
| "grad_norm": 0.2149188388562516, |
| "learning_rate": 1.0169133192389007e-05, |
| "loss": 0.2913, |
| "step": 2147 |
| }, |
| { |
| "epoch": 2.4517969195664575, |
| "grad_norm": 0.22455843922740962, |
| "learning_rate": 1.014799154334038e-05, |
| "loss": 0.2911, |
| "step": 2148 |
| }, |
| { |
| "epoch": 2.452937820878494, |
| "grad_norm": 0.2365688821621369, |
| "learning_rate": 1.0126849894291755e-05, |
| "loss": 0.3009, |
| "step": 2149 |
| }, |
| { |
| "epoch": 2.4540787221905305, |
| "grad_norm": 0.236174371463899, |
| "learning_rate": 1.010570824524313e-05, |
| "loss": 0.2963, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.455219623502567, |
| "grad_norm": 0.22166605922121535, |
| "learning_rate": 1.0084566596194505e-05, |
| "loss": 0.3, |
| "step": 2151 |
| }, |
| { |
| "epoch": 2.4563605248146034, |
| "grad_norm": 0.21578175432032534, |
| "learning_rate": 1.0063424947145877e-05, |
| "loss": 0.3036, |
| "step": 2152 |
| }, |
| { |
| "epoch": 2.45750142612664, |
| "grad_norm": 0.20735663165711912, |
| "learning_rate": 1.0042283298097253e-05, |
| "loss": 0.2903, |
| "step": 2153 |
| }, |
| { |
| "epoch": 2.4586423274386764, |
| "grad_norm": 0.21996192089426445, |
| "learning_rate": 1.0021141649048627e-05, |
| "loss": 0.2907, |
| "step": 2154 |
| }, |
| { |
| "epoch": 2.4597832287507133, |
| "grad_norm": 0.2458406652272878, |
| "learning_rate": 1e-05, |
| "loss": 0.2969, |
| "step": 2155 |
| }, |
| { |
| "epoch": 2.4609241300627493, |
| "grad_norm": 0.22647445428101715, |
| "learning_rate": 9.978858350951375e-06, |
| "loss": 0.3052, |
| "step": 2156 |
| }, |
| { |
| "epoch": 2.4620650313747863, |
| "grad_norm": 0.2219779270023204, |
| "learning_rate": 9.957716701902749e-06, |
| "loss": 0.2824, |
| "step": 2157 |
| }, |
| { |
| "epoch": 2.4632059326868228, |
| "grad_norm": 0.23756600468667605, |
| "learning_rate": 9.936575052854123e-06, |
| "loss": 0.3011, |
| "step": 2158 |
| }, |
| { |
| "epoch": 2.4643468339988592, |
| "grad_norm": 0.2272830607275422, |
| "learning_rate": 9.915433403805497e-06, |
| "loss": 0.2718, |
| "step": 2159 |
| }, |
| { |
| "epoch": 2.4654877353108957, |
| "grad_norm": 0.2145277871400877, |
| "learning_rate": 9.894291754756871e-06, |
| "loss": 0.2941, |
| "step": 2160 |
| }, |
| { |
| "epoch": 2.466628636622932, |
| "grad_norm": 0.2376090963043766, |
| "learning_rate": 9.873150105708245e-06, |
| "loss": 0.3067, |
| "step": 2161 |
| }, |
| { |
| "epoch": 2.4677695379349687, |
| "grad_norm": 0.24356987631727217, |
| "learning_rate": 9.85200845665962e-06, |
| "loss": 0.3022, |
| "step": 2162 |
| }, |
| { |
| "epoch": 2.468910439247005, |
| "grad_norm": 0.22299783197978315, |
| "learning_rate": 9.830866807610995e-06, |
| "loss": 0.2869, |
| "step": 2163 |
| }, |
| { |
| "epoch": 2.4700513405590416, |
| "grad_norm": 0.20939756640471135, |
| "learning_rate": 9.80972515856237e-06, |
| "loss": 0.3098, |
| "step": 2164 |
| }, |
| { |
| "epoch": 2.471192241871078, |
| "grad_norm": 0.21609710238107147, |
| "learning_rate": 9.788583509513742e-06, |
| "loss": 0.2952, |
| "step": 2165 |
| }, |
| { |
| "epoch": 2.4723331431831146, |
| "grad_norm": 0.2372720663818419, |
| "learning_rate": 9.767441860465117e-06, |
| "loss": 0.2898, |
| "step": 2166 |
| }, |
| { |
| "epoch": 2.473474044495151, |
| "grad_norm": 0.21121175197196843, |
| "learning_rate": 9.746300211416491e-06, |
| "loss": 0.3025, |
| "step": 2167 |
| }, |
| { |
| "epoch": 2.4746149458071875, |
| "grad_norm": 0.24198203548706435, |
| "learning_rate": 9.725158562367865e-06, |
| "loss": 0.296, |
| "step": 2168 |
| }, |
| { |
| "epoch": 2.475755847119224, |
| "grad_norm": 0.24194232553304587, |
| "learning_rate": 9.70401691331924e-06, |
| "loss": 0.2853, |
| "step": 2169 |
| }, |
| { |
| "epoch": 2.4768967484312605, |
| "grad_norm": 0.22799957616792252, |
| "learning_rate": 9.682875264270614e-06, |
| "loss": 0.2862, |
| "step": 2170 |
| }, |
| { |
| "epoch": 2.478037649743297, |
| "grad_norm": 0.227565930275027, |
| "learning_rate": 9.661733615221988e-06, |
| "loss": 0.2922, |
| "step": 2171 |
| }, |
| { |
| "epoch": 2.479178551055334, |
| "grad_norm": 0.21324291845071908, |
| "learning_rate": 9.640591966173362e-06, |
| "loss": 0.2957, |
| "step": 2172 |
| }, |
| { |
| "epoch": 2.4803194523673704, |
| "grad_norm": 0.24298054311441278, |
| "learning_rate": 9.619450317124738e-06, |
| "loss": 0.2781, |
| "step": 2173 |
| }, |
| { |
| "epoch": 2.481460353679407, |
| "grad_norm": 0.19336392182500792, |
| "learning_rate": 9.59830866807611e-06, |
| "loss": 0.2779, |
| "step": 2174 |
| }, |
| { |
| "epoch": 2.4826012549914434, |
| "grad_norm": 0.20345662355740218, |
| "learning_rate": 9.577167019027484e-06, |
| "loss": 0.2823, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.48374215630348, |
| "grad_norm": 0.20790668803942755, |
| "learning_rate": 9.55602536997886e-06, |
| "loss": 0.2947, |
| "step": 2176 |
| }, |
| { |
| "epoch": 2.4848830576155163, |
| "grad_norm": 0.21470421388061137, |
| "learning_rate": 9.534883720930234e-06, |
| "loss": 0.288, |
| "step": 2177 |
| }, |
| { |
| "epoch": 2.486023958927553, |
| "grad_norm": 0.2373711556108657, |
| "learning_rate": 9.513742071881606e-06, |
| "loss": 0.2871, |
| "step": 2178 |
| }, |
| { |
| "epoch": 2.4871648602395893, |
| "grad_norm": 0.2137356437142869, |
| "learning_rate": 9.492600422832982e-06, |
| "loss": 0.281, |
| "step": 2179 |
| }, |
| { |
| "epoch": 2.4883057615516257, |
| "grad_norm": 0.2160640542317364, |
| "learning_rate": 9.471458773784356e-06, |
| "loss": 0.2909, |
| "step": 2180 |
| }, |
| { |
| "epoch": 2.4894466628636622, |
| "grad_norm": 0.23681297605390203, |
| "learning_rate": 9.45031712473573e-06, |
| "loss": 0.2976, |
| "step": 2181 |
| }, |
| { |
| "epoch": 2.4905875641756987, |
| "grad_norm": 0.21619935440988564, |
| "learning_rate": 9.429175475687104e-06, |
| "loss": 0.2965, |
| "step": 2182 |
| }, |
| { |
| "epoch": 2.491728465487735, |
| "grad_norm": 0.22582877145805355, |
| "learning_rate": 9.408033826638478e-06, |
| "loss": 0.2884, |
| "step": 2183 |
| }, |
| { |
| "epoch": 2.4928693667997717, |
| "grad_norm": 0.24291200698676815, |
| "learning_rate": 9.386892177589852e-06, |
| "loss": 0.2833, |
| "step": 2184 |
| }, |
| { |
| "epoch": 2.494010268111808, |
| "grad_norm": 0.22462516903821608, |
| "learning_rate": 9.365750528541226e-06, |
| "loss": 0.2814, |
| "step": 2185 |
| }, |
| { |
| "epoch": 2.4951511694238446, |
| "grad_norm": 0.2150912847250357, |
| "learning_rate": 9.344608879492602e-06, |
| "loss": 0.2865, |
| "step": 2186 |
| }, |
| { |
| "epoch": 2.4962920707358816, |
| "grad_norm": 0.22046725631639622, |
| "learning_rate": 9.323467230443975e-06, |
| "loss": 0.3042, |
| "step": 2187 |
| }, |
| { |
| "epoch": 2.497432972047918, |
| "grad_norm": 0.21876472045649098, |
| "learning_rate": 9.302325581395349e-06, |
| "loss": 0.3086, |
| "step": 2188 |
| }, |
| { |
| "epoch": 2.4985738733599545, |
| "grad_norm": 0.22155910910209872, |
| "learning_rate": 9.281183932346724e-06, |
| "loss": 0.2913, |
| "step": 2189 |
| }, |
| { |
| "epoch": 2.499714774671991, |
| "grad_norm": 0.21640048073756032, |
| "learning_rate": 9.260042283298098e-06, |
| "loss": 0.2945, |
| "step": 2190 |
| }, |
| { |
| "epoch": 2.5008556759840275, |
| "grad_norm": 0.22250078526965852, |
| "learning_rate": 9.23890063424947e-06, |
| "loss": 0.3035, |
| "step": 2191 |
| }, |
| { |
| "epoch": 2.501996577296064, |
| "grad_norm": 0.24474339500328393, |
| "learning_rate": 9.217758985200847e-06, |
| "loss": 0.3127, |
| "step": 2192 |
| }, |
| { |
| "epoch": 2.5031374786081004, |
| "grad_norm": 0.251308976662145, |
| "learning_rate": 9.19661733615222e-06, |
| "loss": 0.2875, |
| "step": 2193 |
| }, |
| { |
| "epoch": 2.504278379920137, |
| "grad_norm": 0.22794637405036033, |
| "learning_rate": 9.175475687103595e-06, |
| "loss": 0.2913, |
| "step": 2194 |
| }, |
| { |
| "epoch": 2.5054192812321734, |
| "grad_norm": 0.259791985825867, |
| "learning_rate": 9.154334038054969e-06, |
| "loss": 0.3014, |
| "step": 2195 |
| }, |
| { |
| "epoch": 2.50656018254421, |
| "grad_norm": 0.22447975268191678, |
| "learning_rate": 9.133192389006343e-06, |
| "loss": 0.2852, |
| "step": 2196 |
| }, |
| { |
| "epoch": 2.5077010838562463, |
| "grad_norm": 0.2339651033453112, |
| "learning_rate": 9.112050739957717e-06, |
| "loss": 0.2981, |
| "step": 2197 |
| }, |
| { |
| "epoch": 2.508841985168283, |
| "grad_norm": 0.2413662095243327, |
| "learning_rate": 9.090909090909091e-06, |
| "loss": 0.297, |
| "step": 2198 |
| }, |
| { |
| "epoch": 2.5099828864803193, |
| "grad_norm": 0.25405342039518297, |
| "learning_rate": 9.069767441860467e-06, |
| "loss": 0.2871, |
| "step": 2199 |
| }, |
| { |
| "epoch": 2.5111237877923562, |
| "grad_norm": 0.23920425875866733, |
| "learning_rate": 9.04862579281184e-06, |
| "loss": 0.2875, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.5122646891043923, |
| "grad_norm": 0.2051445130760935, |
| "learning_rate": 9.027484143763213e-06, |
| "loss": 0.2758, |
| "step": 2201 |
| }, |
| { |
| "epoch": 2.513405590416429, |
| "grad_norm": 0.20124858870683618, |
| "learning_rate": 9.006342494714589e-06, |
| "loss": 0.2972, |
| "step": 2202 |
| }, |
| { |
| "epoch": 2.5145464917284652, |
| "grad_norm": 0.23302207610442713, |
| "learning_rate": 8.985200845665963e-06, |
| "loss": 0.2978, |
| "step": 2203 |
| }, |
| { |
| "epoch": 2.515687393040502, |
| "grad_norm": 0.21344843871744304, |
| "learning_rate": 8.964059196617335e-06, |
| "loss": 0.301, |
| "step": 2204 |
| }, |
| { |
| "epoch": 2.5168282943525386, |
| "grad_norm": 0.2407781938188346, |
| "learning_rate": 8.942917547568711e-06, |
| "loss": 0.281, |
| "step": 2205 |
| }, |
| { |
| "epoch": 2.517969195664575, |
| "grad_norm": 0.20516201626203046, |
| "learning_rate": 8.921775898520085e-06, |
| "loss": 0.3002, |
| "step": 2206 |
| }, |
| { |
| "epoch": 2.5191100969766116, |
| "grad_norm": 0.2104381049294445, |
| "learning_rate": 8.90063424947146e-06, |
| "loss": 0.2979, |
| "step": 2207 |
| }, |
| { |
| "epoch": 2.520250998288648, |
| "grad_norm": 0.22424201493799636, |
| "learning_rate": 8.879492600422833e-06, |
| "loss": 0.2845, |
| "step": 2208 |
| }, |
| { |
| "epoch": 2.5213918996006845, |
| "grad_norm": 0.23906885464829122, |
| "learning_rate": 8.858350951374208e-06, |
| "loss": 0.3064, |
| "step": 2209 |
| }, |
| { |
| "epoch": 2.522532800912721, |
| "grad_norm": 0.21700945798058027, |
| "learning_rate": 8.837209302325582e-06, |
| "loss": 0.2946, |
| "step": 2210 |
| }, |
| { |
| "epoch": 2.5236737022247575, |
| "grad_norm": 0.20269201104999035, |
| "learning_rate": 8.816067653276956e-06, |
| "loss": 0.2918, |
| "step": 2211 |
| }, |
| { |
| "epoch": 2.524814603536794, |
| "grad_norm": 0.1983271277482634, |
| "learning_rate": 8.794926004228331e-06, |
| "loss": 0.2823, |
| "step": 2212 |
| }, |
| { |
| "epoch": 2.5259555048488305, |
| "grad_norm": 0.2071653143990948, |
| "learning_rate": 8.773784355179704e-06, |
| "loss": 0.2987, |
| "step": 2213 |
| }, |
| { |
| "epoch": 2.527096406160867, |
| "grad_norm": 0.21535784337570055, |
| "learning_rate": 8.752642706131078e-06, |
| "loss": 0.2962, |
| "step": 2214 |
| }, |
| { |
| "epoch": 2.5282373074729034, |
| "grad_norm": 0.2296330117605354, |
| "learning_rate": 8.731501057082454e-06, |
| "loss": 0.3056, |
| "step": 2215 |
| }, |
| { |
| "epoch": 2.52937820878494, |
| "grad_norm": 0.2271558756128669, |
| "learning_rate": 8.710359408033828e-06, |
| "loss": 0.2892, |
| "step": 2216 |
| }, |
| { |
| "epoch": 2.530519110096977, |
| "grad_norm": 0.2105137344073212, |
| "learning_rate": 8.6892177589852e-06, |
| "loss": 0.2947, |
| "step": 2217 |
| }, |
| { |
| "epoch": 2.531660011409013, |
| "grad_norm": 0.2184000564503314, |
| "learning_rate": 8.668076109936576e-06, |
| "loss": 0.2809, |
| "step": 2218 |
| }, |
| { |
| "epoch": 2.53280091272105, |
| "grad_norm": 0.2263050553315689, |
| "learning_rate": 8.64693446088795e-06, |
| "loss": 0.2788, |
| "step": 2219 |
| }, |
| { |
| "epoch": 2.5339418140330863, |
| "grad_norm": 0.21604440983648726, |
| "learning_rate": 8.625792811839324e-06, |
| "loss": 0.2936, |
| "step": 2220 |
| }, |
| { |
| "epoch": 2.5350827153451228, |
| "grad_norm": 0.21737368520487446, |
| "learning_rate": 8.604651162790698e-06, |
| "loss": 0.2961, |
| "step": 2221 |
| }, |
| { |
| "epoch": 2.5362236166571592, |
| "grad_norm": 0.21773148010014565, |
| "learning_rate": 8.583509513742072e-06, |
| "loss": 0.2884, |
| "step": 2222 |
| }, |
| { |
| "epoch": 2.5373645179691957, |
| "grad_norm": 0.22892926444409067, |
| "learning_rate": 8.562367864693446e-06, |
| "loss": 0.3067, |
| "step": 2223 |
| }, |
| { |
| "epoch": 2.538505419281232, |
| "grad_norm": 0.21226778252034528, |
| "learning_rate": 8.54122621564482e-06, |
| "loss": 0.2936, |
| "step": 2224 |
| }, |
| { |
| "epoch": 2.5396463205932687, |
| "grad_norm": 0.2152635025793325, |
| "learning_rate": 8.520084566596196e-06, |
| "loss": 0.3102, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.540787221905305, |
| "grad_norm": 0.21770436762764223, |
| "learning_rate": 8.498942917547568e-06, |
| "loss": 0.2983, |
| "step": 2226 |
| }, |
| { |
| "epoch": 2.5419281232173416, |
| "grad_norm": 0.22410803091422948, |
| "learning_rate": 8.477801268498943e-06, |
| "loss": 0.2982, |
| "step": 2227 |
| }, |
| { |
| "epoch": 2.543069024529378, |
| "grad_norm": 0.20713974718886613, |
| "learning_rate": 8.456659619450318e-06, |
| "loss": 0.2785, |
| "step": 2228 |
| }, |
| { |
| "epoch": 2.5442099258414146, |
| "grad_norm": 0.21352924445956112, |
| "learning_rate": 8.435517970401692e-06, |
| "loss": 0.2773, |
| "step": 2229 |
| }, |
| { |
| "epoch": 2.545350827153451, |
| "grad_norm": 0.20187171844616342, |
| "learning_rate": 8.414376321353066e-06, |
| "loss": 0.2995, |
| "step": 2230 |
| }, |
| { |
| "epoch": 2.5464917284654875, |
| "grad_norm": 0.20941049925190608, |
| "learning_rate": 8.39323467230444e-06, |
| "loss": 0.2784, |
| "step": 2231 |
| }, |
| { |
| "epoch": 2.5476326297775245, |
| "grad_norm": 0.20180411877439458, |
| "learning_rate": 8.372093023255815e-06, |
| "loss": 0.2773, |
| "step": 2232 |
| }, |
| { |
| "epoch": 2.5487735310895605, |
| "grad_norm": 0.22093030719312548, |
| "learning_rate": 8.350951374207189e-06, |
| "loss": 0.3012, |
| "step": 2233 |
| }, |
| { |
| "epoch": 2.5499144324015974, |
| "grad_norm": 0.22523096523898684, |
| "learning_rate": 8.329809725158563e-06, |
| "loss": 0.2922, |
| "step": 2234 |
| }, |
| { |
| "epoch": 2.551055333713634, |
| "grad_norm": 0.22343591900578977, |
| "learning_rate": 8.308668076109937e-06, |
| "loss": 0.3093, |
| "step": 2235 |
| }, |
| { |
| "epoch": 2.5521962350256704, |
| "grad_norm": 0.2161498369058084, |
| "learning_rate": 8.287526427061311e-06, |
| "loss": 0.2969, |
| "step": 2236 |
| }, |
| { |
| "epoch": 2.553337136337707, |
| "grad_norm": 0.22562631086956914, |
| "learning_rate": 8.266384778012685e-06, |
| "loss": 0.3016, |
| "step": 2237 |
| }, |
| { |
| "epoch": 2.5544780376497433, |
| "grad_norm": 0.2369459126707438, |
| "learning_rate": 8.24524312896406e-06, |
| "loss": 0.2904, |
| "step": 2238 |
| }, |
| { |
| "epoch": 2.55561893896178, |
| "grad_norm": 0.22448499213847747, |
| "learning_rate": 8.224101479915433e-06, |
| "loss": 0.2853, |
| "step": 2239 |
| }, |
| { |
| "epoch": 2.5567598402738163, |
| "grad_norm": 0.21029356493483137, |
| "learning_rate": 8.202959830866807e-06, |
| "loss": 0.2843, |
| "step": 2240 |
| }, |
| { |
| "epoch": 2.557900741585853, |
| "grad_norm": 0.22200835961714357, |
| "learning_rate": 8.181818181818183e-06, |
| "loss": 0.3057, |
| "step": 2241 |
| }, |
| { |
| "epoch": 2.5590416428978893, |
| "grad_norm": 0.23044204049657901, |
| "learning_rate": 8.160676532769557e-06, |
| "loss": 0.2962, |
| "step": 2242 |
| }, |
| { |
| "epoch": 2.5601825442099257, |
| "grad_norm": 0.20553292626259104, |
| "learning_rate": 8.139534883720931e-06, |
| "loss": 0.2745, |
| "step": 2243 |
| }, |
| { |
| "epoch": 2.5613234455219622, |
| "grad_norm": 0.20915386111446832, |
| "learning_rate": 8.118393234672305e-06, |
| "loss": 0.2958, |
| "step": 2244 |
| }, |
| { |
| "epoch": 2.5624643468339987, |
| "grad_norm": 0.22334896184814618, |
| "learning_rate": 8.09725158562368e-06, |
| "loss": 0.2972, |
| "step": 2245 |
| }, |
| { |
| "epoch": 2.563605248146035, |
| "grad_norm": 0.23809497758629466, |
| "learning_rate": 8.076109936575053e-06, |
| "loss": 0.2942, |
| "step": 2246 |
| }, |
| { |
| "epoch": 2.564746149458072, |
| "grad_norm": 0.24069339582590796, |
| "learning_rate": 8.054968287526427e-06, |
| "loss": 0.2918, |
| "step": 2247 |
| }, |
| { |
| "epoch": 2.565887050770108, |
| "grad_norm": 0.23602440927239052, |
| "learning_rate": 8.033826638477801e-06, |
| "loss": 0.3028, |
| "step": 2248 |
| }, |
| { |
| "epoch": 2.567027952082145, |
| "grad_norm": 0.2171746495174681, |
| "learning_rate": 8.012684989429176e-06, |
| "loss": 0.2916, |
| "step": 2249 |
| }, |
| { |
| "epoch": 2.5681688533941815, |
| "grad_norm": 0.2068130866348627, |
| "learning_rate": 7.99154334038055e-06, |
| "loss": 0.2745, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.569309754706218, |
| "grad_norm": 0.2177101252393696, |
| "learning_rate": 7.970401691331925e-06, |
| "loss": 0.2975, |
| "step": 2251 |
| }, |
| { |
| "epoch": 2.5704506560182545, |
| "grad_norm": 0.2735012912904912, |
| "learning_rate": 7.9492600422833e-06, |
| "loss": 0.2926, |
| "step": 2252 |
| }, |
| { |
| "epoch": 2.571591557330291, |
| "grad_norm": 0.20464785110092396, |
| "learning_rate": 7.928118393234672e-06, |
| "loss": 0.2927, |
| "step": 2253 |
| }, |
| { |
| "epoch": 2.5727324586423275, |
| "grad_norm": 0.21455342420906462, |
| "learning_rate": 7.906976744186048e-06, |
| "loss": 0.3056, |
| "step": 2254 |
| }, |
| { |
| "epoch": 2.573873359954364, |
| "grad_norm": 0.22559564092009435, |
| "learning_rate": 7.885835095137422e-06, |
| "loss": 0.2865, |
| "step": 2255 |
| }, |
| { |
| "epoch": 2.5750142612664004, |
| "grad_norm": 0.2294901793229017, |
| "learning_rate": 7.864693446088796e-06, |
| "loss": 0.2853, |
| "step": 2256 |
| }, |
| { |
| "epoch": 2.576155162578437, |
| "grad_norm": 0.2147093578647149, |
| "learning_rate": 7.843551797040168e-06, |
| "loss": 0.2936, |
| "step": 2257 |
| }, |
| { |
| "epoch": 2.5772960638904734, |
| "grad_norm": 0.2031963357013273, |
| "learning_rate": 7.822410147991544e-06, |
| "loss": 0.2852, |
| "step": 2258 |
| }, |
| { |
| "epoch": 2.57843696520251, |
| "grad_norm": 0.23046080341522127, |
| "learning_rate": 7.801268498942918e-06, |
| "loss": 0.2934, |
| "step": 2259 |
| }, |
| { |
| "epoch": 2.5795778665145463, |
| "grad_norm": 0.2273970672894729, |
| "learning_rate": 7.780126849894292e-06, |
| "loss": 0.3049, |
| "step": 2260 |
| }, |
| { |
| "epoch": 2.580718767826583, |
| "grad_norm": 0.20061748470118457, |
| "learning_rate": 7.758985200845666e-06, |
| "loss": 0.2856, |
| "step": 2261 |
| }, |
| { |
| "epoch": 2.5818596691386198, |
| "grad_norm": 0.22065013702452976, |
| "learning_rate": 7.73784355179704e-06, |
| "loss": 0.3123, |
| "step": 2262 |
| }, |
| { |
| "epoch": 2.583000570450656, |
| "grad_norm": 0.22822659923283384, |
| "learning_rate": 7.716701902748414e-06, |
| "loss": 0.2822, |
| "step": 2263 |
| }, |
| { |
| "epoch": 2.5841414717626927, |
| "grad_norm": 0.21413158459854104, |
| "learning_rate": 7.69556025369979e-06, |
| "loss": 0.2976, |
| "step": 2264 |
| }, |
| { |
| "epoch": 2.585282373074729, |
| "grad_norm": 0.21690708574915987, |
| "learning_rate": 7.674418604651164e-06, |
| "loss": 0.3013, |
| "step": 2265 |
| }, |
| { |
| "epoch": 2.5864232743867657, |
| "grad_norm": 0.20274340989060452, |
| "learning_rate": 7.653276955602536e-06, |
| "loss": 0.2986, |
| "step": 2266 |
| }, |
| { |
| "epoch": 2.587564175698802, |
| "grad_norm": 0.22857150975728216, |
| "learning_rate": 7.632135306553912e-06, |
| "loss": 0.2988, |
| "step": 2267 |
| }, |
| { |
| "epoch": 2.5887050770108386, |
| "grad_norm": 0.24680265809712104, |
| "learning_rate": 7.610993657505286e-06, |
| "loss": 0.3058, |
| "step": 2268 |
| }, |
| { |
| "epoch": 2.589845978322875, |
| "grad_norm": 0.21717337738812287, |
| "learning_rate": 7.58985200845666e-06, |
| "loss": 0.2759, |
| "step": 2269 |
| }, |
| { |
| "epoch": 2.5909868796349116, |
| "grad_norm": 0.1956386453417279, |
| "learning_rate": 7.568710359408034e-06, |
| "loss": 0.2997, |
| "step": 2270 |
| }, |
| { |
| "epoch": 2.592127780946948, |
| "grad_norm": 0.21352206012373648, |
| "learning_rate": 7.547568710359408e-06, |
| "loss": 0.2947, |
| "step": 2271 |
| }, |
| { |
| "epoch": 2.5932686822589845, |
| "grad_norm": 0.21869565704668345, |
| "learning_rate": 7.526427061310783e-06, |
| "loss": 0.2867, |
| "step": 2272 |
| }, |
| { |
| "epoch": 2.594409583571021, |
| "grad_norm": 0.2199935791732718, |
| "learning_rate": 7.5052854122621575e-06, |
| "loss": 0.286, |
| "step": 2273 |
| }, |
| { |
| "epoch": 2.5955504848830575, |
| "grad_norm": 0.2077071830689117, |
| "learning_rate": 7.484143763213532e-06, |
| "loss": 0.2862, |
| "step": 2274 |
| }, |
| { |
| "epoch": 2.596691386195094, |
| "grad_norm": 0.19250504102605728, |
| "learning_rate": 7.463002114164905e-06, |
| "loss": 0.272, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.5978322875071305, |
| "grad_norm": 0.2543282781990224, |
| "learning_rate": 7.44186046511628e-06, |
| "loss": 0.2899, |
| "step": 2276 |
| }, |
| { |
| "epoch": 2.5989731888191674, |
| "grad_norm": 0.2227953001922955, |
| "learning_rate": 7.420718816067654e-06, |
| "loss": 0.3072, |
| "step": 2277 |
| }, |
| { |
| "epoch": 2.6001140901312034, |
| "grad_norm": 0.2119324966397027, |
| "learning_rate": 7.399577167019029e-06, |
| "loss": 0.2818, |
| "step": 2278 |
| }, |
| { |
| "epoch": 2.6012549914432403, |
| "grad_norm": 0.21825598326524892, |
| "learning_rate": 7.378435517970401e-06, |
| "loss": 0.307, |
| "step": 2279 |
| }, |
| { |
| "epoch": 2.602395892755277, |
| "grad_norm": 0.19780618819467255, |
| "learning_rate": 7.357293868921776e-06, |
| "loss": 0.2719, |
| "step": 2280 |
| }, |
| { |
| "epoch": 2.6035367940673133, |
| "grad_norm": 0.20187862019079622, |
| "learning_rate": 7.336152219873151e-06, |
| "loss": 0.3001, |
| "step": 2281 |
| }, |
| { |
| "epoch": 2.60467769537935, |
| "grad_norm": 0.1957602418547089, |
| "learning_rate": 7.315010570824525e-06, |
| "loss": 0.2893, |
| "step": 2282 |
| }, |
| { |
| "epoch": 2.6058185966913863, |
| "grad_norm": 0.32010591246894465, |
| "learning_rate": 7.293868921775898e-06, |
| "loss": 0.3003, |
| "step": 2283 |
| }, |
| { |
| "epoch": 2.6069594980034227, |
| "grad_norm": 0.21826237399726542, |
| "learning_rate": 7.272727272727272e-06, |
| "loss": 0.2889, |
| "step": 2284 |
| }, |
| { |
| "epoch": 2.6081003993154592, |
| "grad_norm": 0.2106352951066305, |
| "learning_rate": 7.251585623678647e-06, |
| "loss": 0.2827, |
| "step": 2285 |
| }, |
| { |
| "epoch": 2.6092413006274957, |
| "grad_norm": 0.2078174915090593, |
| "learning_rate": 7.230443974630022e-06, |
| "loss": 0.2914, |
| "step": 2286 |
| }, |
| { |
| "epoch": 2.610382201939532, |
| "grad_norm": 0.22396488141909118, |
| "learning_rate": 7.209302325581396e-06, |
| "loss": 0.3042, |
| "step": 2287 |
| }, |
| { |
| "epoch": 2.6115231032515687, |
| "grad_norm": 0.2290191223253906, |
| "learning_rate": 7.1881606765327695e-06, |
| "loss": 0.2893, |
| "step": 2288 |
| }, |
| { |
| "epoch": 2.612664004563605, |
| "grad_norm": 0.21481339577130223, |
| "learning_rate": 7.1670190274841435e-06, |
| "loss": 0.278, |
| "step": 2289 |
| }, |
| { |
| "epoch": 2.6138049058756416, |
| "grad_norm": 0.22863107764420157, |
| "learning_rate": 7.1458773784355185e-06, |
| "loss": 0.2937, |
| "step": 2290 |
| }, |
| { |
| "epoch": 2.614945807187678, |
| "grad_norm": 0.20446333945789158, |
| "learning_rate": 7.124735729386893e-06, |
| "loss": 0.2803, |
| "step": 2291 |
| }, |
| { |
| "epoch": 2.616086708499715, |
| "grad_norm": 0.2139924313184036, |
| "learning_rate": 7.103594080338266e-06, |
| "loss": 0.2973, |
| "step": 2292 |
| }, |
| { |
| "epoch": 2.617227609811751, |
| "grad_norm": 0.2272213739485047, |
| "learning_rate": 7.082452431289641e-06, |
| "loss": 0.2915, |
| "step": 2293 |
| }, |
| { |
| "epoch": 2.618368511123788, |
| "grad_norm": 0.23274850175302836, |
| "learning_rate": 7.061310782241015e-06, |
| "loss": 0.2871, |
| "step": 2294 |
| }, |
| { |
| "epoch": 2.619509412435824, |
| "grad_norm": 0.2200811712932776, |
| "learning_rate": 7.04016913319239e-06, |
| "loss": 0.2939, |
| "step": 2295 |
| }, |
| { |
| "epoch": 2.620650313747861, |
| "grad_norm": 0.21688950824573724, |
| "learning_rate": 7.019027484143765e-06, |
| "loss": 0.2981, |
| "step": 2296 |
| }, |
| { |
| "epoch": 2.6217912150598974, |
| "grad_norm": 0.21223187386382966, |
| "learning_rate": 6.997885835095137e-06, |
| "loss": 0.2898, |
| "step": 2297 |
| }, |
| { |
| "epoch": 2.622932116371934, |
| "grad_norm": 0.21679812155701778, |
| "learning_rate": 6.976744186046512e-06, |
| "loss": 0.303, |
| "step": 2298 |
| }, |
| { |
| "epoch": 2.6240730176839704, |
| "grad_norm": 0.213840792985224, |
| "learning_rate": 6.955602536997887e-06, |
| "loss": 0.2905, |
| "step": 2299 |
| }, |
| { |
| "epoch": 2.625213918996007, |
| "grad_norm": 0.2324092568365301, |
| "learning_rate": 6.934460887949261e-06, |
| "loss": 0.3023, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.6263548203080433, |
| "grad_norm": 0.21224593855037074, |
| "learning_rate": 6.913319238900634e-06, |
| "loss": 0.2786, |
| "step": 2301 |
| }, |
| { |
| "epoch": 2.62749572162008, |
| "grad_norm": 0.2191741025335595, |
| "learning_rate": 6.892177589852008e-06, |
| "loss": 0.2915, |
| "step": 2302 |
| }, |
| { |
| "epoch": 2.6286366229321163, |
| "grad_norm": 0.2050190605945917, |
| "learning_rate": 6.871035940803383e-06, |
| "loss": 0.3005, |
| "step": 2303 |
| }, |
| { |
| "epoch": 2.629777524244153, |
| "grad_norm": 0.19422534480342057, |
| "learning_rate": 6.849894291754758e-06, |
| "loss": 0.2813, |
| "step": 2304 |
| }, |
| { |
| "epoch": 2.6309184255561893, |
| "grad_norm": 0.2024986617966428, |
| "learning_rate": 6.82875264270613e-06, |
| "loss": 0.2931, |
| "step": 2305 |
| }, |
| { |
| "epoch": 2.6320593268682257, |
| "grad_norm": 0.2016325798838157, |
| "learning_rate": 6.807610993657505e-06, |
| "loss": 0.2757, |
| "step": 2306 |
| }, |
| { |
| "epoch": 2.6332002281802627, |
| "grad_norm": 0.19900248841956134, |
| "learning_rate": 6.786469344608879e-06, |
| "loss": 0.2731, |
| "step": 2307 |
| }, |
| { |
| "epoch": 2.6343411294922987, |
| "grad_norm": 0.22345370720066865, |
| "learning_rate": 6.765327695560254e-06, |
| "loss": 0.2915, |
| "step": 2308 |
| }, |
| { |
| "epoch": 2.6354820308043356, |
| "grad_norm": 0.22172722286619925, |
| "learning_rate": 6.744186046511629e-06, |
| "loss": 0.2763, |
| "step": 2309 |
| }, |
| { |
| "epoch": 2.6366229321163717, |
| "grad_norm": 0.20927209846966097, |
| "learning_rate": 6.723044397463002e-06, |
| "loss": 0.3024, |
| "step": 2310 |
| }, |
| { |
| "epoch": 2.6377638334284086, |
| "grad_norm": 0.1995585604700486, |
| "learning_rate": 6.7019027484143765e-06, |
| "loss": 0.2986, |
| "step": 2311 |
| }, |
| { |
| "epoch": 2.638904734740445, |
| "grad_norm": 0.24437397232747674, |
| "learning_rate": 6.680761099365751e-06, |
| "loss": 0.2906, |
| "step": 2312 |
| }, |
| { |
| "epoch": 2.6400456360524815, |
| "grad_norm": 0.23672311901146992, |
| "learning_rate": 6.6596194503171255e-06, |
| "loss": 0.3059, |
| "step": 2313 |
| }, |
| { |
| "epoch": 2.641186537364518, |
| "grad_norm": 0.20833196620847985, |
| "learning_rate": 6.638477801268499e-06, |
| "loss": 0.2814, |
| "step": 2314 |
| }, |
| { |
| "epoch": 2.6423274386765545, |
| "grad_norm": 0.21171378116649958, |
| "learning_rate": 6.617336152219873e-06, |
| "loss": 0.2923, |
| "step": 2315 |
| }, |
| { |
| "epoch": 2.643468339988591, |
| "grad_norm": 0.21992329243569908, |
| "learning_rate": 6.596194503171248e-06, |
| "loss": 0.293, |
| "step": 2316 |
| }, |
| { |
| "epoch": 2.6446092413006275, |
| "grad_norm": 0.19236579441830645, |
| "learning_rate": 6.575052854122622e-06, |
| "loss": 0.2882, |
| "step": 2317 |
| }, |
| { |
| "epoch": 2.645750142612664, |
| "grad_norm": 0.21732952448802234, |
| "learning_rate": 6.553911205073997e-06, |
| "loss": 0.3002, |
| "step": 2318 |
| }, |
| { |
| "epoch": 2.6468910439247004, |
| "grad_norm": 0.21930948530716543, |
| "learning_rate": 6.53276955602537e-06, |
| "loss": 0.2837, |
| "step": 2319 |
| }, |
| { |
| "epoch": 2.648031945236737, |
| "grad_norm": 0.20276319027259337, |
| "learning_rate": 6.511627906976744e-06, |
| "loss": 0.2941, |
| "step": 2320 |
| }, |
| { |
| "epoch": 2.6491728465487734, |
| "grad_norm": 0.20664191060677842, |
| "learning_rate": 6.490486257928119e-06, |
| "loss": 0.2973, |
| "step": 2321 |
| }, |
| { |
| "epoch": 2.6503137478608103, |
| "grad_norm": 0.20686816856995033, |
| "learning_rate": 6.469344608879494e-06, |
| "loss": 0.2876, |
| "step": 2322 |
| }, |
| { |
| "epoch": 2.6514546491728463, |
| "grad_norm": 0.21791289664974559, |
| "learning_rate": 6.448202959830866e-06, |
| "loss": 0.2938, |
| "step": 2323 |
| }, |
| { |
| "epoch": 2.6525955504848833, |
| "grad_norm": 0.20835972672843145, |
| "learning_rate": 6.427061310782241e-06, |
| "loss": 0.2965, |
| "step": 2324 |
| }, |
| { |
| "epoch": 2.6537364517969193, |
| "grad_norm": 0.19895481748874236, |
| "learning_rate": 6.405919661733615e-06, |
| "loss": 0.2878, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.6548773531089562, |
| "grad_norm": 0.2061114756476564, |
| "learning_rate": 6.38477801268499e-06, |
| "loss": 0.2883, |
| "step": 2326 |
| }, |
| { |
| "epoch": 2.6560182544209927, |
| "grad_norm": 0.19673018088924696, |
| "learning_rate": 6.363636363636363e-06, |
| "loss": 0.2982, |
| "step": 2327 |
| }, |
| { |
| "epoch": 2.657159155733029, |
| "grad_norm": 0.20789548693413587, |
| "learning_rate": 6.3424947145877375e-06, |
| "loss": 0.283, |
| "step": 2328 |
| }, |
| { |
| "epoch": 2.6583000570450657, |
| "grad_norm": 0.20423004517268986, |
| "learning_rate": 6.321353065539112e-06, |
| "loss": 0.2985, |
| "step": 2329 |
| }, |
| { |
| "epoch": 2.659440958357102, |
| "grad_norm": 0.21369437414101625, |
| "learning_rate": 6.3002114164904865e-06, |
| "loss": 0.2951, |
| "step": 2330 |
| }, |
| { |
| "epoch": 2.6605818596691386, |
| "grad_norm": 0.2024038323173226, |
| "learning_rate": 6.279069767441861e-06, |
| "loss": 0.2983, |
| "step": 2331 |
| }, |
| { |
| "epoch": 2.661722760981175, |
| "grad_norm": 0.22749689909229562, |
| "learning_rate": 6.257928118393235e-06, |
| "loss": 0.29, |
| "step": 2332 |
| }, |
| { |
| "epoch": 2.6628636622932116, |
| "grad_norm": 0.2054459051238387, |
| "learning_rate": 6.236786469344609e-06, |
| "loss": 0.2975, |
| "step": 2333 |
| }, |
| { |
| "epoch": 2.664004563605248, |
| "grad_norm": 0.22028377224426043, |
| "learning_rate": 6.215644820295984e-06, |
| "loss": 0.2935, |
| "step": 2334 |
| }, |
| { |
| "epoch": 2.6651454649172845, |
| "grad_norm": 0.21504971194559508, |
| "learning_rate": 6.194503171247358e-06, |
| "loss": 0.3184, |
| "step": 2335 |
| }, |
| { |
| "epoch": 2.666286366229321, |
| "grad_norm": 0.2453904967082021, |
| "learning_rate": 6.173361522198732e-06, |
| "loss": 0.286, |
| "step": 2336 |
| }, |
| { |
| "epoch": 2.667427267541358, |
| "grad_norm": 0.21061911886303802, |
| "learning_rate": 6.152219873150106e-06, |
| "loss": 0.3064, |
| "step": 2337 |
| }, |
| { |
| "epoch": 2.668568168853394, |
| "grad_norm": 0.2133958177486806, |
| "learning_rate": 6.13107822410148e-06, |
| "loss": 0.2897, |
| "step": 2338 |
| }, |
| { |
| "epoch": 2.669709070165431, |
| "grad_norm": 0.22350309093921972, |
| "learning_rate": 6.109936575052854e-06, |
| "loss": 0.2908, |
| "step": 2339 |
| }, |
| { |
| "epoch": 2.670849971477467, |
| "grad_norm": 0.1994310994630131, |
| "learning_rate": 6.088794926004229e-06, |
| "loss": 0.3016, |
| "step": 2340 |
| }, |
| { |
| "epoch": 2.671990872789504, |
| "grad_norm": 0.242456861299583, |
| "learning_rate": 6.067653276955603e-06, |
| "loss": 0.2782, |
| "step": 2341 |
| }, |
| { |
| "epoch": 2.6731317741015403, |
| "grad_norm": 0.20070378596638844, |
| "learning_rate": 6.046511627906977e-06, |
| "loss": 0.275, |
| "step": 2342 |
| }, |
| { |
| "epoch": 2.674272675413577, |
| "grad_norm": 0.20533693117962545, |
| "learning_rate": 6.025369978858351e-06, |
| "loss": 0.2886, |
| "step": 2343 |
| }, |
| { |
| "epoch": 2.6754135767256133, |
| "grad_norm": 0.24120919621556927, |
| "learning_rate": 6.004228329809725e-06, |
| "loss": 0.3033, |
| "step": 2344 |
| }, |
| { |
| "epoch": 2.67655447803765, |
| "grad_norm": 0.2154183445042602, |
| "learning_rate": 5.9830866807611e-06, |
| "loss": 0.3066, |
| "step": 2345 |
| }, |
| { |
| "epoch": 2.6776953793496863, |
| "grad_norm": 0.20613740694158517, |
| "learning_rate": 5.961945031712473e-06, |
| "loss": 0.3162, |
| "step": 2346 |
| }, |
| { |
| "epoch": 2.6788362806617227, |
| "grad_norm": 0.22079946197964784, |
| "learning_rate": 5.940803382663848e-06, |
| "loss": 0.2847, |
| "step": 2347 |
| }, |
| { |
| "epoch": 2.6799771819737592, |
| "grad_norm": 0.21631086212021008, |
| "learning_rate": 5.919661733615222e-06, |
| "loss": 0.2809, |
| "step": 2348 |
| }, |
| { |
| "epoch": 2.6811180832857957, |
| "grad_norm": 0.22122238445645395, |
| "learning_rate": 5.898520084566596e-06, |
| "loss": 0.3071, |
| "step": 2349 |
| }, |
| { |
| "epoch": 2.682258984597832, |
| "grad_norm": 0.2209980769754549, |
| "learning_rate": 5.8773784355179705e-06, |
| "loss": 0.2933, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.6833998859098687, |
| "grad_norm": 0.20106379921531103, |
| "learning_rate": 5.8562367864693445e-06, |
| "loss": 0.2692, |
| "step": 2351 |
| }, |
| { |
| "epoch": 2.684540787221905, |
| "grad_norm": 0.20397362922563297, |
| "learning_rate": 5.8350951374207194e-06, |
| "loss": 0.2815, |
| "step": 2352 |
| }, |
| { |
| "epoch": 2.6856816885339416, |
| "grad_norm": 0.20098081864470793, |
| "learning_rate": 5.8139534883720935e-06, |
| "loss": 0.2811, |
| "step": 2353 |
| }, |
| { |
| "epoch": 2.6868225898459785, |
| "grad_norm": 0.19634429911239146, |
| "learning_rate": 5.792811839323468e-06, |
| "loss": 0.2754, |
| "step": 2354 |
| }, |
| { |
| "epoch": 2.6879634911580146, |
| "grad_norm": 0.22762609745240756, |
| "learning_rate": 5.771670190274842e-06, |
| "loss": 0.2772, |
| "step": 2355 |
| }, |
| { |
| "epoch": 2.6891043924700515, |
| "grad_norm": 0.19275931276843722, |
| "learning_rate": 5.750528541226216e-06, |
| "loss": 0.3034, |
| "step": 2356 |
| }, |
| { |
| "epoch": 2.690245293782088, |
| "grad_norm": 0.20721106140373188, |
| "learning_rate": 5.72938689217759e-06, |
| "loss": 0.2817, |
| "step": 2357 |
| }, |
| { |
| "epoch": 2.6913861950941245, |
| "grad_norm": 0.21585429405699674, |
| "learning_rate": 5.708245243128965e-06, |
| "loss": 0.3001, |
| "step": 2358 |
| }, |
| { |
| "epoch": 2.692527096406161, |
| "grad_norm": 0.22737139061335532, |
| "learning_rate": 5.687103594080338e-06, |
| "loss": 0.2791, |
| "step": 2359 |
| }, |
| { |
| "epoch": 2.6936679977181974, |
| "grad_norm": 0.2052614228941748, |
| "learning_rate": 5.665961945031713e-06, |
| "loss": 0.2986, |
| "step": 2360 |
| }, |
| { |
| "epoch": 2.694808899030234, |
| "grad_norm": 0.21375194844104548, |
| "learning_rate": 5.644820295983087e-06, |
| "loss": 0.2929, |
| "step": 2361 |
| }, |
| { |
| "epoch": 2.6959498003422704, |
| "grad_norm": 0.21440238679198403, |
| "learning_rate": 5.623678646934461e-06, |
| "loss": 0.2774, |
| "step": 2362 |
| }, |
| { |
| "epoch": 2.697090701654307, |
| "grad_norm": 0.20394690527332443, |
| "learning_rate": 5.602536997885836e-06, |
| "loss": 0.2999, |
| "step": 2363 |
| }, |
| { |
| "epoch": 2.6982316029663433, |
| "grad_norm": 0.22082160108841933, |
| "learning_rate": 5.581395348837209e-06, |
| "loss": 0.2891, |
| "step": 2364 |
| }, |
| { |
| "epoch": 2.69937250427838, |
| "grad_norm": 0.2154910615773614, |
| "learning_rate": 5.560253699788584e-06, |
| "loss": 0.2907, |
| "step": 2365 |
| }, |
| { |
| "epoch": 2.7005134055904163, |
| "grad_norm": 0.19966209756699427, |
| "learning_rate": 5.539112050739958e-06, |
| "loss": 0.2792, |
| "step": 2366 |
| }, |
| { |
| "epoch": 2.701654306902453, |
| "grad_norm": 0.20758371417711657, |
| "learning_rate": 5.517970401691332e-06, |
| "loss": 0.2772, |
| "step": 2367 |
| }, |
| { |
| "epoch": 2.7027952082144893, |
| "grad_norm": 0.19655111008329076, |
| "learning_rate": 5.496828752642706e-06, |
| "loss": 0.2859, |
| "step": 2368 |
| }, |
| { |
| "epoch": 2.703936109526526, |
| "grad_norm": 0.21344377891772437, |
| "learning_rate": 5.47568710359408e-06, |
| "loss": 0.273, |
| "step": 2369 |
| }, |
| { |
| "epoch": 2.7050770108385622, |
| "grad_norm": 0.19954667152573125, |
| "learning_rate": 5.4545454545454545e-06, |
| "loss": 0.27, |
| "step": 2370 |
| }, |
| { |
| "epoch": 2.706217912150599, |
| "grad_norm": 0.20696421237106963, |
| "learning_rate": 5.433403805496829e-06, |
| "loss": 0.2741, |
| "step": 2371 |
| }, |
| { |
| "epoch": 2.7073588134626356, |
| "grad_norm": 0.19888310424958752, |
| "learning_rate": 5.412262156448203e-06, |
| "loss": 0.2895, |
| "step": 2372 |
| }, |
| { |
| "epoch": 2.708499714774672, |
| "grad_norm": 0.22935801181778648, |
| "learning_rate": 5.3911205073995775e-06, |
| "loss": 0.2893, |
| "step": 2373 |
| }, |
| { |
| "epoch": 2.7096406160867086, |
| "grad_norm": 0.21186470683679917, |
| "learning_rate": 5.369978858350952e-06, |
| "loss": 0.287, |
| "step": 2374 |
| }, |
| { |
| "epoch": 2.710781517398745, |
| "grad_norm": 0.18801028249751817, |
| "learning_rate": 5.348837209302326e-06, |
| "loss": 0.2861, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.7119224187107815, |
| "grad_norm": 0.2116535183061984, |
| "learning_rate": 5.327695560253701e-06, |
| "loss": 0.3056, |
| "step": 2376 |
| }, |
| { |
| "epoch": 2.713063320022818, |
| "grad_norm": 0.21346624281502724, |
| "learning_rate": 5.306553911205074e-06, |
| "loss": 0.2971, |
| "step": 2377 |
| }, |
| { |
| "epoch": 2.7142042213348545, |
| "grad_norm": 0.20321563705952317, |
| "learning_rate": 5.285412262156449e-06, |
| "loss": 0.2808, |
| "step": 2378 |
| }, |
| { |
| "epoch": 2.715345122646891, |
| "grad_norm": 0.2115089124503202, |
| "learning_rate": 5.264270613107823e-06, |
| "loss": 0.2885, |
| "step": 2379 |
| }, |
| { |
| "epoch": 2.7164860239589275, |
| "grad_norm": 0.22902298132952414, |
| "learning_rate": 5.243128964059197e-06, |
| "loss": 0.2921, |
| "step": 2380 |
| }, |
| { |
| "epoch": 2.717626925270964, |
| "grad_norm": 0.20349307589386711, |
| "learning_rate": 5.221987315010571e-06, |
| "loss": 0.2846, |
| "step": 2381 |
| }, |
| { |
| "epoch": 2.7187678265830004, |
| "grad_norm": 0.18793703684238383, |
| "learning_rate": 5.200845665961945e-06, |
| "loss": 0.2935, |
| "step": 2382 |
| }, |
| { |
| "epoch": 2.719908727895037, |
| "grad_norm": 0.2519734868072542, |
| "learning_rate": 5.179704016913319e-06, |
| "loss": 0.2838, |
| "step": 2383 |
| }, |
| { |
| "epoch": 2.721049629207074, |
| "grad_norm": 0.197819968737116, |
| "learning_rate": 5.158562367864694e-06, |
| "loss": 0.3027, |
| "step": 2384 |
| }, |
| { |
| "epoch": 2.72219053051911, |
| "grad_norm": 0.2086766984476139, |
| "learning_rate": 5.137420718816068e-06, |
| "loss": 0.2865, |
| "step": 2385 |
| }, |
| { |
| "epoch": 2.723331431831147, |
| "grad_norm": 0.19345032801304457, |
| "learning_rate": 5.116279069767442e-06, |
| "loss": 0.2973, |
| "step": 2386 |
| }, |
| { |
| "epoch": 2.7244723331431833, |
| "grad_norm": 0.22577042718511026, |
| "learning_rate": 5.095137420718816e-06, |
| "loss": 0.2906, |
| "step": 2387 |
| }, |
| { |
| "epoch": 2.7256132344552197, |
| "grad_norm": 0.21603357817376984, |
| "learning_rate": 5.07399577167019e-06, |
| "loss": 0.2807, |
| "step": 2388 |
| }, |
| { |
| "epoch": 2.7267541357672562, |
| "grad_norm": 0.21219120304091335, |
| "learning_rate": 5.052854122621565e-06, |
| "loss": 0.291, |
| "step": 2389 |
| }, |
| { |
| "epoch": 2.7278950370792927, |
| "grad_norm": 0.1942871235301068, |
| "learning_rate": 5.0317124735729385e-06, |
| "loss": 0.2863, |
| "step": 2390 |
| }, |
| { |
| "epoch": 2.729035938391329, |
| "grad_norm": 0.2047396916851407, |
| "learning_rate": 5.010570824524313e-06, |
| "loss": 0.2834, |
| "step": 2391 |
| }, |
| { |
| "epoch": 2.7301768397033657, |
| "grad_norm": 0.19544577571703675, |
| "learning_rate": 4.9894291754756874e-06, |
| "loss": 0.2985, |
| "step": 2392 |
| }, |
| { |
| "epoch": 2.731317741015402, |
| "grad_norm": 0.20598080382300346, |
| "learning_rate": 4.9682875264270615e-06, |
| "loss": 0.28, |
| "step": 2393 |
| }, |
| { |
| "epoch": 2.7324586423274386, |
| "grad_norm": 0.19446571137719146, |
| "learning_rate": 4.947145877378436e-06, |
| "loss": 0.2749, |
| "step": 2394 |
| }, |
| { |
| "epoch": 2.733599543639475, |
| "grad_norm": 0.19509428149486063, |
| "learning_rate": 4.92600422832981e-06, |
| "loss": 0.309, |
| "step": 2395 |
| }, |
| { |
| "epoch": 2.7347404449515116, |
| "grad_norm": 0.2091106077639519, |
| "learning_rate": 4.904862579281185e-06, |
| "loss": 0.268, |
| "step": 2396 |
| }, |
| { |
| "epoch": 2.735881346263548, |
| "grad_norm": 0.19395912384493028, |
| "learning_rate": 4.883720930232559e-06, |
| "loss": 0.2749, |
| "step": 2397 |
| }, |
| { |
| "epoch": 2.7370222475755845, |
| "grad_norm": 0.2128904563883501, |
| "learning_rate": 4.862579281183933e-06, |
| "loss": 0.3169, |
| "step": 2398 |
| }, |
| { |
| "epoch": 2.7381631488876215, |
| "grad_norm": 0.2120146570263807, |
| "learning_rate": 4.841437632135307e-06, |
| "loss": 0.2999, |
| "step": 2399 |
| }, |
| { |
| "epoch": 2.7393040501996575, |
| "grad_norm": 0.21299550942100914, |
| "learning_rate": 4.820295983086681e-06, |
| "loss": 0.2803, |
| "step": 2400 |
| }, |
| { |
| "epoch": 2.7404449515116944, |
| "grad_norm": 0.20165472101507262, |
| "learning_rate": 4.799154334038055e-06, |
| "loss": 0.2741, |
| "step": 2401 |
| }, |
| { |
| "epoch": 2.741585852823731, |
| "grad_norm": 0.20587824908237448, |
| "learning_rate": 4.77801268498943e-06, |
| "loss": 0.3079, |
| "step": 2402 |
| }, |
| { |
| "epoch": 2.7427267541357674, |
| "grad_norm": 0.20745487685308733, |
| "learning_rate": 4.756871035940803e-06, |
| "loss": 0.2988, |
| "step": 2403 |
| }, |
| { |
| "epoch": 2.743867655447804, |
| "grad_norm": 0.19662301496766177, |
| "learning_rate": 4.735729386892178e-06, |
| "loss": 0.2895, |
| "step": 2404 |
| }, |
| { |
| "epoch": 2.7450085567598403, |
| "grad_norm": 0.2047896485844238, |
| "learning_rate": 4.714587737843552e-06, |
| "loss": 0.2908, |
| "step": 2405 |
| }, |
| { |
| "epoch": 2.746149458071877, |
| "grad_norm": 0.22075407121101182, |
| "learning_rate": 4.693446088794926e-06, |
| "loss": 0.2924, |
| "step": 2406 |
| }, |
| { |
| "epoch": 2.7472903593839133, |
| "grad_norm": 0.20989441506128534, |
| "learning_rate": 4.672304439746301e-06, |
| "loss": 0.2752, |
| "step": 2407 |
| }, |
| { |
| "epoch": 2.74843126069595, |
| "grad_norm": 0.19330236930243655, |
| "learning_rate": 4.651162790697674e-06, |
| "loss": 0.2873, |
| "step": 2408 |
| }, |
| { |
| "epoch": 2.7495721620079863, |
| "grad_norm": 0.21471432313236352, |
| "learning_rate": 4.630021141649049e-06, |
| "loss": 0.2984, |
| "step": 2409 |
| }, |
| { |
| "epoch": 2.7507130633200227, |
| "grad_norm": 0.1990241754745363, |
| "learning_rate": 4.608879492600423e-06, |
| "loss": 0.3024, |
| "step": 2410 |
| }, |
| { |
| "epoch": 2.7518539646320592, |
| "grad_norm": 0.2175123910831802, |
| "learning_rate": 4.587737843551797e-06, |
| "loss": 0.2862, |
| "step": 2411 |
| }, |
| { |
| "epoch": 2.7529948659440957, |
| "grad_norm": 0.2022442697895259, |
| "learning_rate": 4.5665961945031714e-06, |
| "loss": 0.3061, |
| "step": 2412 |
| }, |
| { |
| "epoch": 2.754135767256132, |
| "grad_norm": 0.202210410401036, |
| "learning_rate": 4.5454545454545455e-06, |
| "loss": 0.3099, |
| "step": 2413 |
| }, |
| { |
| "epoch": 2.755276668568169, |
| "grad_norm": 0.20841630948380577, |
| "learning_rate": 4.52431289640592e-06, |
| "loss": 0.2872, |
| "step": 2414 |
| }, |
| { |
| "epoch": 2.756417569880205, |
| "grad_norm": 0.20962608268730534, |
| "learning_rate": 4.5031712473572945e-06, |
| "loss": 0.2799, |
| "step": 2415 |
| }, |
| { |
| "epoch": 2.757558471192242, |
| "grad_norm": 0.20684129654773203, |
| "learning_rate": 4.482029598308668e-06, |
| "loss": 0.2755, |
| "step": 2416 |
| }, |
| { |
| "epoch": 2.7586993725042785, |
| "grad_norm": 0.20412423424441156, |
| "learning_rate": 4.460887949260043e-06, |
| "loss": 0.2872, |
| "step": 2417 |
| }, |
| { |
| "epoch": 2.759840273816315, |
| "grad_norm": 0.21520346828361941, |
| "learning_rate": 4.439746300211417e-06, |
| "loss": 0.2998, |
| "step": 2418 |
| }, |
| { |
| "epoch": 2.7609811751283515, |
| "grad_norm": 0.21069047843020636, |
| "learning_rate": 4.418604651162791e-06, |
| "loss": 0.2838, |
| "step": 2419 |
| }, |
| { |
| "epoch": 2.762122076440388, |
| "grad_norm": 0.2097851230483762, |
| "learning_rate": 4.397463002114166e-06, |
| "loss": 0.3005, |
| "step": 2420 |
| }, |
| { |
| "epoch": 2.7632629777524245, |
| "grad_norm": 0.1990108557800723, |
| "learning_rate": 4.376321353065539e-06, |
| "loss": 0.2814, |
| "step": 2421 |
| }, |
| { |
| "epoch": 2.764403879064461, |
| "grad_norm": 0.19641216914165438, |
| "learning_rate": 4.355179704016914e-06, |
| "loss": 0.2813, |
| "step": 2422 |
| }, |
| { |
| "epoch": 2.7655447803764974, |
| "grad_norm": 0.21405575621485598, |
| "learning_rate": 4.334038054968288e-06, |
| "loss": 0.2878, |
| "step": 2423 |
| }, |
| { |
| "epoch": 2.766685681688534, |
| "grad_norm": 0.21401069015463817, |
| "learning_rate": 4.312896405919662e-06, |
| "loss": 0.2894, |
| "step": 2424 |
| }, |
| { |
| "epoch": 2.7678265830005704, |
| "grad_norm": 0.22891323803035088, |
| "learning_rate": 4.291754756871036e-06, |
| "loss": 0.3081, |
| "step": 2425 |
| }, |
| { |
| "epoch": 2.768967484312607, |
| "grad_norm": 0.2109649751414716, |
| "learning_rate": 4.27061310782241e-06, |
| "loss": 0.2908, |
| "step": 2426 |
| }, |
| { |
| "epoch": 2.7701083856246433, |
| "grad_norm": 0.21252536464308053, |
| "learning_rate": 4.249471458773784e-06, |
| "loss": 0.2936, |
| "step": 2427 |
| }, |
| { |
| "epoch": 2.77124928693668, |
| "grad_norm": 0.19865252632831235, |
| "learning_rate": 4.228329809725159e-06, |
| "loss": 0.2954, |
| "step": 2428 |
| }, |
| { |
| "epoch": 2.7723901882487167, |
| "grad_norm": 0.2153522039069563, |
| "learning_rate": 4.207188160676533e-06, |
| "loss": 0.2974, |
| "step": 2429 |
| }, |
| { |
| "epoch": 2.773531089560753, |
| "grad_norm": 0.21887123860802177, |
| "learning_rate": 4.186046511627907e-06, |
| "loss": 0.2843, |
| "step": 2430 |
| }, |
| { |
| "epoch": 2.7746719908727897, |
| "grad_norm": 0.21186766551947683, |
| "learning_rate": 4.164904862579281e-06, |
| "loss": 0.2828, |
| "step": 2431 |
| }, |
| { |
| "epoch": 2.7758128921848257, |
| "grad_norm": 0.19989097612840076, |
| "learning_rate": 4.1437632135306554e-06, |
| "loss": 0.2701, |
| "step": 2432 |
| }, |
| { |
| "epoch": 2.7769537934968627, |
| "grad_norm": 0.18958199905520784, |
| "learning_rate": 4.12262156448203e-06, |
| "loss": 0.2759, |
| "step": 2433 |
| }, |
| { |
| "epoch": 2.778094694808899, |
| "grad_norm": 0.20469102511481307, |
| "learning_rate": 4.101479915433404e-06, |
| "loss": 0.2927, |
| "step": 2434 |
| }, |
| { |
| "epoch": 2.7792355961209356, |
| "grad_norm": 0.2045040607399336, |
| "learning_rate": 4.0803382663847785e-06, |
| "loss": 0.2808, |
| "step": 2435 |
| }, |
| { |
| "epoch": 2.780376497432972, |
| "grad_norm": 0.20416663418050893, |
| "learning_rate": 4.059196617336153e-06, |
| "loss": 0.3044, |
| "step": 2436 |
| }, |
| { |
| "epoch": 2.7815173987450086, |
| "grad_norm": 0.21126468810168858, |
| "learning_rate": 4.038054968287527e-06, |
| "loss": 0.2864, |
| "step": 2437 |
| }, |
| { |
| "epoch": 2.782658300057045, |
| "grad_norm": 0.21333692375010985, |
| "learning_rate": 4.016913319238901e-06, |
| "loss": 0.3066, |
| "step": 2438 |
| }, |
| { |
| "epoch": 2.7837992013690815, |
| "grad_norm": 0.2063409848079501, |
| "learning_rate": 3.995771670190275e-06, |
| "loss": 0.2901, |
| "step": 2439 |
| }, |
| { |
| "epoch": 2.784940102681118, |
| "grad_norm": 0.21945699246555966, |
| "learning_rate": 3.97463002114165e-06, |
| "loss": 0.292, |
| "step": 2440 |
| }, |
| { |
| "epoch": 2.7860810039931545, |
| "grad_norm": 0.21272225883141066, |
| "learning_rate": 3.953488372093024e-06, |
| "loss": 0.289, |
| "step": 2441 |
| }, |
| { |
| "epoch": 2.787221905305191, |
| "grad_norm": 0.1998651488460456, |
| "learning_rate": 3.932346723044398e-06, |
| "loss": 0.3048, |
| "step": 2442 |
| }, |
| { |
| "epoch": 2.7883628066172275, |
| "grad_norm": 0.21027926042200606, |
| "learning_rate": 3.911205073995772e-06, |
| "loss": 0.2788, |
| "step": 2443 |
| }, |
| { |
| "epoch": 2.7895037079292644, |
| "grad_norm": 0.23160367083032785, |
| "learning_rate": 3.890063424947146e-06, |
| "loss": 0.2791, |
| "step": 2444 |
| }, |
| { |
| "epoch": 2.7906446092413004, |
| "grad_norm": 0.21688813469461285, |
| "learning_rate": 3.86892177589852e-06, |
| "loss": 0.2808, |
| "step": 2445 |
| }, |
| { |
| "epoch": 2.7917855105533373, |
| "grad_norm": 0.20893812255772268, |
| "learning_rate": 3.847780126849895e-06, |
| "loss": 0.2939, |
| "step": 2446 |
| }, |
| { |
| "epoch": 2.7929264118653734, |
| "grad_norm": 0.21161195500726673, |
| "learning_rate": 3.826638477801268e-06, |
| "loss": 0.2982, |
| "step": 2447 |
| }, |
| { |
| "epoch": 2.7940673131774103, |
| "grad_norm": 0.22159348068245183, |
| "learning_rate": 3.805496828752643e-06, |
| "loss": 0.2909, |
| "step": 2448 |
| }, |
| { |
| "epoch": 2.795208214489447, |
| "grad_norm": 0.19693523939694024, |
| "learning_rate": 3.784355179704017e-06, |
| "loss": 0.3072, |
| "step": 2449 |
| }, |
| { |
| "epoch": 2.7963491158014833, |
| "grad_norm": 0.20541335027371957, |
| "learning_rate": 3.7632135306553913e-06, |
| "loss": 0.29, |
| "step": 2450 |
| }, |
| { |
| "epoch": 2.7974900171135197, |
| "grad_norm": 0.2021310692826138, |
| "learning_rate": 3.742071881606766e-06, |
| "loss": 0.2932, |
| "step": 2451 |
| }, |
| { |
| "epoch": 2.7986309184255562, |
| "grad_norm": 0.20551180712915548, |
| "learning_rate": 3.72093023255814e-06, |
| "loss": 0.2958, |
| "step": 2452 |
| }, |
| { |
| "epoch": 2.7997718197375927, |
| "grad_norm": 0.2171587485859702, |
| "learning_rate": 3.6997885835095144e-06, |
| "loss": 0.2901, |
| "step": 2453 |
| }, |
| { |
| "epoch": 2.800912721049629, |
| "grad_norm": 0.19791283797515585, |
| "learning_rate": 3.678646934460888e-06, |
| "loss": 0.292, |
| "step": 2454 |
| }, |
| { |
| "epoch": 2.8020536223616657, |
| "grad_norm": 0.1989943243503315, |
| "learning_rate": 3.6575052854122625e-06, |
| "loss": 0.2921, |
| "step": 2455 |
| }, |
| { |
| "epoch": 2.803194523673702, |
| "grad_norm": 0.20434592117335035, |
| "learning_rate": 3.636363636363636e-06, |
| "loss": 0.3064, |
| "step": 2456 |
| }, |
| { |
| "epoch": 2.8043354249857386, |
| "grad_norm": 0.2084756469685634, |
| "learning_rate": 3.615221987315011e-06, |
| "loss": 0.2957, |
| "step": 2457 |
| }, |
| { |
| "epoch": 2.805476326297775, |
| "grad_norm": 0.20296767278473282, |
| "learning_rate": 3.5940803382663847e-06, |
| "loss": 0.2849, |
| "step": 2458 |
| }, |
| { |
| "epoch": 2.806617227609812, |
| "grad_norm": 0.19379089522361004, |
| "learning_rate": 3.5729386892177592e-06, |
| "loss": 0.306, |
| "step": 2459 |
| }, |
| { |
| "epoch": 2.807758128921848, |
| "grad_norm": 0.2114373361742132, |
| "learning_rate": 3.551797040169133e-06, |
| "loss": 0.2811, |
| "step": 2460 |
| }, |
| { |
| "epoch": 2.808899030233885, |
| "grad_norm": 0.19648544797014308, |
| "learning_rate": 3.5306553911205074e-06, |
| "loss": 0.2903, |
| "step": 2461 |
| }, |
| { |
| "epoch": 2.810039931545921, |
| "grad_norm": 0.19136569333549805, |
| "learning_rate": 3.5095137420718823e-06, |
| "loss": 0.2896, |
| "step": 2462 |
| }, |
| { |
| "epoch": 2.811180832857958, |
| "grad_norm": 3.255184569034401, |
| "learning_rate": 3.488372093023256e-06, |
| "loss": 0.3308, |
| "step": 2463 |
| }, |
| { |
| "epoch": 2.8123217341699944, |
| "grad_norm": 0.23148723562030504, |
| "learning_rate": 3.4672304439746304e-06, |
| "loss": 0.2929, |
| "step": 2464 |
| }, |
| { |
| "epoch": 2.813462635482031, |
| "grad_norm": 0.19251575021641812, |
| "learning_rate": 3.446088794926004e-06, |
| "loss": 0.2816, |
| "step": 2465 |
| }, |
| { |
| "epoch": 2.8146035367940674, |
| "grad_norm": 0.20358606861072642, |
| "learning_rate": 3.424947145877379e-06, |
| "loss": 0.2809, |
| "step": 2466 |
| }, |
| { |
| "epoch": 2.815744438106104, |
| "grad_norm": 0.2525881711594538, |
| "learning_rate": 3.4038054968287527e-06, |
| "loss": 0.2799, |
| "step": 2467 |
| }, |
| { |
| "epoch": 2.8168853394181403, |
| "grad_norm": 0.19051484999821816, |
| "learning_rate": 3.382663847780127e-06, |
| "loss": 0.2996, |
| "step": 2468 |
| }, |
| { |
| "epoch": 2.818026240730177, |
| "grad_norm": 0.21665936441766315, |
| "learning_rate": 3.361522198731501e-06, |
| "loss": 0.3008, |
| "step": 2469 |
| }, |
| { |
| "epoch": 2.8191671420422133, |
| "grad_norm": 0.21141505886515194, |
| "learning_rate": 3.3403805496828753e-06, |
| "loss": 0.2803, |
| "step": 2470 |
| }, |
| { |
| "epoch": 2.82030804335425, |
| "grad_norm": 0.2013970938079833, |
| "learning_rate": 3.3192389006342494e-06, |
| "loss": 0.2894, |
| "step": 2471 |
| }, |
| { |
| "epoch": 2.8214489446662863, |
| "grad_norm": 0.19323798161487787, |
| "learning_rate": 3.298097251585624e-06, |
| "loss": 0.2864, |
| "step": 2472 |
| }, |
| { |
| "epoch": 2.8225898459783227, |
| "grad_norm": 0.201226274752575, |
| "learning_rate": 3.2769556025369984e-06, |
| "loss": 0.3071, |
| "step": 2473 |
| }, |
| { |
| "epoch": 2.823730747290359, |
| "grad_norm": 0.2074222646039902, |
| "learning_rate": 3.255813953488372e-06, |
| "loss": 0.3135, |
| "step": 2474 |
| }, |
| { |
| "epoch": 2.8248716486023957, |
| "grad_norm": 0.20696506174071955, |
| "learning_rate": 3.234672304439747e-06, |
| "loss": 0.2734, |
| "step": 2475 |
| }, |
| { |
| "epoch": 2.8260125499144326, |
| "grad_norm": 0.19955249590119115, |
| "learning_rate": 3.2135306553911206e-06, |
| "loss": 0.2826, |
| "step": 2476 |
| }, |
| { |
| "epoch": 2.8271534512264687, |
| "grad_norm": 0.2156852402716923, |
| "learning_rate": 3.192389006342495e-06, |
| "loss": 0.3096, |
| "step": 2477 |
| }, |
| { |
| "epoch": 2.8282943525385056, |
| "grad_norm": 0.20841607177541718, |
| "learning_rate": 3.1712473572938687e-06, |
| "loss": 0.2849, |
| "step": 2478 |
| }, |
| { |
| "epoch": 2.829435253850542, |
| "grad_norm": 0.1941983020977351, |
| "learning_rate": 3.1501057082452432e-06, |
| "loss": 0.278, |
| "step": 2479 |
| }, |
| { |
| "epoch": 2.8305761551625785, |
| "grad_norm": 0.20869712403007026, |
| "learning_rate": 3.1289640591966173e-06, |
| "loss": 0.2873, |
| "step": 2480 |
| }, |
| { |
| "epoch": 2.831717056474615, |
| "grad_norm": 0.20706120097683225, |
| "learning_rate": 3.107822410147992e-06, |
| "loss": 0.2833, |
| "step": 2481 |
| }, |
| { |
| "epoch": 2.8328579577866515, |
| "grad_norm": 0.1982144805085283, |
| "learning_rate": 3.086680761099366e-06, |
| "loss": 0.2942, |
| "step": 2482 |
| }, |
| { |
| "epoch": 2.833998859098688, |
| "grad_norm": 0.20975348933509538, |
| "learning_rate": 3.06553911205074e-06, |
| "loss": 0.2797, |
| "step": 2483 |
| }, |
| { |
| "epoch": 2.8351397604107245, |
| "grad_norm": 0.21647387880347907, |
| "learning_rate": 3.0443974630021144e-06, |
| "loss": 0.2802, |
| "step": 2484 |
| }, |
| { |
| "epoch": 2.836280661722761, |
| "grad_norm": 0.20027142878455498, |
| "learning_rate": 3.0232558139534885e-06, |
| "loss": 0.2901, |
| "step": 2485 |
| }, |
| { |
| "epoch": 2.8374215630347974, |
| "grad_norm": 0.21680907668024518, |
| "learning_rate": 3.0021141649048626e-06, |
| "loss": 0.2915, |
| "step": 2486 |
| }, |
| { |
| "epoch": 2.838562464346834, |
| "grad_norm": 0.21538879323186988, |
| "learning_rate": 2.9809725158562367e-06, |
| "loss": 0.3016, |
| "step": 2487 |
| }, |
| { |
| "epoch": 2.8397033656588704, |
| "grad_norm": 0.21981713377113177, |
| "learning_rate": 2.959830866807611e-06, |
| "loss": 0.2818, |
| "step": 2488 |
| }, |
| { |
| "epoch": 2.840844266970907, |
| "grad_norm": 0.19697892124577174, |
| "learning_rate": 2.9386892177589852e-06, |
| "loss": 0.2892, |
| "step": 2489 |
| }, |
| { |
| "epoch": 2.8419851682829433, |
| "grad_norm": 0.18909864372788007, |
| "learning_rate": 2.9175475687103597e-06, |
| "loss": 0.2976, |
| "step": 2490 |
| }, |
| { |
| "epoch": 2.8431260695949803, |
| "grad_norm": 0.18454775144527993, |
| "learning_rate": 2.896405919661734e-06, |
| "loss": 0.3028, |
| "step": 2491 |
| }, |
| { |
| "epoch": 2.8442669709070163, |
| "grad_norm": 0.2310587393078742, |
| "learning_rate": 2.875264270613108e-06, |
| "loss": 0.298, |
| "step": 2492 |
| }, |
| { |
| "epoch": 2.8454078722190532, |
| "grad_norm": 0.21248087744098798, |
| "learning_rate": 2.8541226215644824e-06, |
| "loss": 0.29, |
| "step": 2493 |
| }, |
| { |
| "epoch": 2.8465487735310897, |
| "grad_norm": 0.19507301929030554, |
| "learning_rate": 2.8329809725158564e-06, |
| "loss": 0.2799, |
| "step": 2494 |
| }, |
| { |
| "epoch": 2.847689674843126, |
| "grad_norm": 0.19860147238212592, |
| "learning_rate": 2.8118393234672305e-06, |
| "loss": 0.2867, |
| "step": 2495 |
| }, |
| { |
| "epoch": 2.8488305761551627, |
| "grad_norm": 0.19602116174876164, |
| "learning_rate": 2.7906976744186046e-06, |
| "loss": 0.2984, |
| "step": 2496 |
| }, |
| { |
| "epoch": 2.849971477467199, |
| "grad_norm": 0.19553454209538595, |
| "learning_rate": 2.769556025369979e-06, |
| "loss": 0.2972, |
| "step": 2497 |
| }, |
| { |
| "epoch": 2.8511123787792356, |
| "grad_norm": 0.2040292670050931, |
| "learning_rate": 2.748414376321353e-06, |
| "loss": 0.3003, |
| "step": 2498 |
| }, |
| { |
| "epoch": 2.852253280091272, |
| "grad_norm": 0.20204674645438503, |
| "learning_rate": 2.7272727272727272e-06, |
| "loss": 0.3008, |
| "step": 2499 |
| }, |
| { |
| "epoch": 2.8533941814033086, |
| "grad_norm": 0.19934424552625263, |
| "learning_rate": 2.7061310782241013e-06, |
| "loss": 0.2881, |
| "step": 2500 |
| }, |
| { |
| "epoch": 2.854535082715345, |
| "grad_norm": 0.18785474732210933, |
| "learning_rate": 2.684989429175476e-06, |
| "loss": 0.2794, |
| "step": 2501 |
| }, |
| { |
| "epoch": 2.8556759840273815, |
| "grad_norm": 0.20006434112569327, |
| "learning_rate": 2.6638477801268503e-06, |
| "loss": 0.2964, |
| "step": 2502 |
| }, |
| { |
| "epoch": 2.856816885339418, |
| "grad_norm": 0.1977342860009862, |
| "learning_rate": 2.6427061310782244e-06, |
| "loss": 0.2845, |
| "step": 2503 |
| }, |
| { |
| "epoch": 2.8579577866514545, |
| "grad_norm": 0.20632919899960922, |
| "learning_rate": 2.6215644820295984e-06, |
| "loss": 0.2765, |
| "step": 2504 |
| }, |
| { |
| "epoch": 2.859098687963491, |
| "grad_norm": 0.21051236583497268, |
| "learning_rate": 2.6004228329809725e-06, |
| "loss": 0.305, |
| "step": 2505 |
| }, |
| { |
| "epoch": 2.860239589275528, |
| "grad_norm": 0.20023237671477812, |
| "learning_rate": 2.579281183932347e-06, |
| "loss": 0.274, |
| "step": 2506 |
| }, |
| { |
| "epoch": 2.861380490587564, |
| "grad_norm": 0.1828118884191061, |
| "learning_rate": 2.558139534883721e-06, |
| "loss": 0.3021, |
| "step": 2507 |
| }, |
| { |
| "epoch": 2.862521391899601, |
| "grad_norm": 0.20392801822251036, |
| "learning_rate": 2.536997885835095e-06, |
| "loss": 0.2843, |
| "step": 2508 |
| }, |
| { |
| "epoch": 2.8636622932116373, |
| "grad_norm": 0.20830765542926766, |
| "learning_rate": 2.5158562367864692e-06, |
| "loss": 0.2953, |
| "step": 2509 |
| }, |
| { |
| "epoch": 2.864803194523674, |
| "grad_norm": 0.2120174031270726, |
| "learning_rate": 2.4947145877378437e-06, |
| "loss": 0.2805, |
| "step": 2510 |
| }, |
| { |
| "epoch": 2.8659440958357103, |
| "grad_norm": 0.1945197850552631, |
| "learning_rate": 2.473572938689218e-06, |
| "loss": 0.289, |
| "step": 2511 |
| }, |
| { |
| "epoch": 2.867084997147747, |
| "grad_norm": 0.19141709513826796, |
| "learning_rate": 2.4524312896405923e-06, |
| "loss": 0.3101, |
| "step": 2512 |
| }, |
| { |
| "epoch": 2.8682258984597833, |
| "grad_norm": 0.21186283438035136, |
| "learning_rate": 2.4312896405919664e-06, |
| "loss": 0.2898, |
| "step": 2513 |
| }, |
| { |
| "epoch": 2.8693667997718197, |
| "grad_norm": 0.1945697871372187, |
| "learning_rate": 2.4101479915433404e-06, |
| "loss": 0.2907, |
| "step": 2514 |
| }, |
| { |
| "epoch": 2.8705077010838562, |
| "grad_norm": 0.19475854706319662, |
| "learning_rate": 2.389006342494715e-06, |
| "loss": 0.2913, |
| "step": 2515 |
| }, |
| { |
| "epoch": 2.8716486023958927, |
| "grad_norm": 0.19694923949612048, |
| "learning_rate": 2.367864693446089e-06, |
| "loss": 0.2989, |
| "step": 2516 |
| }, |
| { |
| "epoch": 2.872789503707929, |
| "grad_norm": 0.20020422907016433, |
| "learning_rate": 2.346723044397463e-06, |
| "loss": 0.2843, |
| "step": 2517 |
| }, |
| { |
| "epoch": 2.8739304050199657, |
| "grad_norm": 0.20878342093258945, |
| "learning_rate": 2.325581395348837e-06, |
| "loss": 0.283, |
| "step": 2518 |
| }, |
| { |
| "epoch": 2.875071306332002, |
| "grad_norm": 0.19905491086930102, |
| "learning_rate": 2.3044397463002116e-06, |
| "loss": 0.3028, |
| "step": 2519 |
| }, |
| { |
| "epoch": 2.8762122076440386, |
| "grad_norm": 0.19423780902965673, |
| "learning_rate": 2.2832980972515857e-06, |
| "loss": 0.2761, |
| "step": 2520 |
| }, |
| { |
| "epoch": 2.8773531089560755, |
| "grad_norm": 0.2002744524607323, |
| "learning_rate": 2.26215644820296e-06, |
| "loss": 0.2836, |
| "step": 2521 |
| }, |
| { |
| "epoch": 2.8784940102681116, |
| "grad_norm": 0.1989239278844251, |
| "learning_rate": 2.241014799154334e-06, |
| "loss": 0.3083, |
| "step": 2522 |
| }, |
| { |
| "epoch": 2.8796349115801485, |
| "grad_norm": 0.21159960733423624, |
| "learning_rate": 2.2198731501057084e-06, |
| "loss": 0.2943, |
| "step": 2523 |
| }, |
| { |
| "epoch": 2.880775812892185, |
| "grad_norm": 0.18973753627567133, |
| "learning_rate": 2.198731501057083e-06, |
| "loss": 0.2857, |
| "step": 2524 |
| }, |
| { |
| "epoch": 2.8819167142042215, |
| "grad_norm": 0.20692276429512232, |
| "learning_rate": 2.177589852008457e-06, |
| "loss": 0.3059, |
| "step": 2525 |
| }, |
| { |
| "epoch": 2.883057615516258, |
| "grad_norm": 0.20607858347875038, |
| "learning_rate": 2.156448202959831e-06, |
| "loss": 0.2748, |
| "step": 2526 |
| }, |
| { |
| "epoch": 2.8841985168282944, |
| "grad_norm": 0.19766789002244206, |
| "learning_rate": 2.135306553911205e-06, |
| "loss": 0.2864, |
| "step": 2527 |
| }, |
| { |
| "epoch": 2.885339418140331, |
| "grad_norm": 0.20508355580996102, |
| "learning_rate": 2.1141649048625796e-06, |
| "loss": 0.2825, |
| "step": 2528 |
| }, |
| { |
| "epoch": 2.8864803194523674, |
| "grad_norm": 0.21621004836804458, |
| "learning_rate": 2.0930232558139536e-06, |
| "loss": 0.2939, |
| "step": 2529 |
| }, |
| { |
| "epoch": 2.887621220764404, |
| "grad_norm": 0.20185327841415296, |
| "learning_rate": 2.0718816067653277e-06, |
| "loss": 0.2839, |
| "step": 2530 |
| }, |
| { |
| "epoch": 2.8887621220764403, |
| "grad_norm": 0.1933977567661312, |
| "learning_rate": 2.050739957716702e-06, |
| "loss": 0.3008, |
| "step": 2531 |
| }, |
| { |
| "epoch": 2.889903023388477, |
| "grad_norm": 0.2089992242477195, |
| "learning_rate": 2.0295983086680763e-06, |
| "loss": 0.2959, |
| "step": 2532 |
| }, |
| { |
| "epoch": 2.8910439247005133, |
| "grad_norm": 0.2030088255511792, |
| "learning_rate": 2.0084566596194504e-06, |
| "loss": 0.2766, |
| "step": 2533 |
| }, |
| { |
| "epoch": 2.89218482601255, |
| "grad_norm": 0.20334283930171593, |
| "learning_rate": 1.987315010570825e-06, |
| "loss": 0.2758, |
| "step": 2534 |
| }, |
| { |
| "epoch": 2.8933257273245863, |
| "grad_norm": 0.19141292320551842, |
| "learning_rate": 1.966173361522199e-06, |
| "loss": 0.2863, |
| "step": 2535 |
| }, |
| { |
| "epoch": 2.894466628636623, |
| "grad_norm": 0.20218732073147072, |
| "learning_rate": 1.945031712473573e-06, |
| "loss": 0.28, |
| "step": 2536 |
| }, |
| { |
| "epoch": 2.895607529948659, |
| "grad_norm": 0.1918627608925985, |
| "learning_rate": 1.9238900634249475e-06, |
| "loss": 0.2948, |
| "step": 2537 |
| }, |
| { |
| "epoch": 2.896748431260696, |
| "grad_norm": 0.19060330399001432, |
| "learning_rate": 1.9027484143763216e-06, |
| "loss": 0.2737, |
| "step": 2538 |
| }, |
| { |
| "epoch": 2.8978893325727326, |
| "grad_norm": 0.18229171529313887, |
| "learning_rate": 1.8816067653276956e-06, |
| "loss": 0.2916, |
| "step": 2539 |
| }, |
| { |
| "epoch": 2.899030233884769, |
| "grad_norm": 0.20382857407887608, |
| "learning_rate": 1.86046511627907e-06, |
| "loss": 0.2862, |
| "step": 2540 |
| }, |
| { |
| "epoch": 2.9001711351968056, |
| "grad_norm": 0.1979576770452718, |
| "learning_rate": 1.839323467230444e-06, |
| "loss": 0.2855, |
| "step": 2541 |
| }, |
| { |
| "epoch": 2.901312036508842, |
| "grad_norm": 0.23856764466276376, |
| "learning_rate": 1.818181818181818e-06, |
| "loss": 0.2861, |
| "step": 2542 |
| }, |
| { |
| "epoch": 2.9024529378208785, |
| "grad_norm": 0.18177042300595342, |
| "learning_rate": 1.7970401691331924e-06, |
| "loss": 0.2875, |
| "step": 2543 |
| }, |
| { |
| "epoch": 2.903593839132915, |
| "grad_norm": 0.20699386822550767, |
| "learning_rate": 1.7758985200845664e-06, |
| "loss": 0.292, |
| "step": 2544 |
| }, |
| { |
| "epoch": 2.9047347404449515, |
| "grad_norm": 0.1919349320022479, |
| "learning_rate": 1.7547568710359411e-06, |
| "loss": 0.294, |
| "step": 2545 |
| }, |
| { |
| "epoch": 2.905875641756988, |
| "grad_norm": 0.21020672149500216, |
| "learning_rate": 1.7336152219873152e-06, |
| "loss": 0.2909, |
| "step": 2546 |
| }, |
| { |
| "epoch": 2.9070165430690245, |
| "grad_norm": 0.19606757753191717, |
| "learning_rate": 1.7124735729386895e-06, |
| "loss": 0.2933, |
| "step": 2547 |
| }, |
| { |
| "epoch": 2.908157444381061, |
| "grad_norm": 0.21670306350149213, |
| "learning_rate": 1.6913319238900636e-06, |
| "loss": 0.2977, |
| "step": 2548 |
| }, |
| { |
| "epoch": 2.9092983456930974, |
| "grad_norm": 0.20976365996925933, |
| "learning_rate": 1.6701902748414376e-06, |
| "loss": 0.2873, |
| "step": 2549 |
| }, |
| { |
| "epoch": 2.910439247005134, |
| "grad_norm": 0.20449092038401195, |
| "learning_rate": 1.649048625792812e-06, |
| "loss": 0.3063, |
| "step": 2550 |
| }, |
| { |
| "epoch": 2.911580148317171, |
| "grad_norm": 0.19231918973044662, |
| "learning_rate": 1.627906976744186e-06, |
| "loss": 0.2914, |
| "step": 2551 |
| }, |
| { |
| "epoch": 2.912721049629207, |
| "grad_norm": 0.1936538427333248, |
| "learning_rate": 1.6067653276955603e-06, |
| "loss": 0.2913, |
| "step": 2552 |
| }, |
| { |
| "epoch": 2.913861950941244, |
| "grad_norm": 0.19368708996765574, |
| "learning_rate": 1.5856236786469344e-06, |
| "loss": 0.2917, |
| "step": 2553 |
| }, |
| { |
| "epoch": 2.91500285225328, |
| "grad_norm": 0.20383004181874495, |
| "learning_rate": 1.5644820295983086e-06, |
| "loss": 0.3022, |
| "step": 2554 |
| }, |
| { |
| "epoch": 2.9161437535653167, |
| "grad_norm": 0.19381726160739035, |
| "learning_rate": 1.543340380549683e-06, |
| "loss": 0.2995, |
| "step": 2555 |
| }, |
| { |
| "epoch": 2.9172846548773532, |
| "grad_norm": 0.20273695423735769, |
| "learning_rate": 1.5221987315010572e-06, |
| "loss": 0.3121, |
| "step": 2556 |
| }, |
| { |
| "epoch": 2.9184255561893897, |
| "grad_norm": 0.2036090775938918, |
| "learning_rate": 1.5010570824524313e-06, |
| "loss": 0.2825, |
| "step": 2557 |
| }, |
| { |
| "epoch": 2.919566457501426, |
| "grad_norm": 0.1862428612768711, |
| "learning_rate": 1.4799154334038056e-06, |
| "loss": 0.2778, |
| "step": 2558 |
| }, |
| { |
| "epoch": 2.9207073588134627, |
| "grad_norm": 0.19043762405083364, |
| "learning_rate": 1.4587737843551799e-06, |
| "loss": 0.2841, |
| "step": 2559 |
| }, |
| { |
| "epoch": 2.921848260125499, |
| "grad_norm": 0.20187309787287008, |
| "learning_rate": 1.437632135306554e-06, |
| "loss": 0.3121, |
| "step": 2560 |
| }, |
| { |
| "epoch": 2.9229891614375356, |
| "grad_norm": 0.1946742127017689, |
| "learning_rate": 1.4164904862579282e-06, |
| "loss": 0.293, |
| "step": 2561 |
| }, |
| { |
| "epoch": 2.924130062749572, |
| "grad_norm": 0.208031129118613, |
| "learning_rate": 1.3953488372093023e-06, |
| "loss": 0.2792, |
| "step": 2562 |
| }, |
| { |
| "epoch": 2.9252709640616086, |
| "grad_norm": 0.20002498323975684, |
| "learning_rate": 1.3742071881606766e-06, |
| "loss": 0.2943, |
| "step": 2563 |
| }, |
| { |
| "epoch": 2.926411865373645, |
| "grad_norm": 0.19726715468112263, |
| "learning_rate": 1.3530655391120506e-06, |
| "loss": 0.2843, |
| "step": 2564 |
| }, |
| { |
| "epoch": 2.9275527666856815, |
| "grad_norm": 0.19063146152359087, |
| "learning_rate": 1.3319238900634251e-06, |
| "loss": 0.2652, |
| "step": 2565 |
| }, |
| { |
| "epoch": 2.9286936679977185, |
| "grad_norm": 0.19186174328788505, |
| "learning_rate": 1.3107822410147992e-06, |
| "loss": 0.2789, |
| "step": 2566 |
| }, |
| { |
| "epoch": 2.9298345693097545, |
| "grad_norm": 0.18565312435433468, |
| "learning_rate": 1.2896405919661735e-06, |
| "loss": 0.2711, |
| "step": 2567 |
| }, |
| { |
| "epoch": 2.9309754706217914, |
| "grad_norm": 0.20681081424437364, |
| "learning_rate": 1.2684989429175476e-06, |
| "loss": 0.295, |
| "step": 2568 |
| }, |
| { |
| "epoch": 2.9321163719338275, |
| "grad_norm": 0.18971818353896588, |
| "learning_rate": 1.2473572938689219e-06, |
| "loss": 0.2926, |
| "step": 2569 |
| }, |
| { |
| "epoch": 2.9332572732458644, |
| "grad_norm": 0.19957945564892926, |
| "learning_rate": 1.2262156448202961e-06, |
| "loss": 0.2859, |
| "step": 2570 |
| }, |
| { |
| "epoch": 2.934398174557901, |
| "grad_norm": 0.19095388008305295, |
| "learning_rate": 1.2050739957716702e-06, |
| "loss": 0.287, |
| "step": 2571 |
| }, |
| { |
| "epoch": 2.9355390758699373, |
| "grad_norm": 0.1954767979123498, |
| "learning_rate": 1.1839323467230445e-06, |
| "loss": 0.2918, |
| "step": 2572 |
| }, |
| { |
| "epoch": 2.936679977181974, |
| "grad_norm": 0.18193127967812484, |
| "learning_rate": 1.1627906976744186e-06, |
| "loss": 0.2991, |
| "step": 2573 |
| }, |
| { |
| "epoch": 2.9378208784940103, |
| "grad_norm": 0.19683880921857755, |
| "learning_rate": 1.1416490486257929e-06, |
| "loss": 0.2959, |
| "step": 2574 |
| }, |
| { |
| "epoch": 2.938961779806047, |
| "grad_norm": 0.1938599942036892, |
| "learning_rate": 1.120507399577167e-06, |
| "loss": 0.2921, |
| "step": 2575 |
| }, |
| { |
| "epoch": 2.9401026811180833, |
| "grad_norm": 0.19585647152155478, |
| "learning_rate": 1.0993657505285414e-06, |
| "loss": 0.2876, |
| "step": 2576 |
| }, |
| { |
| "epoch": 2.9412435824301197, |
| "grad_norm": 0.18568095514600264, |
| "learning_rate": 1.0782241014799155e-06, |
| "loss": 0.2827, |
| "step": 2577 |
| }, |
| { |
| "epoch": 2.942384483742156, |
| "grad_norm": 0.18521242123000087, |
| "learning_rate": 1.0570824524312898e-06, |
| "loss": 0.2776, |
| "step": 2578 |
| }, |
| { |
| "epoch": 2.9435253850541927, |
| "grad_norm": 0.19199229150066516, |
| "learning_rate": 1.0359408033826639e-06, |
| "loss": 0.2952, |
| "step": 2579 |
| }, |
| { |
| "epoch": 2.944666286366229, |
| "grad_norm": 0.21405090207622357, |
| "learning_rate": 1.0147991543340381e-06, |
| "loss": 0.3068, |
| "step": 2580 |
| }, |
| { |
| "epoch": 2.945807187678266, |
| "grad_norm": 0.19626351454485538, |
| "learning_rate": 9.936575052854124e-07, |
| "loss": 0.2755, |
| "step": 2581 |
| }, |
| { |
| "epoch": 2.946948088990302, |
| "grad_norm": 0.18502284402400002, |
| "learning_rate": 9.725158562367865e-07, |
| "loss": 0.2841, |
| "step": 2582 |
| }, |
| { |
| "epoch": 2.948088990302339, |
| "grad_norm": 0.1982315842625302, |
| "learning_rate": 9.513742071881608e-07, |
| "loss": 0.2918, |
| "step": 2583 |
| }, |
| { |
| "epoch": 2.949229891614375, |
| "grad_norm": 0.18425481134910698, |
| "learning_rate": 9.30232558139535e-07, |
| "loss": 0.2942, |
| "step": 2584 |
| }, |
| { |
| "epoch": 2.950370792926412, |
| "grad_norm": 0.19674722242444975, |
| "learning_rate": 9.09090909090909e-07, |
| "loss": 0.2962, |
| "step": 2585 |
| }, |
| { |
| "epoch": 2.9515116942384485, |
| "grad_norm": 0.18753143414095313, |
| "learning_rate": 8.879492600422832e-07, |
| "loss": 0.2852, |
| "step": 2586 |
| }, |
| { |
| "epoch": 2.952652595550485, |
| "grad_norm": 0.21810746998579797, |
| "learning_rate": 8.668076109936576e-07, |
| "loss": 0.2932, |
| "step": 2587 |
| }, |
| { |
| "epoch": 2.9537934968625215, |
| "grad_norm": 0.19022816490059893, |
| "learning_rate": 8.456659619450318e-07, |
| "loss": 0.2977, |
| "step": 2588 |
| }, |
| { |
| "epoch": 2.954934398174558, |
| "grad_norm": 0.18911069763423244, |
| "learning_rate": 8.24524312896406e-07, |
| "loss": 0.2994, |
| "step": 2589 |
| }, |
| { |
| "epoch": 2.9560752994865944, |
| "grad_norm": 0.19384914458501556, |
| "learning_rate": 8.033826638477801e-07, |
| "loss": 0.298, |
| "step": 2590 |
| }, |
| { |
| "epoch": 2.957216200798631, |
| "grad_norm": 0.18640080999088518, |
| "learning_rate": 7.822410147991543e-07, |
| "loss": 0.2921, |
| "step": 2591 |
| }, |
| { |
| "epoch": 2.9583571021106674, |
| "grad_norm": 0.19120986595410241, |
| "learning_rate": 7.610993657505286e-07, |
| "loss": 0.2807, |
| "step": 2592 |
| }, |
| { |
| "epoch": 2.959498003422704, |
| "grad_norm": 0.17738248639668097, |
| "learning_rate": 7.399577167019028e-07, |
| "loss": 0.2794, |
| "step": 2593 |
| }, |
| { |
| "epoch": 2.9606389047347403, |
| "grad_norm": 0.20010347777133208, |
| "learning_rate": 7.18816067653277e-07, |
| "loss": 0.2788, |
| "step": 2594 |
| }, |
| { |
| "epoch": 2.961779806046777, |
| "grad_norm": 0.19032191300940796, |
| "learning_rate": 6.976744186046511e-07, |
| "loss": 0.2844, |
| "step": 2595 |
| }, |
| { |
| "epoch": 2.9629207073588133, |
| "grad_norm": 0.18550298786618058, |
| "learning_rate": 6.765327695560253e-07, |
| "loss": 0.2864, |
| "step": 2596 |
| }, |
| { |
| "epoch": 2.96406160867085, |
| "grad_norm": 0.18691192560025538, |
| "learning_rate": 6.553911205073996e-07, |
| "loss": 0.296, |
| "step": 2597 |
| }, |
| { |
| "epoch": 2.9652025099828867, |
| "grad_norm": 0.18180543168740507, |
| "learning_rate": 6.342494714587738e-07, |
| "loss": 0.2853, |
| "step": 2598 |
| }, |
| { |
| "epoch": 2.9663434112949227, |
| "grad_norm": 0.18467998613229547, |
| "learning_rate": 6.131078224101481e-07, |
| "loss": 0.2927, |
| "step": 2599 |
| }, |
| { |
| "epoch": 2.9674843126069597, |
| "grad_norm": 0.19071773172373238, |
| "learning_rate": 5.919661733615223e-07, |
| "loss": 0.288, |
| "step": 2600 |
| }, |
| { |
| "epoch": 2.968625213918996, |
| "grad_norm": 0.2040159766721208, |
| "learning_rate": 5.708245243128964e-07, |
| "loss": 0.2977, |
| "step": 2601 |
| }, |
| { |
| "epoch": 2.9697661152310326, |
| "grad_norm": 0.18910715703892397, |
| "learning_rate": 5.496828752642707e-07, |
| "loss": 0.2943, |
| "step": 2602 |
| }, |
| { |
| "epoch": 2.970907016543069, |
| "grad_norm": 0.20003695025712895, |
| "learning_rate": 5.285412262156449e-07, |
| "loss": 0.2789, |
| "step": 2603 |
| }, |
| { |
| "epoch": 2.9720479178551056, |
| "grad_norm": 0.18261240038936224, |
| "learning_rate": 5.073995771670191e-07, |
| "loss": 0.3079, |
| "step": 2604 |
| }, |
| { |
| "epoch": 2.973188819167142, |
| "grad_norm": 0.19686254457945906, |
| "learning_rate": 4.862579281183933e-07, |
| "loss": 0.2868, |
| "step": 2605 |
| }, |
| { |
| "epoch": 2.9743297204791785, |
| "grad_norm": 0.19786667723149334, |
| "learning_rate": 4.651162790697675e-07, |
| "loss": 0.2968, |
| "step": 2606 |
| }, |
| { |
| "epoch": 2.975470621791215, |
| "grad_norm": 0.20652336498177548, |
| "learning_rate": 4.439746300211416e-07, |
| "loss": 0.3022, |
| "step": 2607 |
| }, |
| { |
| "epoch": 2.9766115231032515, |
| "grad_norm": 0.1967118773874935, |
| "learning_rate": 4.228329809725159e-07, |
| "loss": 0.2832, |
| "step": 2608 |
| }, |
| { |
| "epoch": 2.977752424415288, |
| "grad_norm": 0.1945048452350098, |
| "learning_rate": 4.0169133192389007e-07, |
| "loss": 0.2974, |
| "step": 2609 |
| }, |
| { |
| "epoch": 2.9788933257273245, |
| "grad_norm": 0.1889319361030166, |
| "learning_rate": 3.805496828752643e-07, |
| "loss": 0.2861, |
| "step": 2610 |
| }, |
| { |
| "epoch": 2.980034227039361, |
| "grad_norm": 0.20296137411834944, |
| "learning_rate": 3.594080338266385e-07, |
| "loss": 0.2938, |
| "step": 2611 |
| }, |
| { |
| "epoch": 2.9811751283513974, |
| "grad_norm": 0.18722476513226705, |
| "learning_rate": 3.3826638477801266e-07, |
| "loss": 0.2909, |
| "step": 2612 |
| }, |
| { |
| "epoch": 2.9823160296634343, |
| "grad_norm": 0.19444064275525808, |
| "learning_rate": 3.171247357293869e-07, |
| "loss": 0.2815, |
| "step": 2613 |
| }, |
| { |
| "epoch": 2.9834569309754704, |
| "grad_norm": 0.1908410516017102, |
| "learning_rate": 2.959830866807611e-07, |
| "loss": 0.2784, |
| "step": 2614 |
| }, |
| { |
| "epoch": 2.9845978322875073, |
| "grad_norm": 0.19556815957043194, |
| "learning_rate": 2.7484143763213536e-07, |
| "loss": 0.3052, |
| "step": 2615 |
| }, |
| { |
| "epoch": 2.985738733599544, |
| "grad_norm": 0.20031795787914, |
| "learning_rate": 2.5369978858350954e-07, |
| "loss": 0.3062, |
| "step": 2616 |
| }, |
| { |
| "epoch": 2.9868796349115803, |
| "grad_norm": 0.2120042672146075, |
| "learning_rate": 2.3255813953488374e-07, |
| "loss": 0.2793, |
| "step": 2617 |
| }, |
| { |
| "epoch": 2.9880205362236167, |
| "grad_norm": 0.2135770614047869, |
| "learning_rate": 2.1141649048625795e-07, |
| "loss": 0.278, |
| "step": 2618 |
| }, |
| { |
| "epoch": 2.989161437535653, |
| "grad_norm": 0.1865270923940558, |
| "learning_rate": 1.9027484143763215e-07, |
| "loss": 0.2858, |
| "step": 2619 |
| }, |
| { |
| "epoch": 2.9903023388476897, |
| "grad_norm": 0.1917240005764539, |
| "learning_rate": 1.6913319238900633e-07, |
| "loss": 0.2951, |
| "step": 2620 |
| }, |
| { |
| "epoch": 2.991443240159726, |
| "grad_norm": 0.18904300839758295, |
| "learning_rate": 1.4799154334038056e-07, |
| "loss": 0.2856, |
| "step": 2621 |
| }, |
| { |
| "epoch": 2.9925841414717627, |
| "grad_norm": 0.1836428754871979, |
| "learning_rate": 1.2684989429175477e-07, |
| "loss": 0.2911, |
| "step": 2622 |
| }, |
| { |
| "epoch": 2.993725042783799, |
| "grad_norm": 0.2057010148568336, |
| "learning_rate": 1.0570824524312897e-07, |
| "loss": 0.2841, |
| "step": 2623 |
| }, |
| { |
| "epoch": 2.9948659440958356, |
| "grad_norm": 0.19150844068855372, |
| "learning_rate": 8.456659619450317e-08, |
| "loss": 0.2928, |
| "step": 2624 |
| }, |
| { |
| "epoch": 2.996006845407872, |
| "grad_norm": 0.18710502056330086, |
| "learning_rate": 6.342494714587738e-08, |
| "loss": 0.2823, |
| "step": 2625 |
| }, |
| { |
| "epoch": 2.9971477467199086, |
| "grad_norm": 0.18850823231663, |
| "learning_rate": 4.228329809725158e-08, |
| "loss": 0.2842, |
| "step": 2626 |
| }, |
| { |
| "epoch": 2.998288648031945, |
| "grad_norm": 0.1861174706937187, |
| "learning_rate": 2.114164904862579e-08, |
| "loss": 0.2976, |
| "step": 2627 |
| }, |
| { |
| "epoch": 2.999429549343982, |
| "grad_norm": 0.18555388787832147, |
| "learning_rate": 0.0, |
| "loss": 0.2851, |
| "step": 2628 |
| }, |
| { |
| "epoch": 2.999429549343982, |
| "step": 2628, |
| "total_flos": 2.2479308431902638e+18, |
| "train_loss": 0.20299065702763502, |
| "train_runtime": 141958.5173, |
| "train_samples_per_second": 0.296, |
| "train_steps_per_second": 0.019 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2628, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.2479308431902638e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |