diff --git "a/trainer_state.json" "b/trainer_state.json" --- "a/trainer_state.json" +++ "b/trainer_state.json" @@ -3,3891 +3,9477 @@ "best_model_checkpoint": null, "epoch": 3.0, "eval_steps": 500, - "global_step": 552, + "global_step": 1350, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { - "epoch": 0.005434782608695652, + "epoch": 0.0022222222222222222, "grad_norm": 0.0, "learning_rate": 0.0, - "loss": 10.5124, + "loss": 7.5063, "step": 1 }, { - "epoch": 0.010869565217391304, + "epoch": 0.0044444444444444444, "grad_norm": 0.0, "learning_rate": 0.0, - "loss": 10.5941, + "loss": 7.5259, "step": 2 }, { - "epoch": 0.016304347826086956, - "grad_norm": 8.096252368220718, - "learning_rate": 1.1764705882352942e-06, - "loss": 10.475, + "epoch": 0.006666666666666667, + "grad_norm": 14.722691054044644, + "learning_rate": 4.878048780487805e-07, + "loss": 7.8127, "step": 3 }, { - "epoch": 0.021739130434782608, - "grad_norm": 8.39383943803796, - "learning_rate": 2.3529411764705885e-06, - "loss": 10.4029, + "epoch": 0.008888888888888889, + "grad_norm": 15.997231564770075, + "learning_rate": 9.75609756097561e-07, + "loss": 7.6197, "step": 4 }, { - "epoch": 0.02717391304347826, - "grad_norm": 8.479649232958007, - "learning_rate": 3.529411764705883e-06, - "loss": 10.606, + "epoch": 0.011111111111111112, + "grad_norm": 14.467791387915264, + "learning_rate": 1.4634146341463414e-06, + "loss": 7.4453, "step": 5 }, { - "epoch": 0.03260869565217391, - "grad_norm": 8.388175109430223, - "learning_rate": 4.705882352941177e-06, - "loss": 10.4024, + "epoch": 0.013333333333333334, + "grad_norm": 14.984981636676713, + "learning_rate": 1.951219512195122e-06, + "loss": 7.5914, "step": 6 }, { - "epoch": 0.03804347826086957, - "grad_norm": 8.445899787393927, - "learning_rate": 5.882352941176471e-06, - "loss": 10.4772, + "epoch": 0.015555555555555555, + "grad_norm": 15.077118022052625, + "learning_rate": 2.4390243902439027e-06, + "loss": 7.4796, "step": 7 }, { - "epoch": 0.043478260869565216, - "grad_norm": 8.405772228388786, - "learning_rate": 7.058823529411766e-06, - "loss": 10.4004, + "epoch": 0.017777777777777778, + "grad_norm": 15.148026483750279, + "learning_rate": 2.926829268292683e-06, + "loss": 7.4247, "step": 8 }, { - "epoch": 0.04891304347826087, - "grad_norm": 8.44764590867685, - "learning_rate": 8.23529411764706e-06, - "loss": 10.1775, + "epoch": 0.02, + "grad_norm": 15.793248090870668, + "learning_rate": 3.414634146341464e-06, + "loss": 7.3934, "step": 9 }, { - "epoch": 0.05434782608695652, - "grad_norm": 8.23897507323131, - "learning_rate": 9.411764705882354e-06, - "loss": 10.2434, + "epoch": 0.022222222222222223, + "grad_norm": 14.745864579312784, + "learning_rate": 3.902439024390244e-06, + "loss": 7.2383, "step": 10 }, { - "epoch": 0.059782608695652176, - "grad_norm": 8.118852150518913, - "learning_rate": 1.0588235294117648e-05, - "loss": 9.7644, + "epoch": 0.024444444444444446, + "grad_norm": 13.548582285799728, + "learning_rate": 4.390243902439025e-06, + "loss": 7.1486, "step": 11 }, { - "epoch": 0.06521739130434782, - "grad_norm": 8.570315139494753, - "learning_rate": 1.1764705882352942e-05, - "loss": 9.7751, + "epoch": 0.02666666666666667, + "grad_norm": 14.720797876395546, + "learning_rate": 4.8780487804878055e-06, + "loss": 6.8787, "step": 12 }, { - "epoch": 0.07065217391304347, - "grad_norm": 8.622402474140065, - "learning_rate": 1.2941176470588238e-05, - "loss": 9.2685, + "epoch": 0.028888888888888888, + "grad_norm": 
14.720797876395546, + "learning_rate": 4.8780487804878055e-06, + "loss": 6.7092, "step": 13 }, { - "epoch": 0.07608695652173914, - "grad_norm": 8.736670863686008, - "learning_rate": 1.4117647058823532e-05, - "loss": 8.897, + "epoch": 0.03111111111111111, + "grad_norm": 13.494405885070066, + "learning_rate": 5.365853658536586e-06, + "loss": 6.5932, "step": 14 }, { - "epoch": 0.08152173913043478, - "grad_norm": 9.172468108894085, - "learning_rate": 1.5294117647058822e-05, - "loss": 8.7101, + "epoch": 0.03333333333333333, + "grad_norm": 13.419197196396428, + "learning_rate": 5.853658536585366e-06, + "loss": 6.4646, "step": 15 }, { - "epoch": 0.08695652173913043, - "grad_norm": 10.228378996373296, - "learning_rate": 1.647058823529412e-05, - "loss": 8.3074, + "epoch": 0.035555555555555556, + "grad_norm": 12.585219419721852, + "learning_rate": 6.341463414634147e-06, + "loss": 6.3942, "step": 16 }, { - "epoch": 0.09239130434782608, - "grad_norm": 10.657372840257251, - "learning_rate": 1.7647058823529414e-05, - "loss": 7.8589, + "epoch": 0.03777777777777778, + "grad_norm": 12.394135586099884, + "learning_rate": 6.829268292682928e-06, + "loss": 6.0584, "step": 17 }, { - "epoch": 0.09782608695652174, - "grad_norm": 10.887433964524527, - "learning_rate": 1.8823529411764708e-05, - "loss": 7.4742, + "epoch": 0.04, + "grad_norm": 11.755060654107035, + "learning_rate": 7.317073170731707e-06, + "loss": 5.8665, "step": 18 }, { - "epoch": 0.10326086956521739, - "grad_norm": 11.682285639818433, - "learning_rate": 2e-05, - "loss": 6.8416, + "epoch": 0.042222222222222223, + "grad_norm": 9.878058848359446, + "learning_rate": 7.804878048780489e-06, + "loss": 5.7149, "step": 19 }, { - "epoch": 0.10869565217391304, - "grad_norm": 11.901377724871265, - "learning_rate": 1.999982759060109e-05, - "loss": 6.2183, + "epoch": 0.044444444444444446, + "grad_norm": 10.261336993815789, + "learning_rate": 8.292682926829268e-06, + "loss": 5.3142, "step": 20 }, { - "epoch": 0.11413043478260869, - "grad_norm": 11.383373292219964, - "learning_rate": 1.9999310368349344e-05, - "loss": 5.4371, + "epoch": 0.04666666666666667, + "grad_norm": 9.385615154826477, + "learning_rate": 8.78048780487805e-06, + "loss": 5.2707, "step": 21 }, { - "epoch": 0.11956521739130435, - "grad_norm": 9.311596334088138, - "learning_rate": 1.999844835107957e-05, - "loss": 4.7164, + "epoch": 0.04888888888888889, + "grad_norm": 9.00414600801183, + "learning_rate": 9.268292682926831e-06, + "loss": 4.8972, "step": 22 }, { - "epoch": 0.125, - "grad_norm": 8.688635937406437, - "learning_rate": 1.9997241568515742e-05, - "loss": 4.456, + "epoch": 0.051111111111111114, + "grad_norm": 8.99496502004504, + "learning_rate": 9.756097560975611e-06, + "loss": 4.6351, "step": 23 }, { - "epoch": 0.13043478260869565, - "grad_norm": 7.4122077747748305, - "learning_rate": 1.9995690062269985e-05, - "loss": 3.8875, + "epoch": 0.05333333333333334, + "grad_norm": 9.6195691646832, + "learning_rate": 1.024390243902439e-05, + "loss": 4.4307, "step": 24 }, { - "epoch": 0.1358695652173913, - "grad_norm": 6.888182537563505, - "learning_rate": 1.9993793885841157e-05, - "loss": 3.5685, + "epoch": 0.05555555555555555, + "grad_norm": 9.640243431977325, + "learning_rate": 1.0731707317073172e-05, + "loss": 4.1603, "step": 25 }, { - "epoch": 0.14130434782608695, - "grad_norm": 6.988607551936095, - "learning_rate": 1.9991553104612982e-05, - "loss": 3.4123, + "epoch": 0.057777777777777775, + "grad_norm": 9.644996529706356, + "learning_rate": 1.1219512195121953e-05, + "loss": 4.0732, "step": 26 }, 
{ - "epoch": 0.14673913043478262, - "grad_norm": 7.211548625105269, - "learning_rate": 1.998896779585181e-05, - "loss": 3.0838, + "epoch": 0.06, + "grad_norm": 11.023960644254869, + "learning_rate": 1.1707317073170731e-05, + "loss": 3.5722, "step": 27 }, { - "epoch": 0.15217391304347827, - "grad_norm": 7.767483170773942, - "learning_rate": 1.998603804870395e-05, - "loss": 2.831, + "epoch": 0.06222222222222222, + "grad_norm": 11.514078388606869, + "learning_rate": 1.2195121951219513e-05, + "loss": 3.3489, "step": 28 }, { - "epoch": 0.15760869565217392, - "grad_norm": 7.950559222260086, - "learning_rate": 1.9982763964192586e-05, - "loss": 2.6297, + "epoch": 0.06444444444444444, + "grad_norm": 12.85876229981152, + "learning_rate": 1.2682926829268294e-05, + "loss": 2.8889, "step": 29 }, { - "epoch": 0.16304347826086957, - "grad_norm": 8.23795631455961, - "learning_rate": 1.9979145655214306e-05, - "loss": 2.2795, + "epoch": 0.06666666666666667, + "grad_norm": 12.726620056440314, + "learning_rate": 1.3170731707317076e-05, + "loss": 2.4981, "step": 30 }, { - "epoch": 0.16847826086956522, - "grad_norm": 8.57956169127235, - "learning_rate": 1.9975183246535212e-05, - "loss": 2.0509, + "epoch": 0.06888888888888889, + "grad_norm": 12.345960015475454, + "learning_rate": 1.3658536585365855e-05, + "loss": 2.0857, "step": 31 }, { - "epoch": 0.17391304347826086, - "grad_norm": 8.071070816084118, - "learning_rate": 1.99708768747866e-05, - "loss": 1.8279, + "epoch": 0.07111111111111111, + "grad_norm": 10.001492462135348, + "learning_rate": 1.4146341463414635e-05, + "loss": 1.6092, "step": 32 }, { - "epoch": 0.1793478260869565, - "grad_norm": 7.042152882720071, - "learning_rate": 1.9966226688460258e-05, - "loss": 1.3567, + "epoch": 0.07333333333333333, + "grad_norm": 8.31007323949108, + "learning_rate": 1.4634146341463415e-05, + "loss": 1.2106, "step": 33 }, { - "epoch": 0.18478260869565216, - "grad_norm": 4.814338676579685, - "learning_rate": 1.996123284790336e-05, - "loss": 0.9542, + "epoch": 0.07555555555555556, + "grad_norm": 6.0433858836555485, + "learning_rate": 1.5121951219512196e-05, + "loss": 1.0327, "step": 34 }, { - "epoch": 0.19021739130434784, - "grad_norm": 2.9434658655739474, - "learning_rate": 1.9955895525312913e-05, - "loss": 0.8261, + "epoch": 0.07777777777777778, + "grad_norm": 3.3804822559139613, + "learning_rate": 1.5609756097560978e-05, + "loss": 0.83, "step": 35 }, { - "epoch": 0.1956521739130435, - "grad_norm": 2.452806110360505, - "learning_rate": 1.995021490472983e-05, - "loss": 0.851, + "epoch": 0.08, + "grad_norm": 1.6575898216212128, + "learning_rate": 1.6097560975609757e-05, + "loss": 0.8246, "step": 36 }, { - "epoch": 0.20108695652173914, - "grad_norm": 1.6789979391543146, - "learning_rate": 1.9944191182032588e-05, - "loss": 0.8265, + "epoch": 0.08222222222222222, + "grad_norm": 1.1425531127984794, + "learning_rate": 1.6585365853658537e-05, + "loss": 0.7479, "step": 37 }, { - "epoch": 0.20652173913043478, - "grad_norm": 2.0007370440742154, - "learning_rate": 1.9937824564930474e-05, - "loss": 0.8181, + "epoch": 0.08444444444444445, + "grad_norm": 2.233938995336524, + "learning_rate": 1.7073170731707317e-05, + "loss": 0.846, "step": 38 }, { - "epoch": 0.21195652173913043, - "grad_norm": 2.493212508529885, - "learning_rate": 1.9931115272956405e-05, - "loss": 0.767, + "epoch": 0.08666666666666667, + "grad_norm": 1.19656933444323, + "learning_rate": 1.75609756097561e-05, + "loss": 0.7303, "step": 39 }, { - "epoch": 0.21739130434782608, - "grad_norm": 1.9209687838841931, - 
"learning_rate": 1.992406353745939e-05, - "loss": 0.7196, + "epoch": 0.08888888888888889, + "grad_norm": 0.8165279819725018, + "learning_rate": 1.804878048780488e-05, + "loss": 0.6628, "step": 40 }, { - "epoch": 0.22282608695652173, - "grad_norm": 1.8290330319103352, - "learning_rate": 1.9916669601596515e-05, - "loss": 0.7299, + "epoch": 0.09111111111111111, + "grad_norm": 1.08419220015424, + "learning_rate": 1.8536585365853663e-05, + "loss": 0.6638, "step": 41 }, { - "epoch": 0.22826086956521738, - "grad_norm": 1.7900648029089992, - "learning_rate": 1.990893372032459e-05, - "loss": 0.7229, + "epoch": 0.09333333333333334, + "grad_norm": 1.4937850679627862, + "learning_rate": 1.902439024390244e-05, + "loss": 0.7694, "step": 42 }, { - "epoch": 0.23369565217391305, - "grad_norm": 1.6749799534602232, - "learning_rate": 1.990085616039135e-05, - "loss": 0.7238, + "epoch": 0.09555555555555556, + "grad_norm": 0.918975020401649, + "learning_rate": 1.9512195121951222e-05, + "loss": 0.6888, "step": 43 }, { - "epoch": 0.2391304347826087, - "grad_norm": 1.986613572625418, - "learning_rate": 1.989243720032624e-05, - "loss": 0.7332, + "epoch": 0.09777777777777778, + "grad_norm": 1.621751227796066, + "learning_rate": 2e-05, + "loss": 0.6929, "step": 44 }, { - "epoch": 0.24456521739130435, - "grad_norm": 1.8912806129771145, - "learning_rate": 1.9883677130430827e-05, - "loss": 0.5864, + "epoch": 0.1, + "grad_norm": 1.8984821349919128, + "learning_rate": 1.999997120014852e-05, + "loss": 0.6873, "step": 45 }, { - "epoch": 0.25, - "grad_norm": 1.7750105086017574, - "learning_rate": 1.9874576252768793e-05, - "loss": 0.6124, + "epoch": 0.10222222222222223, + "grad_norm": 1.4101227637498284, + "learning_rate": 1.9999884800759955e-05, + "loss": 0.6671, "step": 46 }, { - "epoch": 0.2554347826086957, - "grad_norm": 1.2955635391212061, - "learning_rate": 1.9865134881155504e-05, - "loss": 0.6884, + "epoch": 0.10444444444444445, + "grad_norm": 1.2073741880436464, + "learning_rate": 1.9999740802331976e-05, + "loss": 0.6705, "step": 47 }, { - "epoch": 0.2608695652173913, - "grad_norm": 1.273010141736733, - "learning_rate": 1.98553533411472e-05, - "loss": 0.6484, + "epoch": 0.10666666666666667, + "grad_norm": 1.414899088195992, + "learning_rate": 1.9999539205693996e-05, + "loss": 0.7036, "step": 48 }, { - "epoch": 0.266304347826087, - "grad_norm": 2.163538460282388, - "learning_rate": 1.9845231970029774e-05, - "loss": 0.7095, + "epoch": 0.10888888888888888, + "grad_norm": 1.4613799024835918, + "learning_rate": 1.9999280012007213e-05, + "loss": 0.6638, "step": 49 }, { - "epoch": 0.2717391304347826, - "grad_norm": 1.8775881503442995, - "learning_rate": 1.983477111680712e-05, - "loss": 0.604, + "epoch": 0.1111111111111111, + "grad_norm": 0.6725223951703471, + "learning_rate": 1.9998963222764574e-05, + "loss": 0.6857, "step": 50 }, { - "epoch": 0.27717391304347827, - "grad_norm": 1.5484748822902972, - "learning_rate": 1.9823971142189126e-05, - "loss": 0.6862, + "epoch": 0.11333333333333333, + "grad_norm": 1.8872886767968855, + "learning_rate": 1.9998588839790777e-05, + "loss": 0.7852, "step": 51 }, { - "epoch": 0.2826086956521739, - "grad_norm": 1.0946391927116763, - "learning_rate": 1.981283241857922e-05, - "loss": 0.6276, + "epoch": 0.11555555555555555, + "grad_norm": 0.9537117004789476, + "learning_rate": 1.9998156865242256e-05, + "loss": 0.6477, "step": 52 }, { - "epoch": 0.28804347826086957, - "grad_norm": 1.4879971843628843, - "learning_rate": 1.9801355330061526e-05, - "loss": 0.5763, + "epoch": 0.11777777777777777, + 
"grad_norm": 0.8576779409246996, + "learning_rate": 1.9997667301607172e-05, + "loss": 0.6637, "step": 53 }, { - "epoch": 0.29347826086956524, - "grad_norm": 1.8993705185884953, - "learning_rate": 1.978954027238763e-05, - "loss": 0.5908, + "epoch": 0.12, + "grad_norm": 1.1782602558474236, + "learning_rate": 1.9997120151705393e-05, + "loss": 0.7428, "step": 54 }, { - "epoch": 0.29891304347826086, - "grad_norm": 1.6076663483914293, - "learning_rate": 1.9777387652962933e-05, - "loss": 0.5543, + "epoch": 0.12222222222222222, + "grad_norm": 1.1603885871259951, + "learning_rate": 1.9996515418688493e-05, + "loss": 0.72, "step": 55 }, { - "epoch": 0.30434782608695654, - "grad_norm": 1.1740894440396383, - "learning_rate": 1.9764897890832597e-05, - "loss": 0.5458, + "epoch": 0.12444444444444444, + "grad_norm": 2.3122148787572243, + "learning_rate": 1.9995853106039707e-05, + "loss": 0.6848, "step": 56 }, { - "epoch": 0.30978260869565216, - "grad_norm": 1.9838553435397361, - "learning_rate": 1.9752071416667102e-05, - "loss": 0.5046, + "epoch": 0.12666666666666668, + "grad_norm": 1.1561151782934596, + "learning_rate": 1.9995133217573943e-05, + "loss": 0.7536, "step": 57 }, { - "epoch": 0.31521739130434784, - "grad_norm": 1.0812842728047714, - "learning_rate": 1.973890867274738e-05, - "loss": 0.5609, + "epoch": 0.1288888888888889, + "grad_norm": 1.1958665790278533, + "learning_rate": 1.999435575743774e-05, + "loss": 0.6975, "step": 58 }, { - "epoch": 0.32065217391304346, - "grad_norm": 1.723223092822651, - "learning_rate": 1.972541011294959e-05, - "loss": 0.4724, + "epoch": 0.13111111111111112, + "grad_norm": 0.9773662800270114, + "learning_rate": 1.9993520730109236e-05, + "loss": 0.6352, "step": 59 }, { - "epoch": 0.32608695652173914, - "grad_norm": 1.4887350192643218, - "learning_rate": 1.9711576202729445e-05, - "loss": 0.5168, + "epoch": 0.13333333333333333, + "grad_norm": 1.799550577385025, + "learning_rate": 1.999262814039817e-05, + "loss": 0.6746, "step": 60 }, { - "epoch": 0.33152173913043476, - "grad_norm": 1.533986608527031, - "learning_rate": 1.9697407419106178e-05, - "loss": 0.5374, + "epoch": 0.13555555555555557, + "grad_norm": 1.2473925426497188, + "learning_rate": 1.9991677993445832e-05, + "loss": 0.701, "step": 61 }, { - "epoch": 0.33695652173913043, - "grad_norm": 1.283663400004928, - "learning_rate": 1.9682904250646084e-05, - "loss": 0.622, + "epoch": 0.13777777777777778, + "grad_norm": 1.8054669089832702, + "learning_rate": 1.9990670294725036e-05, + "loss": 0.6154, "step": 62 }, { - "epoch": 0.3423913043478261, - "grad_norm": 1.511070122779534, - "learning_rate": 1.9668067197445662e-05, - "loss": 0.572, + "epoch": 0.14, + "grad_norm": 1.0163393007517252, + "learning_rate": 1.99896050500401e-05, + "loss": 0.6871, "step": 63 }, { - "epoch": 0.34782608695652173, - "grad_norm": 1.843030359662425, - "learning_rate": 1.9652896771114416e-05, - "loss": 0.5449, + "epoch": 0.14222222222222222, + "grad_norm": 0.8281980864061993, + "learning_rate": 1.9988482265526805e-05, + "loss": 0.7017, "step": 64 }, { - "epoch": 0.3532608695652174, - "grad_norm": 2.2753033401712752, - "learning_rate": 1.9637393494757146e-05, - "loss": 0.6883, + "epoch": 0.14444444444444443, + "grad_norm": 1.3390799802182618, + "learning_rate": 1.9987301947652354e-05, + "loss": 0.6986, "step": 65 }, { - "epoch": 0.358695652173913, - "grad_norm": 1.1407510209951979, - "learning_rate": 1.962155790295597e-05, - "loss": 0.4357, + "epoch": 0.14666666666666667, + "grad_norm": 1.605924131490741, + "learning_rate": 
1.998606410321534e-05, + "loss": 0.6804, "step": 66 }, { - "epoch": 0.3641304347826087, - "grad_norm": 1.351954153650573, - "learning_rate": 1.9605390541751864e-05, - "loss": 0.5109, + "epoch": 0.14888888888888888, + "grad_norm": 1.0548141691056787, + "learning_rate": 1.998476873934571e-05, + "loss": 0.66, "step": 67 }, { - "epoch": 0.3695652173913043, - "grad_norm": 1.2344312626302043, - "learning_rate": 1.9588891968625828e-05, - "loss": 0.5133, + "epoch": 0.1511111111111111, + "grad_norm": 0.9130722586122906, + "learning_rate": 1.9983415863504723e-05, + "loss": 0.6492, "step": 68 }, { - "epoch": 0.375, - "grad_norm": 3.528171261663953, - "learning_rate": 1.9572062752479684e-05, - "loss": 0.7135, + "epoch": 0.15333333333333332, + "grad_norm": 1.4362008943547966, + "learning_rate": 1.998200548348491e-05, + "loss": 0.6429, "step": 69 }, { - "epoch": 0.3804347826086957, - "grad_norm": 1.0283054372439564, - "learning_rate": 1.9554903473616432e-05, - "loss": 0.4934, + "epoch": 0.15555555555555556, + "grad_norm": 1.351087411884983, + "learning_rate": 1.9980537607410007e-05, + "loss": 0.6446, "step": 70 }, { - "epoch": 0.3858695652173913, - "grad_norm": 1.2480924815092371, - "learning_rate": 1.953741472372027e-05, - "loss": 0.3846, + "epoch": 0.15777777777777777, + "grad_norm": 1.4671588047342925, + "learning_rate": 1.9979012243734943e-05, + "loss": 0.6532, "step": 71 }, { - "epoch": 0.391304347826087, - "grad_norm": 1.4701584460006578, - "learning_rate": 1.951959710583616e-05, - "loss": 0.5303, + "epoch": 0.16, + "grad_norm": 1.17315945059453, + "learning_rate": 1.9977429401245764e-05, + "loss": 0.6588, "step": 72 }, { - "epoch": 0.3967391304347826, - "grad_norm": 2.2396908880712774, - "learning_rate": 1.950145123434907e-05, - "loss": 0.4241, + "epoch": 0.1622222222222222, + "grad_norm": 1.2620846776311911, + "learning_rate": 1.9975789089059598e-05, + "loss": 0.7003, "step": 73 }, { - "epoch": 0.40217391304347827, - "grad_norm": 1.7904621917947958, - "learning_rate": 1.9482977734962753e-05, - "loss": 0.6144, + "epoch": 0.16444444444444445, + "grad_norm": 2.402061947786113, + "learning_rate": 1.9974091316624596e-05, + "loss": 0.7886, "step": 74 }, { - "epoch": 0.4076086956521739, - "grad_norm": 1.650705831140192, - "learning_rate": 1.94641772446782e-05, - "loss": 0.592, + "epoch": 0.16666666666666666, + "grad_norm": 1.1838405421284488, + "learning_rate": 1.9972336093719876e-05, + "loss": 0.7089, "step": 75 }, { - "epoch": 0.41304347826086957, - "grad_norm": 1.588255971243881, - "learning_rate": 1.9445050411771648e-05, - "loss": 0.5918, + "epoch": 0.1688888888888889, + "grad_norm": 1.0388901526690217, + "learning_rate": 1.997052343045547e-05, + "loss": 0.6717, "step": 76 }, { - "epoch": 0.41847826086956524, - "grad_norm": 1.4379861368277966, - "learning_rate": 1.9425597895772257e-05, - "loss": 0.604, + "epoch": 0.1711111111111111, + "grad_norm": 1.745776427553139, + "learning_rate": 1.9968653337272262e-05, + "loss": 0.6386, "step": 77 }, { - "epoch": 0.42391304347826086, - "grad_norm": 1.7783069990731366, - "learning_rate": 1.9405820367439343e-05, - "loss": 0.6351, + "epoch": 0.17333333333333334, + "grad_norm": 1.6653206182043356, + "learning_rate": 1.9966725824941933e-05, + "loss": 0.7078, "step": 78 }, { - "epoch": 0.42934782608695654, - "grad_norm": 1.3451929958729711, - "learning_rate": 1.9385718508739263e-05, - "loss": 0.4487, + "epoch": 0.17555555555555555, + "grad_norm": 0.9960932615552109, + "learning_rate": 1.9964740904566903e-05, + "loss": 0.6857, "step": 79 }, { - "epoch": 
0.43478260869565216, - "grad_norm": 1.5631174238633363, - "learning_rate": 1.9365293012821887e-05, - "loss": 0.5412, + "epoch": 0.17777777777777778, + "grad_norm": 1.4924744665596363, + "learning_rate": 1.9962698587580246e-05, + "loss": 0.7984, "step": 80 }, { - "epoch": 0.44021739130434784, - "grad_norm": 1.7641796531654723, - "learning_rate": 1.934454458399671e-05, - "loss": 0.4606, + "epoch": 0.18, + "grad_norm": 1.102309794654413, + "learning_rate": 1.996059888574565e-05, + "loss": 0.6744, "step": 81 }, { - "epoch": 0.44565217391304346, - "grad_norm": 2.007206796904478, - "learning_rate": 1.9323473937708565e-05, - "loss": 0.5409, + "epoch": 0.18222222222222223, + "grad_norm": 1.367017490200943, + "learning_rate": 1.9958441811157342e-05, + "loss": 0.6743, "step": 82 }, { - "epoch": 0.45108695652173914, - "grad_norm": 1.6060302211544533, - "learning_rate": 1.9302081800512943e-05, - "loss": 0.5194, + "epoch": 0.18444444444444444, + "grad_norm": 1.4318569931825933, + "learning_rate": 1.9956227376239995e-05, + "loss": 0.7155, "step": 83 }, { - "epoch": 0.45652173913043476, - "grad_norm": 1.584139057778314, - "learning_rate": 1.9280368910050943e-05, - "loss": 0.4662, + "epoch": 0.18666666666666668, + "grad_norm": 0.7685375899452971, + "learning_rate": 1.99539555937487e-05, + "loss": 0.6322, "step": 84 }, { - "epoch": 0.46195652173913043, - "grad_norm": 1.8953323400594193, - "learning_rate": 1.9258336015023847e-05, - "loss": 0.4433, + "epoch": 0.18888888888888888, + "grad_norm": 1.7065611965176022, + "learning_rate": 1.9951626476768847e-05, + "loss": 0.7183, "step": 85 }, { - "epoch": 0.4673913043478261, - "grad_norm": 1.6067605181621798, - "learning_rate": 1.9235983875167296e-05, - "loss": 0.4255, + "epoch": 0.19111111111111112, + "grad_norm": 1.2812204719409646, + "learning_rate": 1.9949240038716092e-05, + "loss": 0.6893, "step": 86 }, { - "epoch": 0.47282608695652173, - "grad_norm": 1.4529302278758023, - "learning_rate": 1.9213313261225083e-05, - "loss": 0.4364, + "epoch": 0.19333333333333333, + "grad_norm": 1.6553197079704987, + "learning_rate": 1.9946796293336237e-05, + "loss": 0.6658, "step": 87 }, { - "epoch": 0.4782608695652174, - "grad_norm": 1.9965642456327142, - "learning_rate": 1.9190324954922594e-05, - "loss": 0.4199, + "epoch": 0.19555555555555557, + "grad_norm": 1.4977827926202083, + "learning_rate": 1.9944295254705187e-05, + "loss": 0.6274, "step": 88 }, { - "epoch": 0.483695652173913, - "grad_norm": 1.9458245431232768, - "learning_rate": 1.9167019748939847e-05, - "loss": 0.4024, + "epoch": 0.19777777777777777, + "grad_norm": 1.7632785136442362, + "learning_rate": 1.994173693722885e-05, + "loss": 0.73, "step": 89 }, { - "epoch": 0.4891304347826087, - "grad_norm": 2.000159805579825, - "learning_rate": 1.914339844688415e-05, - "loss": 0.4595, + "epoch": 0.2, + "grad_norm": 1.7602534494796744, + "learning_rate": 1.9939121355643057e-05, + "loss": 0.6606, "step": 90 }, { - "epoch": 0.4945652173913043, - "grad_norm": 1.97378975953703, - "learning_rate": 1.91194618632624e-05, - "loss": 0.4917, + "epoch": 0.20222222222222222, + "grad_norm": 1.5282071513777915, + "learning_rate": 1.993644852501348e-05, + "loss": 0.6653, "step": 91 }, { - "epoch": 0.5, - "grad_norm": 1.3771983904411074, - "learning_rate": 1.9095210823452997e-05, - "loss": 0.3341, + "epoch": 0.20444444444444446, + "grad_norm": 1.0341910602467146, + "learning_rate": 1.9933718460735553e-05, + "loss": 0.6885, "step": 92 }, { - "epoch": 0.5054347826086957, - "grad_norm": 1.8123410249166505, - "learning_rate": 
1.9070646163677383e-05, - "loss": 0.4285, + "epoch": 0.20666666666666667, + "grad_norm": 0.9917842426165263, + "learning_rate": 1.9930931178534353e-05, + "loss": 0.6458, "step": 93 }, { - "epoch": 0.5108695652173914, - "grad_norm": 1.7561172390607174, - "learning_rate": 1.9045768730971198e-05, - "loss": 0.3863, + "epoch": 0.2088888888888889, + "grad_norm": 1.5782836008098677, + "learning_rate": 1.9928086694464544e-05, + "loss": 0.6657, "step": 94 }, { - "epoch": 0.5163043478260869, - "grad_norm": 1.809060828661053, - "learning_rate": 1.9020579383155087e-05, - "loss": 0.3486, + "epoch": 0.2111111111111111, + "grad_norm": 1.0679344061648397, + "learning_rate": 1.992518502491028e-05, + "loss": 0.6502, "step": 95 }, { - "epoch": 0.5217391304347826, - "grad_norm": 1.541206279317173, - "learning_rate": 1.899507898880512e-05, - "loss": 0.1713, + "epoch": 0.21333333333333335, + "grad_norm": 0.894256372047346, + "learning_rate": 1.992222618658508e-05, + "loss": 0.6669, "step": 96 }, { - "epoch": 0.5271739130434783, - "grad_norm": 2.0502484531232343, - "learning_rate": 1.8969268427222823e-05, - "loss": 0.2059, + "epoch": 0.21555555555555556, + "grad_norm": 1.8072307001190309, + "learning_rate": 1.9919210196531774e-05, + "loss": 0.6665, "step": 97 }, { - "epoch": 0.532608695652174, - "grad_norm": 1.8524406597388374, - "learning_rate": 1.8943148588404877e-05, - "loss": 0.3856, + "epoch": 0.21777777777777776, + "grad_norm": 1.5196266378787933, + "learning_rate": 1.9916137072122367e-05, + "loss": 0.6386, "step": 98 }, { - "epoch": 0.5380434782608695, - "grad_norm": 3.385889154621842, - "learning_rate": 1.8916720373012425e-05, - "loss": 0.3027, + "epoch": 0.22, + "grad_norm": 1.6755114990203843, + "learning_rate": 1.9913006831057967e-05, + "loss": 0.6378, "step": 99 }, { - "epoch": 0.5434782608695652, - "grad_norm": 1.2814547066301334, - "learning_rate": 1.8889984692340015e-05, - "loss": 0.1609, + "epoch": 0.2222222222222222, + "grad_norm": 1.2087876062985885, + "learning_rate": 1.9909819491368677e-05, + "loss": 0.6355, "step": 100 }, { - "epoch": 0.5489130434782609, - "grad_norm": 1.473493575445019, - "learning_rate": 1.8862942468284174e-05, - "loss": 0.1658, + "epoch": 0.22444444444444445, + "grad_norm": 1.1191077597487138, + "learning_rate": 1.9906575071413468e-05, + "loss": 0.6222, "step": 101 }, { - "epoch": 0.5543478260869565, - "grad_norm": 2.2017906861514125, - "learning_rate": 1.883559463331162e-05, - "loss": 0.2269, + "epoch": 0.22666666666666666, + "grad_norm": 1.0266523093527746, + "learning_rate": 1.9903273589880107e-05, + "loss": 0.6292, "step": 102 }, { - "epoch": 0.5597826086956522, - "grad_norm": 2.9266092953974345, - "learning_rate": 1.880794213042711e-05, - "loss": 0.2638, + "epoch": 0.2288888888888889, + "grad_norm": 1.3533109890908208, + "learning_rate": 1.989991506578503e-05, + "loss": 0.6709, "step": 103 }, { - "epoch": 0.5652173913043478, - "grad_norm": 1.2470192969755443, - "learning_rate": 1.8779985913140927e-05, - "loss": 0.1826, + "epoch": 0.2311111111111111, + "grad_norm": 1.2253552327179564, + "learning_rate": 1.9896499518473237e-05, + "loss": 0.6516, "step": 104 }, { - "epoch": 0.5706521739130435, - "grad_norm": 1.1329281006012806, - "learning_rate": 1.875172694543599e-05, - "loss": 0.0992, + "epoch": 0.23333333333333334, + "grad_norm": 1.206853574486262, + "learning_rate": 1.9893026967618176e-05, + "loss": 0.6638, "step": 105 }, { - "epoch": 0.5760869565217391, - "grad_norm": 1.435458967360399, - "learning_rate": 1.8723166201734626e-05, - "loss": 0.1052, + "epoch": 
0.23555555555555555, + "grad_norm": 1.344371862254855, + "learning_rate": 1.988949743322164e-05, + "loss": 0.5966, "step": 106 }, { - "epoch": 0.5815217391304348, - "grad_norm": 2.4406380430615244, - "learning_rate": 1.869430466686497e-05, - "loss": 0.1999, + "epoch": 0.23777777777777778, + "grad_norm": 1.4902511137225691, + "learning_rate": 1.988591093561364e-05, + "loss": 0.6017, "step": 107 }, { - "epoch": 0.5869565217391305, - "grad_norm": 1.0271614062096617, - "learning_rate": 1.8665143336027e-05, - "loss": 0.0855, + "epoch": 0.24, + "grad_norm": 1.377709138840221, + "learning_rate": 1.9882267495452296e-05, + "loss": 0.5654, "step": 108 }, { - "epoch": 0.592391304347826, - "grad_norm": 1.3651592297249626, - "learning_rate": 1.8635683214758213e-05, - "loss": 0.0977, + "epoch": 0.24222222222222223, + "grad_norm": 1.1440453483903748, + "learning_rate": 1.987856713372372e-05, + "loss": 0.6113, "step": 109 }, { - "epoch": 0.5978260869565217, - "grad_norm": 0.5945892482638718, - "learning_rate": 1.8605925318898973e-05, - "loss": 0.0337, + "epoch": 0.24444444444444444, + "grad_norm": 1.3273393596996115, + "learning_rate": 1.9874809871741877e-05, + "loss": 0.6737, "step": 110 }, { - "epoch": 0.6032608695652174, - "grad_norm": 1.194835217639101, - "learning_rate": 1.8575870674557467e-05, - "loss": 0.0722, + "epoch": 0.24666666666666667, + "grad_norm": 1.1698440287358172, + "learning_rate": 1.987099573114849e-05, + "loss": 0.6469, "step": 111 }, { - "epoch": 0.6086956521739131, - "grad_norm": 1.762735939201958, - "learning_rate": 1.8545520318074328e-05, - "loss": 0.1228, + "epoch": 0.24888888888888888, + "grad_norm": 2.3147846542204884, + "learning_rate": 1.986712473391289e-05, + "loss": 0.6119, "step": 112 }, { - "epoch": 0.6141304347826086, - "grad_norm": 1.017829163872169, - "learning_rate": 1.85148752959869e-05, - "loss": 0.0344, + "epoch": 0.2511111111111111, + "grad_norm": 1.3514124701358237, + "learning_rate": 1.9863196902331916e-05, + "loss": 0.6436, "step": 113 }, { - "epoch": 0.6195652173913043, - "grad_norm": 1.052690658912748, - "learning_rate": 1.8483936664993152e-05, - "loss": 0.0377, + "epoch": 0.25333333333333335, + "grad_norm": 2.031033341055374, + "learning_rate": 1.985921225902975e-05, + "loss": 0.7102, "step": 114 }, { - "epoch": 0.625, - "grad_norm": 1.7977784022224987, - "learning_rate": 1.8452705491915232e-05, - "loss": 0.141, + "epoch": 0.25555555555555554, + "grad_norm": 1.779051971654245, + "learning_rate": 1.985517082695783e-05, + "loss": 0.5927, "step": 115 }, { - "epoch": 0.6304347826086957, - "grad_norm": 1.8477093237099182, - "learning_rate": 1.8421182853662704e-05, - "loss": 0.0734, + "epoch": 0.2577777777777778, + "grad_norm": 1.985997034461326, + "learning_rate": 1.985107262939468e-05, + "loss": 0.5925, "step": 116 }, { - "epoch": 0.6358695652173914, - "grad_norm": 0.6794730347498438, - "learning_rate": 1.8389369837195387e-05, - "loss": 0.0266, + "epoch": 0.26, + "grad_norm": 1.1420319758499788, + "learning_rate": 1.984691768994579e-05, + "loss": 0.5606, "step": 117 }, { - "epoch": 0.6413043478260869, - "grad_norm": 0.8818635589659883, - "learning_rate": 1.835726753948589e-05, - "loss": 0.0487, + "epoch": 0.26222222222222225, + "grad_norm": 1.45012568985935, + "learning_rate": 1.9842706032543496e-05, + "loss": 0.5302, "step": 118 }, { - "epoch": 0.6467391304347826, - "grad_norm": 1.0608887498751458, - "learning_rate": 1.8324877067481782e-05, - "loss": 0.0275, + "epoch": 0.2644444444444444, + "grad_norm": 2.0571868831911986, + "learning_rate": 
1.983843768144682e-05, + "loss": 0.6307, "step": 119 }, { - "epoch": 0.6521739130434783, - "grad_norm": 1.3129587931586821, - "learning_rate": 1.829219953806743e-05, - "loss": 0.0642, + "epoch": 0.26666666666666666, + "grad_norm": 1.9860044752969075, + "learning_rate": 1.983411266124133e-05, + "loss": 0.588, "step": 120 }, { - "epoch": 0.657608695652174, - "grad_norm": 1.8948301224723039, - "learning_rate": 1.825923607802547e-05, - "loss": 0.0785, + "epoch": 0.2688888888888889, + "grad_norm": 1.4267814457361687, + "learning_rate": 1.982973099683902e-05, + "loss": 0.4638, "step": 121 }, { - "epoch": 0.6630434782608695, - "grad_norm": 0.2518374968408712, - "learning_rate": 1.8225987823997967e-05, - "loss": 0.0111, + "epoch": 0.27111111111111114, + "grad_norm": 2.0707136992140667, + "learning_rate": 1.9825292713478145e-05, + "loss": 0.4867, "step": 122 }, { - "epoch": 0.6684782608695652, - "grad_norm": 0.25552971144651465, - "learning_rate": 1.8192455922447227e-05, - "loss": 0.0103, + "epoch": 0.2733333333333333, + "grad_norm": 1.5691593602876095, + "learning_rate": 1.9820797836723086e-05, + "loss": 0.4559, "step": 123 }, { - "epoch": 0.6739130434782609, - "grad_norm": 0.7841302667217214, - "learning_rate": 1.815864152961624e-05, - "loss": 0.0122, + "epoch": 0.27555555555555555, + "grad_norm": 1.8093632761585683, + "learning_rate": 1.98162463924642e-05, + "loss": 0.5202, "step": 124 }, { - "epoch": 0.6793478260869565, - "grad_norm": 0.1515291563958561, - "learning_rate": 1.812454581148884e-05, - "loss": 0.0079, + "epoch": 0.2777777777777778, + "grad_norm": 3.0305313684125994, + "learning_rate": 1.9811638406917666e-05, + "loss": 0.5261, "step": 125 }, { - "epoch": 0.6847826086956522, - "grad_norm": 0.11584834326779594, - "learning_rate": 1.8090169943749477e-05, - "loss": 0.0055, + "epoch": 0.28, + "grad_norm": 1.9785746959505057, + "learning_rate": 1.9806973906625352e-05, + "loss": 0.4618, "step": 126 }, { - "epoch": 0.6902173913043478, - "grad_norm": 0.1740566784478502, - "learning_rate": 1.8055515111742688e-05, - "loss": 0.0069, + "epoch": 0.2822222222222222, + "grad_norm": 1.5903343645003654, + "learning_rate": 1.980225291845463e-05, + "loss": 0.4995, "step": 127 }, { - "epoch": 0.6956521739130435, - "grad_norm": 1.5625062014274096, - "learning_rate": 1.8020582510432234e-05, - "loss": 0.0383, + "epoch": 0.28444444444444444, + "grad_norm": 2.2576306546740312, + "learning_rate": 1.9797475469598267e-05, + "loss": 0.396, "step": 128 }, { - "epoch": 0.7010869565217391, - "grad_norm": 0.12273159750563628, - "learning_rate": 1.798537334435986e-05, - "loss": 0.0062, + "epoch": 0.2866666666666667, + "grad_norm": 2.4479504389825135, + "learning_rate": 1.9792641587574212e-05, + "loss": 0.511, "step": 129 }, { - "epoch": 0.7065217391304348, - "grad_norm": 3.693193027378141, - "learning_rate": 1.7949888827603813e-05, - "loss": 0.1765, + "epoch": 0.28888888888888886, + "grad_norm": 1.6673079649705205, + "learning_rate": 1.978775130022549e-05, + "loss": 0.3181, "step": 130 }, { - "epoch": 0.7119565217391305, - "grad_norm": 0.12477337459792677, - "learning_rate": 1.791413018373692e-05, - "loss": 0.0057, + "epoch": 0.2911111111111111, + "grad_norm": 2.3834685277588545, + "learning_rate": 1.978280463572001e-05, + "loss": 0.3603, "step": 131 }, { - "epoch": 0.717391304347826, - "grad_norm": 0.8357268279739778, - "learning_rate": 1.7878098645784447e-05, - "loss": 0.0163, + "epoch": 0.29333333333333333, + "grad_norm": 2.4228108259789143, + "learning_rate": 1.977780162255041e-05, + "loss": 0.4199, "step": 132 }, 
{ - "epoch": 0.7228260869565217, - "grad_norm": 3.8264656288549985, - "learning_rate": 1.7841795456181556e-05, - "loss": 0.1727, + "epoch": 0.29555555555555557, + "grad_norm": 2.1331710326272955, + "learning_rate": 1.9772742289533896e-05, + "loss": 0.2263, "step": 133 }, { - "epoch": 0.7282608695652174, - "grad_norm": 0.6387227523871831, - "learning_rate": 1.780522186673046e-05, - "loss": 0.0076, + "epoch": 0.29777777777777775, + "grad_norm": 3.6630520712350694, + "learning_rate": 1.9767626665812083e-05, + "loss": 0.4192, "step": 134 }, { - "epoch": 0.7336956521739131, - "grad_norm": 0.09079528876022976, - "learning_rate": 1.776837913855728e-05, - "loss": 0.0038, + "epoch": 0.3, + "grad_norm": 4.07858252404236, + "learning_rate": 1.9762454780850807e-05, + "loss": 0.5114, "step": 135 }, { - "epoch": 0.7391304347826086, - "grad_norm": 1.9001901725953279, - "learning_rate": 1.7731268542068536e-05, - "loss": 0.0208, + "epoch": 0.3022222222222222, + "grad_norm": 1.4811458016129508, + "learning_rate": 1.9757226664439968e-05, + "loss": 0.2196, "step": 136 }, { - "epoch": 0.7445652173913043, - "grad_norm": 0.21704170005212517, - "learning_rate": 1.7693891356907357e-05, - "loss": 0.007, + "epoch": 0.30444444444444446, + "grad_norm": 3.9239640426900904, + "learning_rate": 1.9751942346693368e-05, + "loss": 0.2792, "step": 137 }, { - "epoch": 0.75, - "grad_norm": 0.7213653784073487, - "learning_rate": 1.7656248871909346e-05, - "loss": 0.0137, + "epoch": 0.30666666666666664, + "grad_norm": 2.4647788971137556, + "learning_rate": 1.9746601858048517e-05, + "loss": 0.2546, "step": 138 }, { - "epoch": 0.7554347826086957, - "grad_norm": 0.40110602562720454, - "learning_rate": 1.7618342385058147e-05, - "loss": 0.0099, + "epoch": 0.3088888888888889, + "grad_norm": 1.600372072633903, + "learning_rate": 1.974120522926647e-05, + "loss": 0.2242, "step": 139 }, { - "epoch": 0.7608695652173914, - "grad_norm": 2.026407827233553, - "learning_rate": 1.758017320344068e-05, - "loss": 0.0415, + "epoch": 0.3111111111111111, + "grad_norm": 2.123167528955402, + "learning_rate": 1.973575249143165e-05, + "loss": 0.247, "step": 140 }, { - "epoch": 0.7663043478260869, - "grad_norm": 1.1169723105563958, - "learning_rate": 1.754174264320208e-05, - "loss": 0.0232, + "epoch": 0.31333333333333335, + "grad_norm": 3.0780287722150663, + "learning_rate": 1.9730243675951666e-05, + "loss": 0.325, "step": 141 }, { - "epoch": 0.7717391304347826, - "grad_norm": 0.1746366846193237, - "learning_rate": 1.7503052029500308e-05, - "loss": 0.0052, + "epoch": 0.31555555555555553, + "grad_norm": 2.2232852553984834, + "learning_rate": 1.972467881455713e-05, + "loss": 0.2792, "step": 142 }, { - "epoch": 0.7771739130434783, - "grad_norm": 2.3203125623649874, - "learning_rate": 1.7464102696460447e-05, - "loss": 0.2205, + "epoch": 0.31777777777777777, + "grad_norm": 2.9177551632247303, + "learning_rate": 1.9719057939301477e-05, + "loss": 0.2311, "step": 143 }, { - "epoch": 0.782608695652174, - "grad_norm": 3.9663829407278315, - "learning_rate": 1.7424895987128723e-05, - "loss": 0.223, + "epoch": 0.32, + "grad_norm": 3.265913388830823, + "learning_rate": 1.9713381082560784e-05, + "loss": 0.2266, "step": 144 }, { - "epoch": 0.7880434782608695, - "grad_norm": 2.9570619026185883, - "learning_rate": 1.738543325342617e-05, - "loss": 0.0697, + "epoch": 0.32222222222222224, + "grad_norm": 5.161310957428422, + "learning_rate": 1.970764827703358e-05, + "loss": 0.3806, "step": 145 }, { - "epoch": 0.7934782608695652, - "grad_norm": 0.07057319843123724, - 
"learning_rate": 1.7345715856102024e-05, - "loss": 0.0031, + "epoch": 0.3244444444444444, + "grad_norm": 3.757035717946109, + "learning_rate": 1.9701859555740647e-05, + "loss": 0.1554, "step": 146 }, { - "epoch": 0.7989130434782609, - "grad_norm": 0.11320521777018241, - "learning_rate": 1.7305745164686816e-05, - "loss": 0.0042, + "epoch": 0.32666666666666666, + "grad_norm": 1.5391689268175186, + "learning_rate": 1.9696014952024854e-05, + "loss": 0.0585, "step": 147 }, { - "epoch": 0.8043478260869565, - "grad_norm": 1.3124572295306176, - "learning_rate": 1.7265522557445115e-05, - "loss": 0.021, + "epoch": 0.3288888888888889, + "grad_norm": 5.430816313430678, + "learning_rate": 1.969011449955094e-05, + "loss": 0.3218, "step": 148 }, { - "epoch": 0.8097826086956522, - "grad_norm": 0.42701665371399616, - "learning_rate": 1.7225049421328024e-05, - "loss": 0.0091, + "epoch": 0.33111111111111113, + "grad_norm": 3.1628265588302606, + "learning_rate": 1.968415823230534e-05, + "loss": 0.255, "step": 149 }, { - "epoch": 0.8152173913043478, - "grad_norm": 0.6276112813031721, - "learning_rate": 1.7184327151925366e-05, - "loss": 0.0094, + "epoch": 0.3333333333333333, + "grad_norm": 3.8090956287928983, + "learning_rate": 1.9678146184595974e-05, + "loss": 0.2778, "step": 150 }, { - "epoch": 0.8206521739130435, - "grad_norm": 1.5664524264393311, - "learning_rate": 1.7143357153417533e-05, - "loss": 0.0256, + "epoch": 0.33555555555555555, + "grad_norm": 2.5609280859784556, + "learning_rate": 1.967207839105206e-05, + "loss": 0.209, "step": 151 }, { - "epoch": 0.8260869565217391, - "grad_norm": 0.41431375770399115, - "learning_rate": 1.710214083852709e-05, - "loss": 0.0117, + "epoch": 0.3377777777777778, + "grad_norm": 0.931291112617408, + "learning_rate": 1.9665954886623906e-05, + "loss": 0.0763, "step": 152 }, { - "epoch": 0.8315217391304348, - "grad_norm": 0.3493269925986, - "learning_rate": 1.7060679628470054e-05, - "loss": 0.0084, + "epoch": 0.34, + "grad_norm": 4.048073193883698, + "learning_rate": 1.9659775706582717e-05, + "loss": 0.2012, "step": 153 }, { - "epoch": 0.8369565217391305, - "grad_norm": 0.3211404898250956, - "learning_rate": 1.7018974952906885e-05, - "loss": 0.0084, + "epoch": 0.3422222222222222, + "grad_norm": 1.3914150063470174, + "learning_rate": 1.9653540886520387e-05, + "loss": 0.1055, "step": 154 }, { - "epoch": 0.842391304347826, - "grad_norm": 0.21231254558257762, - "learning_rate": 1.697702824989319e-05, - "loss": 0.0065, + "epoch": 0.34444444444444444, + "grad_norm": 1.833144677317589, + "learning_rate": 1.9647250462349296e-05, + "loss": 0.0933, "step": 155 }, { - "epoch": 0.8478260869565217, - "grad_norm": 1.457137599474762, - "learning_rate": 1.693484096583014e-05, - "loss": 0.0226, + "epoch": 0.3466666666666667, + "grad_norm": 3.855307015384292, + "learning_rate": 1.96409044703021e-05, + "loss": 0.2396, "step": 156 }, { - "epoch": 0.8532608695652174, - "grad_norm": 0.19497147073015395, - "learning_rate": 1.6892414555414594e-05, - "loss": 0.0048, + "epoch": 0.3488888888888889, + "grad_norm": 1.7306595986738853, + "learning_rate": 1.9634502946931517e-05, + "loss": 0.0949, "step": 157 }, { - "epoch": 0.8586956521739131, - "grad_norm": 1.8062131040571878, - "learning_rate": 1.6849750481588936e-05, - "loss": 0.0277, + "epoch": 0.3511111111111111, + "grad_norm": 3.5614778907775957, + "learning_rate": 1.9628045929110144e-05, + "loss": 0.1186, "step": 158 }, { - "epoch": 0.8641304347826086, - "grad_norm": 1.3188356922598312, - "learning_rate": 1.680685021549063e-05, - "loss": 0.0207, 
+ "epoch": 0.35333333333333333, + "grad_norm": 3.6189482028384954, + "learning_rate": 1.9621533454030204e-05, + "loss": 0.1631, "step": 159 }, { - "epoch": 0.8695652173913043, - "grad_norm": 0.26492812790936593, - "learning_rate": 1.6763715236401493e-05, - "loss": 0.0059, + "epoch": 0.35555555555555557, + "grad_norm": 2.33615157165387, + "learning_rate": 1.9614965559203358e-05, + "loss": 0.1783, "step": 160 }, { - "epoch": 0.875, - "grad_norm": 0.3017199408994534, - "learning_rate": 1.672034703169669e-05, - "loss": 0.0076, + "epoch": 0.35777777777777775, + "grad_norm": 3.9817782071565606, + "learning_rate": 1.9608342282460492e-05, + "loss": 0.2477, "step": 161 }, { - "epoch": 0.8804347826086957, - "grad_norm": 0.1252817764595737, - "learning_rate": 1.667674709679344e-05, - "loss": 0.0041, + "epoch": 0.36, + "grad_norm": 0.914051091453137, + "learning_rate": 1.960166366195148e-05, + "loss": 0.04, "step": 162 }, { - "epoch": 0.8858695652173914, - "grad_norm": 1.1529370223873083, - "learning_rate": 1.663291693509946e-05, - "loss": 0.019, + "epoch": 0.3622222222222222, + "grad_norm": 1.0617340381178342, + "learning_rate": 1.9594929736144978e-05, + "loss": 0.039, "step": 163 }, { - "epoch": 0.8913043478260869, - "grad_norm": 0.12063163996672908, - "learning_rate": 1.658885805796111e-05, - "loss": 0.0031, + "epoch": 0.36444444444444446, + "grad_norm": 1.3764933745099894, + "learning_rate": 1.9588140543828196e-05, + "loss": 0.0569, "step": 164 }, { - "epoch": 0.8967391304347826, - "grad_norm": 0.11125376158368971, - "learning_rate": 1.6544571984611306e-05, - "loss": 0.0034, + "epoch": 0.36666666666666664, + "grad_norm": 1.1611049157281468, + "learning_rate": 1.9581296124106682e-05, + "loss": 0.0585, "step": 165 }, { - "epoch": 0.9021739130434783, - "grad_norm": 0.19945453640512878, - "learning_rate": 1.6500060242117096e-05, - "loss": 0.0051, + "epoch": 0.3688888888888889, + "grad_norm": 0.7892616834108386, + "learning_rate": 1.957439651640409e-05, + "loss": 0.0194, "step": 166 }, { - "epoch": 0.907608695652174, - "grad_norm": 0.07254620014242376, - "learning_rate": 1.6455324365327035e-05, - "loss": 0.0026, + "epoch": 0.3711111111111111, + "grad_norm": 0.966371299849505, + "learning_rate": 1.956744176046196e-05, + "loss": 0.0228, "step": 167 }, { - "epoch": 0.9130434782608695, - "grad_norm": 1.3895686723936829, - "learning_rate": 1.6410365896818253e-05, - "loss": 0.0234, + "epoch": 0.37333333333333335, + "grad_norm": 9.054351198134148, + "learning_rate": 1.9560431896339475e-05, + "loss": 0.2245, "step": 168 }, { - "epoch": 0.9184782608695652, - "grad_norm": 0.7517916115731629, - "learning_rate": 1.636518638684325e-05, - "loss": 0.0057, + "epoch": 0.37555555555555553, + "grad_norm": 1.075040949727813, + "learning_rate": 1.9553366964413244e-05, + "loss": 0.0207, "step": 169 }, { - "epoch": 0.9239130434782609, - "grad_norm": 0.11708397875230993, - "learning_rate": 1.6319787393276463e-05, - "loss": 0.0036, + "epoch": 0.37777777777777777, + "grad_norm": 0.6411240263535807, + "learning_rate": 1.9546247005377065e-05, + "loss": 0.0125, "step": 170 }, { - "epoch": 0.9293478260869565, - "grad_norm": 0.027987175186703777, - "learning_rate": 1.6274170481560527e-05, - "loss": 0.0015, + "epoch": 0.38, + "grad_norm": 0.8065526646361524, + "learning_rate": 1.9539072060241692e-05, + "loss": 0.0143, "step": 171 }, { - "epoch": 0.9347826086956522, - "grad_norm": 0.17986790848065237, - "learning_rate": 1.6228337224652307e-05, - "loss": 0.0059, + "epoch": 0.38222222222222224, + "grad_norm": 4.946465628240289, + 
"learning_rate": 1.9531842170334595e-05, + "loss": 0.3384, "step": 172 }, { - "epoch": 0.9402173913043478, - "grad_norm": 0.03867873116439446, - "learning_rate": 1.6182289202968663e-05, - "loss": 0.0017, + "epoch": 0.3844444444444444, + "grad_norm": 2.2974080415149496, + "learning_rate": 1.952455737729973e-05, + "loss": 0.0543, "step": 173 }, { - "epoch": 0.9456521739130435, - "grad_norm": 0.057278523890185604, - "learning_rate": 1.613602800433194e-05, - "loss": 0.0024, + "epoch": 0.38666666666666666, + "grad_norm": 0.8035463677156034, + "learning_rate": 1.951721772309728e-05, + "loss": 0.0133, "step": 174 }, { - "epoch": 0.9510869565217391, - "grad_norm": 2.728399164781685, - "learning_rate": 1.6089555223915226e-05, - "loss": 0.1588, + "epoch": 0.3888888888888889, + "grad_norm": 1.1219386061068553, + "learning_rate": 1.950982325000344e-05, + "loss": 0.0233, "step": 175 }, { - "epoch": 0.9565217391304348, - "grad_norm": 0.3768997196852311, - "learning_rate": 1.6042872464187352e-05, - "loss": 0.0054, + "epoch": 0.39111111111111113, + "grad_norm": 0.9064547536472516, + "learning_rate": 1.9502374000610152e-05, + "loss": 0.0147, "step": 176 }, { - "epoch": 0.9619565217391305, - "grad_norm": 4.011589996542784, - "learning_rate": 1.5995981334857625e-05, - "loss": 0.0702, + "epoch": 0.3933333333333333, + "grad_norm": 3.1578993980789813, + "learning_rate": 1.9494870017824877e-05, + "loss": 0.0273, "step": 177 }, { - "epoch": 0.967391304347826, - "grad_norm": 0.49004409324214177, - "learning_rate": 1.5948883452820326e-05, - "loss": 0.01, + "epoch": 0.39555555555555555, + "grad_norm": 1.1601713448937385, + "learning_rate": 1.9487311344870327e-05, + "loss": 0.0187, "step": 178 }, { - "epoch": 0.9728260869565217, - "grad_norm": 0.048813631073329034, - "learning_rate": 1.590158044209897e-05, - "loss": 0.002, + "epoch": 0.3977777777777778, + "grad_norm": 0.24618526143218464, + "learning_rate": 1.947969802528424e-05, + "loss": 0.004, "step": 179 }, { - "epoch": 0.9782608695652174, - "grad_norm": 0.09547901003362863, - "learning_rate": 1.5854073933790277e-05, - "loss": 0.0024, + "epoch": 0.4, + "grad_norm": 6.047913942388394, + "learning_rate": 1.9472030102919102e-05, + "loss": 0.2825, "step": 180 }, { - "epoch": 0.9836956521739131, - "grad_norm": 2.3086350812363565, - "learning_rate": 1.580636556600796e-05, - "loss": 0.0277, + "epoch": 0.4022222222222222, + "grad_norm": 0.304222239178378, + "learning_rate": 1.9464307621941926e-05, + "loss": 0.005, "step": 181 }, { - "epoch": 0.9891304347826086, - "grad_norm": 2.752485470216331, - "learning_rate": 1.575845698382622e-05, - "loss": 0.0671, + "epoch": 0.40444444444444444, + "grad_norm": 0.578698715672425, + "learning_rate": 1.945653062683397e-05, + "loss": 0.0065, "step": 182 }, { - "epoch": 0.9945652173913043, - "grad_norm": 0.08760080184190135, - "learning_rate": 1.5710349839223034e-05, - "loss": 0.0025, + "epoch": 0.4066666666666667, + "grad_norm": 0.33184308431280024, + "learning_rate": 1.9448699162390497e-05, + "loss": 0.0048, "step": 183 }, { - "epoch": 1.0, - "grad_norm": 0.052319179757302624, - "learning_rate": 1.566204579102317e-05, - "loss": 0.0016, + "epoch": 0.4088888888888889, + "grad_norm": 2.933148449120257, + "learning_rate": 1.9440813273720504e-05, + "loss": 0.1576, "step": 184 }, { - "epoch": 1.0054347826086956, - "grad_norm": 0.20188982483949725, - "learning_rate": 1.561354650484102e-05, - "loss": 0.0054, + "epoch": 0.4111111111111111, + "grad_norm": 5.765343866379219, + "learning_rate": 1.9432873006246483e-05, + "loss": 0.2532, "step": 
185 }, { - "epoch": 1.0108695652173914, - "grad_norm": 1.214861582615001, - "learning_rate": 1.556485365302313e-05, - "loss": 0.0095, + "epoch": 0.41333333333333333, + "grad_norm": 1.8672382535165626, + "learning_rate": 1.9424878405704134e-05, + "loss": 0.0216, "step": 186 }, { - "epoch": 1.016304347826087, - "grad_norm": 1.1857810014141275, - "learning_rate": 1.5515968914590568e-05, - "loss": 0.0161, + "epoch": 0.41555555555555557, + "grad_norm": 0.38100990213355157, + "learning_rate": 1.941682951814212e-05, + "loss": 0.005, "step": 187 }, { - "epoch": 1.0217391304347827, - "grad_norm": 0.19290187635263223, - "learning_rate": 1.546689397518101e-05, - "loss": 0.004, + "epoch": 0.4177777777777778, + "grad_norm": 0.507528502422951, + "learning_rate": 1.940872638992179e-05, + "loss": 0.0035, "step": 188 }, { - "epoch": 1.0271739130434783, - "grad_norm": 0.22326269659684472, - "learning_rate": 1.5417630526990613e-05, - "loss": 0.0044, + "epoch": 0.42, + "grad_norm": 4.61533410544654, + "learning_rate": 1.9400569067716927e-05, + "loss": 0.0694, "step": 189 }, { - "epoch": 1.0326086956521738, - "grad_norm": 0.0690691126927046, - "learning_rate": 1.5368180268715678e-05, - "loss": 0.0022, + "epoch": 0.4222222222222222, + "grad_norm": 2.196167396480173, + "learning_rate": 1.9392357598513463e-05, + "loss": 0.0125, "step": 190 }, { - "epoch": 1.0380434782608696, - "grad_norm": 0.519784946142706, - "learning_rate": 1.5318544905494063e-05, - "loss": 0.0075, + "epoch": 0.42444444444444446, + "grad_norm": 0.18179059657394495, + "learning_rate": 1.938409202960922e-05, + "loss": 0.0027, "step": 191 }, { - "epoch": 1.0434782608695652, - "grad_norm": 0.1210215491547705, - "learning_rate": 1.52687261488464e-05, - "loss": 0.0032, + "epoch": 0.4266666666666667, + "grad_norm": 0.22195030865286366, + "learning_rate": 1.9375772408613625e-05, + "loss": 0.0041, "step": 192 }, { - "epoch": 1.048913043478261, - "grad_norm": 0.1128182153705411, - "learning_rate": 1.5218725716617062e-05, - "loss": 0.0031, + "epoch": 0.4288888888888889, + "grad_norm": 0.08732569338817697, + "learning_rate": 1.936739878344745e-05, + "loss": 0.0025, "step": 193 }, { - "epoch": 1.0543478260869565, - "grad_norm": 0.0917279431010188, - "learning_rate": 1.5168545332914942e-05, - "loss": 0.0032, + "epoch": 0.4311111111111111, + "grad_norm": 0.09869207326839394, + "learning_rate": 1.9358971202342523e-05, + "loss": 0.0026, "step": 194 }, { - "epoch": 1.059782608695652, - "grad_norm": 0.1599750281188914, - "learning_rate": 1.5118186728054002e-05, - "loss": 0.0034, + "epoch": 0.43333333333333335, + "grad_norm": 1.2970724680749888, + "learning_rate": 1.935048971384147e-05, + "loss": 0.0229, "step": 195 }, { - "epoch": 1.065217391304348, - "grad_norm": 3.0052317701428906, - "learning_rate": 1.50676516384936e-05, - "loss": 0.2052, + "epoch": 0.43555555555555553, + "grad_norm": 0.8044530045668882, + "learning_rate": 1.93419543667974e-05, + "loss": 0.0085, "step": 196 }, { - "epoch": 1.0706521739130435, - "grad_norm": 0.09347487309598097, - "learning_rate": 1.5016941806778622e-05, - "loss": 0.0024, + "epoch": 0.43777777777777777, + "grad_norm": 0.14937039869163152, + "learning_rate": 1.9333365210373668e-05, + "loss": 0.0031, "step": 197 }, { - "epoch": 1.0760869565217392, - "grad_norm": 0.6368154943577347, - "learning_rate": 1.496605898147938e-05, - "loss": 0.0112, + "epoch": 0.44, + "grad_norm": 3.9775219820036893, + "learning_rate": 1.932472229404356e-05, + "loss": 0.0555, "step": 198 }, { - "epoch": 1.0815217391304348, - "grad_norm": 
0.08805765943523453, - "learning_rate": 1.4915004917131345e-05, - "loss": 0.0025, + "epoch": 0.44222222222222224, + "grad_norm": 1.3938042508621007, + "learning_rate": 1.931602566759001e-05, + "loss": 0.0129, "step": 199 }, { - "epoch": 1.0869565217391304, - "grad_norm": 0.05469514003374087, - "learning_rate": 1.4863781374174625e-05, - "loss": 0.002, + "epoch": 0.4444444444444444, + "grad_norm": 1.003378533226406, + "learning_rate": 1.930727538110534e-05, + "loss": 0.0069, "step": 200 }, { - "epoch": 1.0923913043478262, - "grad_norm": 0.10652940546536208, - "learning_rate": 1.4812390118893273e-05, - "loss": 0.0032, + "epoch": 0.44666666666666666, + "grad_norm": 1.7318781901964837, + "learning_rate": 1.929847148499093e-05, + "loss": 0.0206, "step": 201 }, { - "epoch": 1.0978260869565217, - "grad_norm": 4.207882558276106, - "learning_rate": 1.4760832923354375e-05, - "loss": 0.0583, + "epoch": 0.4488888888888889, + "grad_norm": 0.06473128468348852, + "learning_rate": 1.928961402995696e-05, + "loss": 0.0014, "step": 202 }, { - "epoch": 1.1032608695652173, - "grad_norm": 0.0699647885839302, - "learning_rate": 1.4709111565346948e-05, - "loss": 0.0026, + "epoch": 0.45111111111111113, + "grad_norm": 0.09469091301728669, + "learning_rate": 1.9280703067022114e-05, + "loss": 0.0017, "step": 203 }, { - "epoch": 1.108695652173913, - "grad_norm": 0.30166623168218903, - "learning_rate": 1.4657227828320637e-05, - "loss": 0.006, + "epoch": 0.4533333333333333, + "grad_norm": 1.1494476789266588, + "learning_rate": 1.927173864751327e-05, + "loss": 0.0184, "step": 204 }, { - "epoch": 1.1141304347826086, - "grad_norm": 4.199370993333585, - "learning_rate": 1.4605183501324231e-05, - "loss": 0.0775, + "epoch": 0.45555555555555555, + "grad_norm": 0.11935205837742928, + "learning_rate": 1.9262720823065217e-05, + "loss": 0.0023, "step": 205 }, { - "epoch": 1.1195652173913044, - "grad_norm": 0.32565218496952747, - "learning_rate": 1.4552980378943953e-05, + "epoch": 0.4577777777777778, + "grad_norm": 0.1427849942941257, + "learning_rate": 1.9253649645620363e-05, "loss": 0.0033, "step": 206 }, { - "epoch": 1.125, - "grad_norm": 0.0809703234967001, - "learning_rate": 1.4500620261241598e-05, - "loss": 0.0026, + "epoch": 0.46, + "grad_norm": 0.37278574542805715, + "learning_rate": 1.9244525167428412e-05, + "loss": 0.0037, "step": 207 }, { - "epoch": 1.1304347826086956, - "grad_norm": 0.06883017026031267, - "learning_rate": 1.4448104953692443e-05, - "loss": 0.0019, + "epoch": 0.4622222222222222, + "grad_norm": 1.7974748595874739, + "learning_rate": 1.923534744104609e-05, + "loss": 0.0646, "step": 208 }, { - "epoch": 1.1358695652173914, - "grad_norm": 0.08112137716749798, - "learning_rate": 1.4395436267123017e-05, - "loss": 0.0025, + "epoch": 0.46444444444444444, + "grad_norm": 0.2490133537317968, + "learning_rate": 1.922611651933683e-05, + "loss": 0.003, "step": 209 }, { - "epoch": 1.141304347826087, - "grad_norm": 0.0472362130550949, - "learning_rate": 1.4342616017648632e-05, - "loss": 0.0018, + "epoch": 0.4666666666666667, + "grad_norm": 0.693554821937507, + "learning_rate": 1.9216832455470466e-05, + "loss": 0.0059, "step": 210 }, { - "epoch": 1.1467391304347827, - "grad_norm": 0.0884620238410297, - "learning_rate": 1.4289646026610789e-05, - "loss": 0.0021, + "epoch": 0.4688888888888889, + "grad_norm": 2.08851937427616, + "learning_rate": 1.920749530292293e-05, + "loss": 0.0606, "step": 211 }, { - "epoch": 1.1521739130434783, - "grad_norm": 0.04795365977948435, - "learning_rate": 1.423652812051434e-05, - "loss": 0.0017, 
+ "epoch": 0.4711111111111111, + "grad_norm": 1.725048518105214, + "learning_rate": 1.9198105115475946e-05, + "loss": 0.0162, "step": 212 }, { - "epoch": 1.1576086956521738, - "grad_norm": 0.02935797027689571, - "learning_rate": 1.4183264130964545e-05, - "loss": 0.0015, + "epoch": 0.47333333333333333, + "grad_norm": 0.07031384275169696, + "learning_rate": 1.9188661947216712e-05, + "loss": 0.0013, "step": 213 }, { - "epoch": 1.1630434782608696, - "grad_norm": 0.0668820523334726, - "learning_rate": 1.4129855894603885e-05, - "loss": 0.0027, + "epoch": 0.47555555555555556, + "grad_norm": 0.02174852013709219, + "learning_rate": 1.9179165852537596e-05, + "loss": 0.0006, "step": 214 }, { - "epoch": 1.1684782608695652, - "grad_norm": 0.7758685627388171, - "learning_rate": 1.4076305253048748e-05, - "loss": 0.0105, + "epoch": 0.4777777777777778, + "grad_norm": 0.20785229830131394, + "learning_rate": 1.916961688613582e-05, + "loss": 0.0025, "step": 215 }, { - "epoch": 1.1739130434782608, - "grad_norm": 0.7009141120346845, - "learning_rate": 1.4022614052825918e-05, - "loss": 0.01, + "epoch": 0.48, + "grad_norm": 0.5405886033855503, + "learning_rate": 1.9160015103013153e-05, + "loss": 0.0034, "step": 216 }, { - "epoch": 1.1793478260869565, - "grad_norm": 0.058294067779879076, - "learning_rate": 1.3968784145308907e-05, - "loss": 0.002, + "epoch": 0.4822222222222222, + "grad_norm": 0.10364950393993827, + "learning_rate": 1.9150360558475574e-05, + "loss": 0.0021, "step": 217 }, { - "epoch": 1.184782608695652, - "grad_norm": 0.09580668260043325, - "learning_rate": 1.3914817386654112e-05, - "loss": 0.0028, + "epoch": 0.48444444444444446, + "grad_norm": 0.11811401385618839, + "learning_rate": 1.9140653308132977e-05, + "loss": 0.0017, "step": 218 }, { - "epoch": 1.190217391304348, - "grad_norm": 4.864872194559485, - "learning_rate": 1.3860715637736817e-05, - "loss": 0.1252, + "epoch": 0.4866666666666667, + "grad_norm": 0.22653479672501642, + "learning_rate": 1.9130893407898834e-05, + "loss": 0.0032, "step": 219 }, { - "epoch": 1.1956521739130435, - "grad_norm": 0.15310828918627564, - "learning_rate": 1.3806480764087027e-05, - "loss": 0.003, + "epoch": 0.4888888888888889, + "grad_norm": 0.08690287566824914, + "learning_rate": 1.912108091398988e-05, + "loss": 0.0019, "step": 220 }, { - "epoch": 1.2010869565217392, - "grad_norm": 0.3265801320494785, - "learning_rate": 1.3752114635825138e-05, - "loss": 0.005, + "epoch": 0.4911111111111111, + "grad_norm": 0.02892294756824045, + "learning_rate": 1.9111215882925787e-05, + "loss": 0.0007, "step": 221 }, { - "epoch": 1.2065217391304348, - "grad_norm": 4.409339908706341, - "learning_rate": 1.369761912759744e-05, - "loss": 0.1368, + "epoch": 0.49333333333333335, + "grad_norm": 0.6257709576293506, + "learning_rate": 1.9101298371528845e-05, + "loss": 0.0047, "step": 222 }, { - "epoch": 1.2119565217391304, - "grad_norm": 0.09658224964632216, - "learning_rate": 1.3642996118511504e-05, - "loss": 0.0027, + "epoch": 0.4955555555555556, + "grad_norm": 0.11497102293238928, + "learning_rate": 1.9091328436923624e-05, + "loss": 0.0017, "step": 223 }, { - "epoch": 1.2173913043478262, - "grad_norm": 0.13386998342251066, - "learning_rate": 1.358824749207136e-05, - "loss": 0.0029, + "epoch": 0.49777777777777776, + "grad_norm": 0.05135797027942428, + "learning_rate": 1.908130613653665e-05, + "loss": 0.0013, "step": 224 }, { - "epoch": 1.2228260869565217, - "grad_norm": 0.058694075695156535, - "learning_rate": 1.3533375136112563e-05, - "loss": 0.0019, + "epoch": 0.5, + "grad_norm": 
0.4090647810832787, + "learning_rate": 1.9071231528096074e-05, + "loss": 0.0055, "step": 225 }, { - "epoch": 1.2282608695652173, - "grad_norm": 0.1675736492580823, - "learning_rate": 1.3478380942737097e-05, - "loss": 0.0041, + "epoch": 0.5022222222222222, + "grad_norm": 0.009920172092546144, + "learning_rate": 1.9061104669631343e-05, + "loss": 0.0003, "step": 226 }, { - "epoch": 1.233695652173913, - "grad_norm": 0.6605378406587118, - "learning_rate": 1.3423266808248123e-05, - "loss": 0.0064, + "epoch": 0.5044444444444445, + "grad_norm": 0.019926724964198943, + "learning_rate": 1.9050925619472863e-05, + "loss": 0.0005, "step": 227 }, { - "epoch": 1.2391304347826086, - "grad_norm": 0.07582983219640445, - "learning_rate": 1.3368034633084603e-05, - "loss": 0.0021, + "epoch": 0.5066666666666667, + "grad_norm": 0.020201347419880836, + "learning_rate": 1.9040694436251657e-05, + "loss": 0.0006, "step": 228 }, { - "epoch": 1.2445652173913044, - "grad_norm": 0.11839256459523798, - "learning_rate": 1.331268632175576e-05, - "loss": 0.0033, + "epoch": 0.5088888888888888, + "grad_norm": 0.03657450250424581, + "learning_rate": 1.9030411178899037e-05, + "loss": 0.001, "step": 229 }, { - "epoch": 1.25, - "grad_norm": 0.498989993420891, - "learning_rate": 1.3257223782775412e-05, - "loss": 0.0058, + "epoch": 0.5111111111111111, + "grad_norm": 0.04735269179243291, + "learning_rate": 1.902007590664626e-05, + "loss": 0.0009, "step": 230 }, { - "epoch": 1.2554347826086958, - "grad_norm": 0.0627689672183379, - "learning_rate": 1.3201648928596164e-05, - "loss": 0.0028, + "epoch": 0.5133333333333333, + "grad_norm": 0.29877003109646194, + "learning_rate": 1.900968867902419e-05, + "loss": 0.0037, "step": 231 }, { - "epoch": 1.2608695652173914, - "grad_norm": 0.44003082591712833, - "learning_rate": 1.3145963675543451e-05, - "loss": 0.0056, + "epoch": 0.5155555555555555, + "grad_norm": 0.7476010813417763, + "learning_rate": 1.8999249555862953e-05, + "loss": 0.0047, "step": 232 }, { - "epoch": 1.266304347826087, - "grad_norm": 3.9655617256713556, - "learning_rate": 1.3090169943749475e-05, - "loss": 0.0738, + "epoch": 0.5177777777777778, + "grad_norm": 0.08395193506568059, + "learning_rate": 1.8988758597291577e-05, + "loss": 0.0014, "step": 233 }, { - "epoch": 1.2717391304347827, - "grad_norm": 0.1490491896911272, - "learning_rate": 1.3034269657086993e-05, - "loss": 0.003, + "epoch": 0.52, + "grad_norm": 3.498570528457473, + "learning_rate": 1.8978215863737675e-05, + "loss": 0.1936, "step": 234 }, { - "epoch": 1.2771739130434783, - "grad_norm": 0.255678738387853, - "learning_rate": 1.2978264743102964e-05, - "loss": 0.0036, + "epoch": 0.5222222222222223, + "grad_norm": 2.8043195930483718, + "learning_rate": 1.8967621415927087e-05, + "loss": 0.0709, "step": 235 }, { - "epoch": 1.2826086956521738, - "grad_norm": 0.08658556472142168, - "learning_rate": 1.2922157132952106e-05, - "loss": 0.003, + "epoch": 0.5244444444444445, + "grad_norm": 2.066817073021631, + "learning_rate": 1.8956975314883512e-05, + "loss": 0.0404, "step": 236 }, { - "epoch": 1.2880434782608696, - "grad_norm": 0.056388528409829865, - "learning_rate": 1.286594876133028e-05, - "loss": 0.0016, + "epoch": 0.5266666666666666, + "grad_norm": 0.04165662882960646, + "learning_rate": 1.8946277621928174e-05, + "loss": 0.001, "step": 237 }, { - "epoch": 1.2934782608695652, - "grad_norm": 1.5398049755885386, - "learning_rate": 1.2809641566407802e-05, - "loss": 0.0378, + "epoch": 0.5288888888888889, + "grad_norm": 0.06189450698566103, + "learning_rate": 
1.893552839867947e-05, + "loss": 0.0009, "step": 238 }, { - "epoch": 1.2989130434782608, - "grad_norm": 0.036566689298081184, - "learning_rate": 1.27532374897626e-05, - "loss": 0.0012, + "epoch": 0.5311111111111111, + "grad_norm": 0.5060993991960864, + "learning_rate": 1.8924727707052607e-05, + "loss": 0.0063, "step": 239 }, { - "epoch": 1.3043478260869565, - "grad_norm": 0.04920293791313143, - "learning_rate": 1.2696738476313261e-05, - "loss": 0.0017, + "epoch": 0.5333333333333333, + "grad_norm": 0.09534205897106707, + "learning_rate": 1.8913875609259246e-05, + "loss": 0.0016, "step": 240 }, { - "epoch": 1.309782608695652, - "grad_norm": 0.1402817359911882, - "learning_rate": 1.2640146474251979e-05, - "loss": 0.0036, + "epoch": 0.5355555555555556, + "grad_norm": 0.11551302393958494, + "learning_rate": 1.890297216780715e-05, + "loss": 0.0019, "step": 241 }, { - "epoch": 1.315217391304348, - "grad_norm": 0.06831135225959813, - "learning_rate": 1.258346343497736e-05, - "loss": 0.0025, + "epoch": 0.5377777777777778, + "grad_norm": 0.021537899083499033, + "learning_rate": 1.8892017445499812e-05, + "loss": 0.0006, "step": 242 }, { - "epoch": 1.3206521739130435, - "grad_norm": 0.028285907167631727, - "learning_rate": 1.2526691313027153e-05, - "loss": 0.001, + "epoch": 0.54, + "grad_norm": 4.109682603988748, + "learning_rate": 1.8881011505436114e-05, + "loss": 0.116, "step": 243 }, { - "epoch": 1.3260869565217392, - "grad_norm": 0.33707980146121225, - "learning_rate": 1.2469832066010843e-05, - "loss": 0.0074, + "epoch": 0.5422222222222223, + "grad_norm": 2.1315574975345046, + "learning_rate": 1.8869954411009942e-05, + "loss": 0.0148, "step": 244 }, { - "epoch": 1.3315217391304348, - "grad_norm": 0.02312342530538864, - "learning_rate": 1.2412887654542147e-05, - "loss": 0.001, + "epoch": 0.5444444444444444, + "grad_norm": 0.0688679369542214, + "learning_rate": 1.8858846225909832e-05, + "loss": 0.0015, "step": 245 }, { - "epoch": 1.3369565217391304, - "grad_norm": 0.026427047059385186, - "learning_rate": 1.2355860042171421e-05, - "loss": 0.0011, + "epoch": 0.5466666666666666, + "grad_norm": 0.06778584694493012, + "learning_rate": 1.8847687014118596e-05, + "loss": 0.0012, "step": 246 }, { - "epoch": 1.3423913043478262, - "grad_norm": 2.9263468296261164, - "learning_rate": 1.2298751195317935e-05, - "loss": 0.1557, + "epoch": 0.5488888888888889, + "grad_norm": 0.6806254068060433, + "learning_rate": 1.8836476839912967e-05, + "loss": 0.0091, "step": 247 }, { - "epoch": 1.3478260869565217, - "grad_norm": 0.020548021429656328, - "learning_rate": 1.224156308320208e-05, - "loss": 0.0009, + "epoch": 0.5511111111111111, + "grad_norm": 0.31908447257969436, + "learning_rate": 1.8825215767863215e-05, + "loss": 0.0033, "step": 248 }, { - "epoch": 1.3532608695652173, - "grad_norm": 0.025684644607937637, - "learning_rate": 1.2184297677777463e-05, - "loss": 0.0011, + "epoch": 0.5533333333333333, + "grad_norm": 0.4751718889273426, + "learning_rate": 1.8813903862832776e-05, + "loss": 0.0052, "step": 249 }, { - "epoch": 1.358695652173913, - "grad_norm": 0.4277199026740869, - "learning_rate": 1.2126956953662914e-05, - "loss": 0.0074, + "epoch": 0.5555555555555556, + "grad_norm": 0.20947794839156766, + "learning_rate": 1.8802541189977893e-05, + "loss": 0.0028, "step": 250 }, { - "epoch": 1.3641304347826086, - "grad_norm": 0.722362923284817, - "learning_rate": 1.2069542888074386e-05, - "loss": 0.0094, + "epoch": 0.5577777777777778, + "grad_norm": 2.9506235366062827, + "learning_rate": 1.879112781474722e-05, + "loss": 
0.0171, "step": 251 }, { - "epoch": 1.3695652173913042, - "grad_norm": 0.05042192018129352, - "learning_rate": 1.2012057460756786e-05, - "loss": 0.0016, + "epoch": 0.56, + "grad_norm": 0.08684695871277402, + "learning_rate": 1.8779663802881465e-05, + "loss": 0.0014, "step": 252 }, { - "epoch": 1.375, - "grad_norm": 0.04160962471056512, - "learning_rate": 1.1954502653915704e-05, - "loss": 0.0014, + "epoch": 0.5622222222222222, + "grad_norm": 0.9141387836477947, + "learning_rate": 1.876814922041299e-05, + "loss": 0.007, "step": 253 }, { - "epoch": 1.3804347826086958, - "grad_norm": 0.04523201782339563, - "learning_rate": 1.1896880452149077e-05, - "loss": 0.0016, + "epoch": 0.5644444444444444, + "grad_norm": 0.03278073214413208, + "learning_rate": 1.8756584133665447e-05, + "loss": 0.0008, "step": 254 }, { - "epoch": 1.3858695652173914, - "grad_norm": 0.023639170674016628, - "learning_rate": 1.1839192842378737e-05, - "loss": 0.0009, + "epoch": 0.5666666666666667, + "grad_norm": 0.4975766803037279, + "learning_rate": 1.8744968609253398e-05, + "loss": 0.0066, "step": 255 }, { - "epoch": 1.391304347826087, - "grad_norm": 0.04866250108659108, - "learning_rate": 1.1781441813781911e-05, - "loss": 0.0014, + "epoch": 0.5688888888888889, + "grad_norm": 3.3665570433316074, + "learning_rate": 1.8733302714081915e-05, + "loss": 0.1015, "step": 256 }, { - "epoch": 1.3967391304347827, - "grad_norm": 0.027392748713626538, - "learning_rate": 1.1723629357722622e-05, - "loss": 0.001, + "epoch": 0.5711111111111111, + "grad_norm": 1.6262601683804174, + "learning_rate": 1.8721586515346204e-05, + "loss": 0.0097, "step": 257 }, { - "epoch": 1.4021739130434783, - "grad_norm": 0.04956045333392312, - "learning_rate": 1.1665757467683025e-05, - "loss": 0.0013, + "epoch": 0.5733333333333334, + "grad_norm": 0.05832173386372106, + "learning_rate": 1.870982008053123e-05, + "loss": 0.0014, "step": 258 }, { - "epoch": 1.4076086956521738, - "grad_norm": 0.287445593085176, - "learning_rate": 1.1607828139194683e-05, - "loss": 0.0051, + "epoch": 0.5755555555555556, + "grad_norm": 0.05065640448521942, + "learning_rate": 1.86980034774113e-05, + "loss": 0.0012, "step": 259 }, { - "epoch": 1.4130434782608696, - "grad_norm": 0.13531127988753577, - "learning_rate": 1.1549843369769733e-05, - "loss": 0.0023, + "epoch": 0.5777777777777777, + "grad_norm": 0.09194259616327709, + "learning_rate": 1.8686136774049704e-05, + "loss": 0.0018, "step": 260 }, { - "epoch": 1.4184782608695652, - "grad_norm": 0.16453092649100554, - "learning_rate": 1.1491805158832028e-05, - "loss": 0.0031, + "epoch": 0.58, + "grad_norm": 0.04062493498888717, + "learning_rate": 1.86742200387983e-05, + "loss": 0.0009, "step": 261 }, { - "epoch": 1.4239130434782608, - "grad_norm": 1.4301870845043336, - "learning_rate": 1.1433715507648173e-05, - "loss": 0.0166, + "epoch": 0.5822222222222222, + "grad_norm": 0.10457791506019953, + "learning_rate": 1.866225334029712e-05, + "loss": 0.0024, "step": 262 }, { - "epoch": 1.4293478260869565, - "grad_norm": 0.06079450292325032, - "learning_rate": 1.1375576419258543e-05, - "loss": 0.0016, + "epoch": 0.5844444444444444, + "grad_norm": 0.036927768185189454, + "learning_rate": 1.8650236747474007e-05, + "loss": 0.0007, "step": 263 }, { - "epoch": 1.434782608695652, - "grad_norm": 0.12935761070271598, - "learning_rate": 1.1317389898408188e-05, - "loss": 0.0022, + "epoch": 0.5866666666666667, + "grad_norm": 7.786967052939128, + "learning_rate": 1.8638170329544164e-05, + "loss": 0.472, "step": 264 }, { - "epoch": 1.440217391304348, - 
"grad_norm": 0.06441466437879496, - "learning_rate": 1.125915795147773e-05, - "loss": 0.0017, + "epoch": 0.5888888888888889, + "grad_norm": 0.06831447838510404, + "learning_rate": 1.8626054156009807e-05, + "loss": 0.0015, "step": 265 }, { - "epoch": 1.4456521739130435, - "grad_norm": 0.11938010559111087, - "learning_rate": 1.1200882586414168e-05, - "loss": 0.0021, + "epoch": 0.5911111111111111, + "grad_norm": 0.11021401160932005, + "learning_rate": 1.8613888296659736e-05, + "loss": 0.0027, "step": 266 }, { - "epoch": 1.4510869565217392, - "grad_norm": 0.14576252527987352, - "learning_rate": 1.114256581266162e-05, - "loss": 0.0032, + "epoch": 0.5933333333333334, + "grad_norm": 0.07418537215195235, + "learning_rate": 1.860167282156894e-05, + "loss": 0.0016, "step": 267 }, { - "epoch": 1.4565217391304348, - "grad_norm": 0.8091624068148694, - "learning_rate": 1.1084209641092083e-05, - "loss": 0.0098, + "epoch": 0.5955555555555555, + "grad_norm": 0.16482759421027338, + "learning_rate": 1.8589407801098192e-05, + "loss": 0.0028, "step": 268 }, { - "epoch": 1.4619565217391304, - "grad_norm": 0.07301592812987565, - "learning_rate": 1.1025816083936036e-05, - "loss": 0.0021, + "epoch": 0.5977777777777777, + "grad_norm": 0.6066618288753074, + "learning_rate": 1.857709330589364e-05, + "loss": 0.0145, "step": 269 }, { - "epoch": 1.4673913043478262, - "grad_norm": 0.019465384139083376, - "learning_rate": 1.0967387154713104e-05, - "loss": 0.0008, + "epoch": 0.6, + "grad_norm": 0.07823660968025212, + "learning_rate": 1.856472940688642e-05, + "loss": 0.0025, "step": 270 }, { - "epoch": 1.4728260869565217, - "grad_norm": 0.02684807806576838, - "learning_rate": 1.0908924868162605e-05, - "loss": 0.0009, + "epoch": 0.6022222222222222, + "grad_norm": 5.652540597325386, + "learning_rate": 1.8552316175292214e-05, + "loss": 0.1483, "step": 271 }, { - "epoch": 1.4782608695652173, - "grad_norm": 2.0536809709774086, - "learning_rate": 1.0850431240174066e-05, - "loss": 0.2241, + "epoch": 0.6044444444444445, + "grad_norm": 0.10820834145938359, + "learning_rate": 1.8539853682610876e-05, + "loss": 0.0021, "step": 272 }, { - "epoch": 1.483695652173913, - "grad_norm": 0.5395466577497267, - "learning_rate": 1.0791908287717744e-05, - "loss": 0.0097, + "epoch": 0.6066666666666667, + "grad_norm": 0.7687408534142888, + "learning_rate": 1.8527342000625984e-05, + "loss": 0.0146, "step": 273 }, { - "epoch": 1.4891304347826086, - "grad_norm": 3.6218348045652107, - "learning_rate": 1.073335802877504e-05, - "loss": 0.0488, + "epoch": 0.6088888888888889, + "grad_norm": 0.2737599439366285, + "learning_rate": 1.8514781201404464e-05, + "loss": 0.0051, "step": 274 }, { - "epoch": 1.4945652173913042, - "grad_norm": 0.0346000232826567, - "learning_rate": 1.0674782482268953e-05, - "loss": 0.0013, + "epoch": 0.6111111111111112, + "grad_norm": 0.14603693140586155, + "learning_rate": 1.8502171357296144e-05, + "loss": 0.0045, "step": 275 }, { - "epoch": 1.5, - "grad_norm": 0.031039844572176237, - "learning_rate": 1.0616183667994435e-05, - "loss": 0.0011, + "epoch": 0.6133333333333333, + "grad_norm": 0.20943307181041065, + "learning_rate": 1.8489512540933346e-05, + "loss": 0.0046, "step": 276 }, { - "epoch": 1.5054347826086958, - "grad_norm": 1.3869410436009917, - "learning_rate": 1.0557563606548751e-05, - "loss": 0.02, + "epoch": 0.6155555555555555, + "grad_norm": 1.615648876766883, + "learning_rate": 1.8476804825230482e-05, + "loss": 0.0246, "step": 277 }, { - "epoch": 1.5108695652173914, - "grad_norm": 0.31857812561228843, - "learning_rate": 
1.0498924319261816e-05, - "loss": 0.0046, + "epoch": 0.6177777777777778, + "grad_norm": 0.30319699310782766, + "learning_rate": 1.8464048283383613e-05, + "loss": 0.008, "step": 278 }, { - "epoch": 1.516304347826087, - "grad_norm": 0.018901071551922013, - "learning_rate": 1.0440267828126478e-05, - "loss": 0.0007, + "epoch": 0.62, + "grad_norm": 0.4356219324001113, + "learning_rate": 1.8451242988870043e-05, + "loss": 0.0077, "step": 279 }, { - "epoch": 1.5217391304347827, - "grad_norm": 0.35747451319055523, - "learning_rate": 1.0381596155728823e-05, - "loss": 0.0077, + "epoch": 0.6222222222222222, + "grad_norm": 2.143540837646445, + "learning_rate": 1.843838901544789e-05, + "loss": 0.0368, "step": 280 }, { - "epoch": 1.5271739130434783, - "grad_norm": 0.038504499041816166, - "learning_rate": 1.0322911325178402e-05, - "loss": 0.0012, + "epoch": 0.6244444444444445, + "grad_norm": 0.1955134676174524, + "learning_rate": 1.842548643715566e-05, + "loss": 0.0047, "step": 281 }, { - "epoch": 1.5326086956521738, - "grad_norm": 0.061533456725221265, - "learning_rate": 1.0264215360038483e-05, - "loss": 0.0018, + "epoch": 0.6266666666666667, + "grad_norm": 0.8392449567222335, + "learning_rate": 1.8412535328311813e-05, + "loss": 0.017, "step": 282 }, { - "epoch": 1.5380434782608696, - "grad_norm": 0.053405604412389306, - "learning_rate": 1.0205510284256286e-05, - "loss": 0.0014, + "epoch": 0.6288888888888889, + "grad_norm": 0.9320595731120124, + "learning_rate": 1.839953576351436e-05, + "loss": 0.0172, "step": 283 }, { - "epoch": 1.5434782608695652, - "grad_norm": 0.1699993644991474, - "learning_rate": 1.0146798122093167e-05, - "loss": 0.0029, + "epoch": 0.6311111111111111, + "grad_norm": 2.2354653928528707, + "learning_rate": 1.8386487817640398e-05, + "loss": 0.1762, "step": 284 }, { - "epoch": 1.5489130434782608, - "grad_norm": 0.07043260478387495, - "learning_rate": 1.0088080898054852e-05, - "loss": 0.0013, + "epoch": 0.6333333333333333, + "grad_norm": 0.14685979372957694, + "learning_rate": 1.837339156584572e-05, + "loss": 0.0029, "step": 285 }, { - "epoch": 1.5543478260869565, - "grad_norm": 0.050883436804006456, - "learning_rate": 1.00293606368216e-05, - "loss": 0.0018, + "epoch": 0.6355555555555555, + "grad_norm": 0.07009896969692707, + "learning_rate": 1.8360247083564343e-05, + "loss": 0.0019, "step": 286 }, { - "epoch": 1.5597826086956523, - "grad_norm": 0.2015858838482068, - "learning_rate": 9.970639363178401e-06, - "loss": 0.0034, + "epoch": 0.6377777777777778, + "grad_norm": 0.16311479362074124, + "learning_rate": 1.834705444650809e-05, + "loss": 0.0046, "step": 287 }, { - "epoch": 1.5652173913043477, - "grad_norm": 0.15696624949542315, - "learning_rate": 9.91191910194515e-06, + "epoch": 0.64, + "grad_norm": 0.1061231367334972, + "learning_rate": 1.8333813730666158e-05, "loss": 0.0024, "step": 288 }, { - "epoch": 1.5706521739130435, - "grad_norm": 0.016094697472839387, - "learning_rate": 9.853201877906836e-06, - "loss": 0.0007, + "epoch": 0.6422222222222222, + "grad_norm": 0.17842166938797405, + "learning_rate": 1.8320525012304685e-05, + "loss": 0.0049, "step": 289 }, { - "epoch": 1.5760869565217392, - "grad_norm": 2.6447259699825225, - "learning_rate": 9.79448971574372e-06, - "loss": 0.0868, + "epoch": 0.6444444444444445, + "grad_norm": 0.561962789703285, + "learning_rate": 1.8307188367966288e-05, + "loss": 0.0101, "step": 290 }, { - "epoch": 1.5815217391304348, - "grad_norm": 0.034146999181789345, - "learning_rate": 9.73578463996152e-06, - "loss": 0.001, + "epoch": 0.6466666666666666, + 
"grad_norm": 0.06969868784740545, + "learning_rate": 1.8293803874469645e-05, + "loss": 0.0016, "step": 291 }, { - "epoch": 1.5869565217391304, - "grad_norm": 2.3913058327100507, - "learning_rate": 9.677088674821601e-06, - "loss": 0.0933, + "epoch": 0.6488888888888888, + "grad_norm": 0.06580685526433887, + "learning_rate": 1.8280371608909034e-05, + "loss": 0.0018, "step": 292 }, { - "epoch": 1.5923913043478262, - "grad_norm": 2.7206555164113113, - "learning_rate": 9.618403844271179e-06, - "loss": 0.0834, + "epoch": 0.6511111111111111, + "grad_norm": 1.848153754138197, + "learning_rate": 1.8266891648653916e-05, + "loss": 0.0364, "step": 293 }, { - "epoch": 1.5978260869565217, - "grad_norm": 2.04432325341852, - "learning_rate": 9.559732171873524e-06, - "loss": 0.0509, + "epoch": 0.6533333333333333, + "grad_norm": 0.078407073375797, + "learning_rate": 1.8253364071348457e-05, + "loss": 0.0022, "step": 294 }, { - "epoch": 1.6032608695652173, - "grad_norm": 3.408481044696874, - "learning_rate": 9.50107568073819e-06, - "loss": 0.1523, + "epoch": 0.6555555555555556, + "grad_norm": 2.384346201572708, + "learning_rate": 1.8239788954911102e-05, + "loss": 0.0302, "step": 295 }, { - "epoch": 1.608695652173913, - "grad_norm": 0.15857623535162915, - "learning_rate": 9.442436393451252e-06, - "loss": 0.0037, + "epoch": 0.6577777777777778, + "grad_norm": 2.632928591421819, + "learning_rate": 1.8226166377534113e-05, + "loss": 0.2059, "step": 296 }, { - "epoch": 1.6141304347826086, - "grad_norm": 0.48149742863897177, - "learning_rate": 9.383816332005569e-06, - "loss": 0.0066, + "epoch": 0.66, + "grad_norm": 0.06278279644913055, + "learning_rate": 1.8212496417683135e-05, + "loss": 0.0018, "step": 297 }, { - "epoch": 1.6195652173913042, - "grad_norm": 0.43146507945514945, - "learning_rate": 9.325217517731047e-06, - "loss": 0.0063, + "epoch": 0.6622222222222223, + "grad_norm": 0.06966629563059502, + "learning_rate": 1.8198779154096735e-05, + "loss": 0.0018, "step": 298 }, { - "epoch": 1.625, - "grad_norm": 3.7183270965419526, - "learning_rate": 9.266641971224963e-06, - "loss": 0.0717, + "epoch": 0.6644444444444444, + "grad_norm": 0.11766331582693176, + "learning_rate": 1.8185014665785936e-05, + "loss": 0.0028, "step": 299 }, { - "epoch": 1.6304347826086958, - "grad_norm": 0.6284145395909966, - "learning_rate": 9.208091712282261e-06, - "loss": 0.0113, + "epoch": 0.6666666666666666, + "grad_norm": 0.555877740180206, + "learning_rate": 1.8171203032033788e-05, + "loss": 0.0091, "step": 300 }, { - "epoch": 1.6358695652173914, - "grad_norm": 0.12204274733613643, - "learning_rate": 9.149568759825937e-06, - "loss": 0.003, + "epoch": 0.6688888888888889, + "grad_norm": 0.09334898428512266, + "learning_rate": 1.8157344332394885e-05, + "loss": 0.0023, "step": 301 }, { - "epoch": 1.641304347826087, - "grad_norm": 1.1716856729713159, - "learning_rate": 9.091075131837399e-06, - "loss": 0.016, + "epoch": 0.6711111111111111, + "grad_norm": 1.1498766072687665, + "learning_rate": 1.814343864669493e-05, + "loss": 0.0296, "step": 302 }, { - "epoch": 1.6467391304347827, - "grad_norm": 2.3073801254975743, - "learning_rate": 9.032612845286896e-06, - "loss": 0.0625, + "epoch": 0.6733333333333333, + "grad_norm": 0.5513124736006071, + "learning_rate": 1.8129486055030255e-05, + "loss": 0.0143, "step": 303 }, { - "epoch": 1.6521739130434783, - "grad_norm": 0.24584369141616186, - "learning_rate": 8.974183916063967e-06, - "loss": 0.0038, + "epoch": 0.6755555555555556, + "grad_norm": 0.18761100752778656, + "learning_rate": 
1.8115486637767384e-05, + "loss": 0.0045, "step": 304 }, { - "epoch": 1.6576086956521738, - "grad_norm": 0.896272637025756, - "learning_rate": 8.915790358907924e-06, - "loss": 0.0124, + "epoch": 0.6777777777777778, + "grad_norm": 0.141402249888242, + "learning_rate": 1.8101440475542533e-05, + "loss": 0.0032, "step": 305 }, { - "epoch": 1.6630434782608696, - "grad_norm": 3.8696382415332957, - "learning_rate": 8.857434187338381e-06, - "loss": 0.0462, + "epoch": 0.68, + "grad_norm": 0.11094764550958562, + "learning_rate": 1.8087347649261183e-05, + "loss": 0.0026, "step": 306 }, { - "epoch": 1.6684782608695652, - "grad_norm": 0.12503032249914797, - "learning_rate": 8.799117413585836e-06, - "loss": 0.0025, + "epoch": 0.6822222222222222, + "grad_norm": 0.15459930782503847, + "learning_rate": 1.8073208240097598e-05, + "loss": 0.0029, "step": 307 }, { - "epoch": 1.6739130434782608, - "grad_norm": 0.45154839467695335, - "learning_rate": 8.740842048522268e-06, - "loss": 0.0061, + "epoch": 0.6844444444444444, + "grad_norm": 0.1994271341598364, + "learning_rate": 1.805902232949435e-05, + "loss": 0.0031, "step": 308 }, { - "epoch": 1.6793478260869565, - "grad_norm": 0.09419278918622512, - "learning_rate": 8.682610101591813e-06, - "loss": 0.002, + "epoch": 0.6866666666666666, + "grad_norm": 0.0813377377122633, + "learning_rate": 1.8044789999161864e-05, + "loss": 0.0017, "step": 309 }, { - "epoch": 1.6847826086956523, - "grad_norm": 0.4958479599321362, - "learning_rate": 8.624423580741462e-06, - "loss": 0.0086, + "epoch": 0.6888888888888889, + "grad_norm": 0.5891758843914501, + "learning_rate": 1.8030511331077945e-05, + "loss": 0.0096, "step": 310 }, { - "epoch": 1.6902173913043477, - "grad_norm": 0.11770008527271246, - "learning_rate": 8.56628449235183e-06, - "loss": 0.0025, + "epoch": 0.6911111111111111, + "grad_norm": 0.18776208744216005, + "learning_rate": 1.8016186407487287e-05, + "loss": 0.0036, "step": 311 }, { - "epoch": 1.6956521739130435, - "grad_norm": 0.369565128723298, - "learning_rate": 8.508194841167975e-06, - "loss": 0.0059, + "epoch": 0.6933333333333334, + "grad_norm": 0.10862822433015418, + "learning_rate": 1.8001815310901036e-05, + "loss": 0.0022, "step": 312 }, { - "epoch": 1.7010869565217392, - "grad_norm": 0.06235754588692365, - "learning_rate": 8.450156630230267e-06, - "loss": 0.0019, + "epoch": 0.6955555555555556, + "grad_norm": 0.047131675961577676, + "learning_rate": 1.7987398124096274e-05, + "loss": 0.0011, "step": 313 }, { - "epoch": 1.7065217391304348, - "grad_norm": 0.02787223131850643, - "learning_rate": 8.39217186080532e-06, - "loss": 0.0012, + "epoch": 0.6977777777777778, + "grad_norm": 2.7295542665862498, + "learning_rate": 1.7972934930115568e-05, + "loss": 0.0262, "step": 314 }, { - "epoch": 1.7119565217391304, - "grad_norm": 0.03719997929743275, - "learning_rate": 8.334242532316977e-06, - "loss": 0.0012, + "epoch": 0.7, + "grad_norm": 0.2695775557626087, + "learning_rate": 1.7958425812266493e-05, + "loss": 0.0052, "step": 315 }, { - "epoch": 1.7173913043478262, - "grad_norm": 0.42795195182267215, - "learning_rate": 8.276370642277383e-06, - "loss": 0.0048, + "epoch": 0.7022222222222222, + "grad_norm": 0.18307531748663033, + "learning_rate": 1.7943870854121126e-05, + "loss": 0.0026, "step": 316 }, { - "epoch": 1.7228260869565217, - "grad_norm": 0.9372903840892463, - "learning_rate": 8.21855818621809e-06, - "loss": 0.0203, + "epoch": 0.7044444444444444, + "grad_norm": 0.1261476876391146, + "learning_rate": 1.7929270139515606e-05, + "loss": 0.0038, "step": 317 }, { - 
"epoch": 1.7282608695652173, - "grad_norm": 0.13870817101483046, - "learning_rate": 8.160807157621262e-06, - "loss": 0.0025, + "epoch": 0.7066666666666667, + "grad_norm": 3.2139458942657675, + "learning_rate": 1.7914623752549606e-05, + "loss": 0.0586, "step": 318 }, { - "epoch": 1.733695652173913, - "grad_norm": 0.2445880882562458, - "learning_rate": 8.103119547850924e-06, - "loss": 0.0037, + "epoch": 0.7088888888888889, + "grad_norm": 0.16389595523645703, + "learning_rate": 1.789993177758588e-05, + "loss": 0.0024, "step": 319 }, { - "epoch": 1.7391304347826086, - "grad_norm": 0.06926518467785787, - "learning_rate": 8.045497346084297e-06, - "loss": 0.002, + "epoch": 0.7111111111111111, + "grad_norm": 3.5797498316041194, + "learning_rate": 1.7885194299249774e-05, + "loss": 0.2762, "step": 320 }, { - "epoch": 1.7445652173913042, - "grad_norm": 0.029704630377944685, - "learning_rate": 7.98794253924322e-06, - "loss": 0.0011, + "epoch": 0.7133333333333334, + "grad_norm": 3.651241292420766, + "learning_rate": 1.787041140242872e-05, + "loss": 0.0286, "step": 321 }, { - "epoch": 1.75, - "grad_norm": 0.02657434909385738, - "learning_rate": 7.930457111925616e-06, - "loss": 0.0012, + "epoch": 0.7155555555555555, + "grad_norm": 0.16646077195366074, + "learning_rate": 1.785558317227177e-05, + "loss": 0.0026, "step": 322 }, { - "epoch": 1.7554347826086958, - "grad_norm": 0.087118861417369, - "learning_rate": 7.873043046337086e-06, - "loss": 0.002, + "epoch": 0.7177777777777777, + "grad_norm": 0.15348159227004488, + "learning_rate": 1.7840709694189082e-05, + "loss": 0.0029, "step": 323 }, { - "epoch": 1.7608695652173914, - "grad_norm": 0.029028883768708425, - "learning_rate": 7.815702322222539e-06, - "loss": 0.0009, + "epoch": 0.72, + "grad_norm": 0.051550658950101275, + "learning_rate": 1.782579105385145e-05, + "loss": 0.0011, "step": 324 }, { - "epoch": 1.766304347826087, - "grad_norm": 0.574091822654542, - "learning_rate": 7.758436916797923e-06, - "loss": 0.0092, + "epoch": 0.7222222222222222, + "grad_norm": 3.3430686212505654, + "learning_rate": 1.7810827337189806e-05, + "loss": 0.1291, "step": 325 }, { - "epoch": 1.7717391304347827, - "grad_norm": 0.043721730276414336, - "learning_rate": 7.701248804682069e-06, - "loss": 0.0014, + "epoch": 0.7244444444444444, + "grad_norm": 0.05633528070956402, + "learning_rate": 1.7795818630394705e-05, + "loss": 0.0015, "step": 326 }, { - "epoch": 1.7771739130434783, - "grad_norm": 2.4824141009923726, - "learning_rate": 7.64413995782858e-06, - "loss": 0.1501, + "epoch": 0.7266666666666667, + "grad_norm": 0.042544728787952485, + "learning_rate": 1.7780765019915854e-05, + "loss": 0.0011, "step": 327 }, { - "epoch": 1.7826086956521738, - "grad_norm": 0.3656857182755404, - "learning_rate": 7.5871123454578534e-06, - "loss": 0.0055, + "epoch": 0.7288888888888889, + "grad_norm": 0.20663780841717014, + "learning_rate": 1.776566659246161e-05, + "loss": 0.0064, "step": 328 }, { - "epoch": 1.7880434782608696, - "grad_norm": 0.030565125424490584, - "learning_rate": 7.530167933989161e-06, - "loss": 0.001, + "epoch": 0.7311111111111112, + "grad_norm": 0.10117150253869212, + "learning_rate": 1.7750523434998454e-05, + "loss": 0.0017, "step": 329 }, { - "epoch": 1.7934782608695652, - "grad_norm": 0.6771809217496879, - "learning_rate": 7.47330868697285e-06, - "loss": 0.01, + "epoch": 0.7333333333333333, + "grad_norm": 0.04912357223044673, + "learning_rate": 1.773533563475053e-05, + "loss": 0.0015, "step": 330 }, { - "epoch": 1.7989130434782608, - "grad_norm": 0.24573870561094346, - 
"learning_rate": 7.4165365650226425e-06, - "loss": 0.0049, + "epoch": 0.7355555555555555, + "grad_norm": 0.07816292042759533, + "learning_rate": 1.772010327919912e-05, + "loss": 0.0017, "step": 331 }, { - "epoch": 1.8043478260869565, - "grad_norm": 0.8696535124002203, - "learning_rate": 7.3598535257480244e-06, - "loss": 0.0126, + "epoch": 0.7377777777777778, + "grad_norm": 0.1131516402115526, + "learning_rate": 1.7704826456082137e-05, + "loss": 0.0015, "step": 332 }, { - "epoch": 1.8097826086956523, - "grad_norm": 0.02189894312561321, - "learning_rate": 7.30326152368674e-06, - "loss": 0.0008, + "epoch": 0.74, + "grad_norm": 0.0939956501816851, + "learning_rate": 1.768950525339362e-05, + "loss": 0.0015, "step": 333 }, { - "epoch": 1.8152173913043477, - "grad_norm": 0.031609375803459974, - "learning_rate": 7.246762510237404e-06, - "loss": 0.0011, + "epoch": 0.7422222222222222, + "grad_norm": 0.06895273740828892, + "learning_rate": 1.7674139759383253e-05, + "loss": 0.0017, "step": 334 }, { - "epoch": 1.8206521739130435, - "grad_norm": 0.020342266321765227, - "learning_rate": 7.1903584335922e-06, - "loss": 0.0008, + "epoch": 0.7444444444444445, + "grad_norm": 0.18828063400187017, + "learning_rate": 1.765873006255582e-05, + "loss": 0.0038, "step": 335 }, { - "epoch": 1.8260869565217392, - "grad_norm": 0.09248271114619741, - "learning_rate": 7.134051238669722e-06, - "loss": 0.0018, + "epoch": 0.7466666666666667, + "grad_norm": 0.05759248593135591, + "learning_rate": 1.764327625167072e-05, + "loss": 0.0014, "step": 336 }, { - "epoch": 1.8315217391304348, - "grad_norm": 0.10061723518020388, - "learning_rate": 7.077842867047897e-06, - "loss": 0.0024, + "epoch": 0.7488888888888889, + "grad_norm": 3.2019717673338866, + "learning_rate": 1.7627778415741437e-05, + "loss": 0.2732, "step": 337 }, { - "epoch": 1.8369565217391304, - "grad_norm": 0.21992324150498122, - "learning_rate": 7.021735256897035e-06, - "loss": 0.0027, + "epoch": 0.7511111111111111, + "grad_norm": 3.829915601578586, + "learning_rate": 1.761223664403505e-05, + "loss": 0.0382, "step": 338 }, { - "epoch": 1.8423913043478262, - "grad_norm": 0.030816726743244916, - "learning_rate": 6.965730342913011e-06, - "loss": 0.0011, + "epoch": 0.7533333333333333, + "grad_norm": 0.07801699193935524, + "learning_rate": 1.7596651026071708e-05, + "loss": 0.0017, "step": 339 }, { - "epoch": 1.8478260869565217, - "grad_norm": 0.01683095603625154, - "learning_rate": 6.909830056250527e-06, - "loss": 0.0008, + "epoch": 0.7555555555555555, + "grad_norm": 2.178951935310646, + "learning_rate": 1.7581021651624097e-05, + "loss": 0.0783, "step": 340 }, { - "epoch": 1.8532608695652173, - "grad_norm": 0.23379778261250125, - "learning_rate": 6.8540363244565524e-06, - "loss": 0.0043, + "epoch": 0.7577777777777778, + "grad_norm": 0.3936676246719478, + "learning_rate": 1.7565348610716963e-05, + "loss": 0.0109, "step": 341 }, { - "epoch": 1.858695652173913, - "grad_norm": 0.03675133534148478, - "learning_rate": 6.798351071403839e-06, - "loss": 0.001, + "epoch": 0.76, + "grad_norm": 0.1929534132804924, + "learning_rate": 1.754963199362654e-05, + "loss": 0.0049, "step": 342 }, { - "epoch": 1.8641304347826086, - "grad_norm": 0.1140408877999425, - "learning_rate": 6.742776217224587e-06, - "loss": 0.0027, + "epoch": 0.7622222222222222, + "grad_norm": 0.22591516183578036, + "learning_rate": 1.7533871890880088e-05, + "loss": 0.0031, "step": 343 }, { - "epoch": 1.8695652173913042, - "grad_norm": 0.02850900102579577, - "learning_rate": 6.687313678244243e-06, - "loss": 0.0009, + 
"epoch": 0.7644444444444445, + "grad_norm": 0.1888882198947638, + "learning_rate": 1.7518068393255324e-05, + "loss": 0.004, "step": 344 }, { - "epoch": 1.875, - "grad_norm": 0.02532716939465366, - "learning_rate": 6.6319653669154e-06, - "loss": 0.001, + "epoch": 0.7666666666666667, + "grad_norm": 1.333005638664377, + "learning_rate": 1.7502221591779932e-05, + "loss": 0.0455, "step": 345 }, { - "epoch": 1.8804347826086958, - "grad_norm": 0.10582087034471738, - "learning_rate": 6.576733191751879e-06, - "loss": 0.0029, + "epoch": 0.7688888888888888, + "grad_norm": 0.08351848744341497, + "learning_rate": 1.748633157773101e-05, + "loss": 0.0019, "step": 346 }, { - "epoch": 1.8858695652173914, - "grad_norm": 2.4137374896779877, - "learning_rate": 6.521619057262904e-06, - "loss": 0.1004, + "epoch": 0.7711111111111111, + "grad_norm": 0.20135338147628218, + "learning_rate": 1.7470398442634572e-05, + "loss": 0.004, "step": 347 }, { - "epoch": 1.891304347826087, - "grad_norm": 2.0298394937535122, - "learning_rate": 6.466624863887437e-06, - "loss": 0.0361, + "epoch": 0.7733333333333333, + "grad_norm": 0.3920394157318214, + "learning_rate": 1.7454422278264997e-05, + "loss": 0.007, "step": 348 }, { - "epoch": 1.8967391304347827, - "grad_norm": 0.15424873092333466, - "learning_rate": 6.411752507928643e-06, - "loss": 0.0031, + "epoch": 0.7755555555555556, + "grad_norm": 2.3346652301224435, + "learning_rate": 1.7438403176644524e-05, + "loss": 0.061, "step": 349 }, { - "epoch": 1.9021739130434783, - "grad_norm": 0.7343430535593085, - "learning_rate": 6.357003881488499e-06, - "loss": 0.0086, + "epoch": 0.7777777777777778, + "grad_norm": 0.17399541957186548, + "learning_rate": 1.74223412300427e-05, + "loss": 0.0038, "step": 350 }, { - "epoch": 1.9076086956521738, - "grad_norm": 0.0169679254906056, - "learning_rate": 6.302380872402562e-06, - "loss": 0.0007, + "epoch": 0.78, + "grad_norm": 0.5063917945887836, + "learning_rate": 1.7406236530975862e-05, + "loss": 0.014, "step": 351 }, { - "epoch": 1.9130434782608696, - "grad_norm": 0.026108663412252976, - "learning_rate": 6.247885364174866e-06, - "loss": 0.001, + "epoch": 0.7822222222222223, + "grad_norm": 0.17100998262389278, + "learning_rate": 1.7390089172206594e-05, + "loss": 0.0039, "step": 352 }, { - "epoch": 1.9184782608695652, - "grad_norm": 0.022679414032134804, - "learning_rate": 6.193519235912972e-06, - "loss": 0.0008, + "epoch": 0.7844444444444445, + "grad_norm": 0.07690407785975054, + "learning_rate": 1.7373899246743202e-05, + "loss": 0.0023, "step": 353 }, { - "epoch": 1.9239130434782608, - "grad_norm": 0.02365404382322627, - "learning_rate": 6.139284362263185e-06, - "loss": 0.0008, + "epoch": 0.7866666666666666, + "grad_norm": 3.096708709231272, + "learning_rate": 1.7357666847839186e-05, + "loss": 0.0948, "step": 354 }, { - "epoch": 1.9293478260869565, - "grad_norm": 0.014446988115359962, - "learning_rate": 6.085182613345893e-06, - "loss": 0.0006, + "epoch": 0.7888888888888889, + "grad_norm": 0.06677352191462775, + "learning_rate": 1.734139206899267e-05, + "loss": 0.0018, "step": 355 }, { - "epoch": 1.9347826086956523, - "grad_norm": 0.016091425374232204, - "learning_rate": 6.031215854691097e-06, - "loss": 0.0007, + "epoch": 0.7911111111111111, + "grad_norm": 2.158930506345796, + "learning_rate": 1.7325075003945902e-05, + "loss": 0.0612, "step": 356 }, { - "epoch": 1.9402173913043477, - "grad_norm": 0.01553827774955186, - "learning_rate": 5.977385947174084e-06, - "loss": 0.0007, + "epoch": 0.7933333333333333, + "grad_norm": 0.07343319642954196, + 
"learning_rate": 1.730871574668469e-05, + "loss": 0.0018, "step": 357 }, { - "epoch": 1.9456521739130435, - "grad_norm": 0.17966133137766196, - "learning_rate": 5.923694746951253e-06, - "loss": 0.0028, + "epoch": 0.7955555555555556, + "grad_norm": 0.10209992241664956, + "learning_rate": 1.729231439143787e-05, + "loss": 0.003, "step": 358 }, { - "epoch": 1.9510869565217392, - "grad_norm": 0.02477310360295687, - "learning_rate": 5.8701441053961185e-06, - "loss": 0.0009, + "epoch": 0.7977777777777778, + "grad_norm": 0.05154563949559315, + "learning_rate": 1.727587103267677e-05, + "loss": 0.0016, "step": 359 }, { - "epoch": 1.9565217391304348, - "grad_norm": 0.025478377542260965, - "learning_rate": 5.816735869035458e-06, - "loss": 0.0009, + "epoch": 0.8, + "grad_norm": 2.5242361305489083, + "learning_rate": 1.7259385765114634e-05, + "loss": 0.1427, "step": 360 }, { - "epoch": 1.9619565217391304, - "grad_norm": 0.01385737253155479, - "learning_rate": 5.7634718794856626e-06, - "loss": 0.0006, + "epoch": 0.8022222222222222, + "grad_norm": 0.044414925999775715, + "learning_rate": 1.7242858683706122e-05, + "loss": 0.0013, "step": 361 }, { - "epoch": 1.9673913043478262, - "grad_norm": 0.2920694264321747, - "learning_rate": 5.710353973389215e-06, - "loss": 0.003, + "epoch": 0.8044444444444444, + "grad_norm": 0.08091268252607839, + "learning_rate": 1.7226289883646727e-05, + "loss": 0.0018, "step": 362 }, { - "epoch": 1.9728260869565217, - "grad_norm": 0.0609584809389905, - "learning_rate": 5.657383982351368e-06, - "loss": 0.0014, + "epoch": 0.8066666666666666, + "grad_norm": 1.3470382192190027, + "learning_rate": 1.720967946037225e-05, + "loss": 0.0172, "step": 363 }, { - "epoch": 1.9782608695652173, - "grad_norm": 0.014022955163492444, - "learning_rate": 5.604563732876989e-06, - "loss": 0.0006, + "epoch": 0.8088888888888889, + "grad_norm": 1.1385884696978847, + "learning_rate": 1.7193027509558233e-05, + "loss": 0.0233, "step": 364 }, { - "epoch": 1.983695652173913, - "grad_norm": 0.02973603833790608, - "learning_rate": 5.55189504630756e-06, - "loss": 0.0009, + "epoch": 0.8111111111111111, + "grad_norm": 0.0696388572432045, + "learning_rate": 1.7176334127119418e-05, + "loss": 0.002, "step": 365 }, { - "epoch": 1.9891304347826086, - "grad_norm": 0.07663989298851219, - "learning_rate": 5.4993797387584056e-06, - "loss": 0.0015, + "epoch": 0.8133333333333334, + "grad_norm": 2.689648637168946, + "learning_rate": 1.7159599409209194e-05, + "loss": 0.0835, "step": 366 }, { - "epoch": 1.9945652173913042, - "grad_norm": 3.723476839809668, - "learning_rate": 5.447019621056049e-06, - "loss": 0.1512, + "epoch": 0.8155555555555556, + "grad_norm": 0.02695159673981318, + "learning_rate": 1.7142823452219036e-05, + "loss": 0.0008, "step": 367 }, { - "epoch": 2.0, - "grad_norm": 0.023508663828369594, - "learning_rate": 5.394816498675772e-06, - "loss": 0.0008, + "epoch": 0.8177777777777778, + "grad_norm": 1.016439154703933, + "learning_rate": 1.7126006352777965e-05, + "loss": 0.0395, "step": 368 }, { - "epoch": 2.005434782608696, - "grad_norm": 0.014915331253251566, - "learning_rate": 5.342772171679364e-06, - "loss": 0.0006, + "epoch": 0.82, + "grad_norm": 0.0696464936791321, + "learning_rate": 1.710914820775196e-05, + "loss": 0.0019, "step": 369 }, { - "epoch": 2.010869565217391, - "grad_norm": 0.15045045132635565, - "learning_rate": 5.290888434653056e-06, - "loss": 0.0035, + "epoch": 0.8222222222222222, + "grad_norm": 0.06119723875136413, + "learning_rate": 1.7092249114243453e-05, + "loss": 0.0015, "step": 370 }, { - 
"epoch": 2.016304347826087, - "grad_norm": 0.02078710490582649, - "learning_rate": 5.239167076645626e-06, - "loss": 0.0009, + "epoch": 0.8244444444444444, + "grad_norm": 0.07165346259135334, + "learning_rate": 1.7075309169590708e-05, + "loss": 0.0019, "step": 371 }, { - "epoch": 2.0217391304347827, - "grad_norm": 0.08909809356955653, - "learning_rate": 5.187609881106725e-06, - "loss": 0.0021, + "epoch": 0.8266666666666667, + "grad_norm": 0.26400721024705276, + "learning_rate": 1.705832847136731e-05, + "loss": 0.005, "step": 372 }, { - "epoch": 2.027173913043478, - "grad_norm": 0.019002236928891497, - "learning_rate": 5.136218625825374e-06, - "loss": 0.0006, + "epoch": 0.8288888888888889, + "grad_norm": 1.0065925848667174, + "learning_rate": 1.704130711738157e-05, + "loss": 0.0148, "step": 373 }, { - "epoch": 2.032608695652174, - "grad_norm": 0.04208850827532741, - "learning_rate": 5.084995082868658e-06, - "loss": 0.0009, + "epoch": 0.8311111111111111, + "grad_norm": 0.44147343637149283, + "learning_rate": 1.7024245205675986e-05, + "loss": 0.0096, "step": 374 }, { - "epoch": 2.0380434782608696, - "grad_norm": 0.046840875573742065, - "learning_rate": 5.033941018520625e-06, - "loss": 0.0014, + "epoch": 0.8333333333333334, + "grad_norm": 1.3630136313620684, + "learning_rate": 1.7007142834526665e-05, + "loss": 0.0364, "step": 375 }, { - "epoch": 2.0434782608695654, - "grad_norm": 0.1033934706809575, - "learning_rate": 4.983058193221384e-06, - "loss": 0.0019, + "epoch": 0.8355555555555556, + "grad_norm": 0.029280506551201933, + "learning_rate": 1.6990000102442748e-05, + "loss": 0.0009, "step": 376 }, { - "epoch": 2.0489130434782608, - "grad_norm": 0.1705166302206335, - "learning_rate": 4.932348361506402e-06, - "loss": 0.0033, + "epoch": 0.8377777777777777, + "grad_norm": 0.04456593435054069, + "learning_rate": 1.697281710816587e-05, + "loss": 0.0012, "step": 377 }, { - "epoch": 2.0543478260869565, - "grad_norm": 0.028909235733879053, - "learning_rate": 4.881813271946e-06, - "loss": 0.0012, + "epoch": 0.84, + "grad_norm": 0.19248252442168862, + "learning_rate": 1.6955593950669568e-05, + "loss": 0.0053, "step": 378 }, { - "epoch": 2.0597826086956523, - "grad_norm": 0.3030377695298429, - "learning_rate": 4.831454667085059e-06, - "loss": 0.0039, + "epoch": 0.8422222222222222, + "grad_norm": 0.03408835741593863, + "learning_rate": 1.6938330729158713e-05, + "loss": 0.0009, "step": 379 }, { - "epoch": 2.0652173913043477, - "grad_norm": 0.0477055277967709, - "learning_rate": 4.781274283382941e-06, - "loss": 0.001, + "epoch": 0.8444444444444444, + "grad_norm": 1.1746726029841232, + "learning_rate": 1.692102754306895e-05, + "loss": 0.0293, "step": 380 }, { - "epoch": 2.0706521739130435, - "grad_norm": 0.0199106085983902, - "learning_rate": 4.7312738511536035e-06, - "loss": 0.0008, + "epoch": 0.8466666666666667, + "grad_norm": 0.042184291758604296, + "learning_rate": 1.690368449206612e-05, + "loss": 0.0011, "step": 381 }, { - "epoch": 2.0760869565217392, - "grad_norm": 0.027962787198971308, - "learning_rate": 4.681455094505938e-06, - "loss": 0.001, + "epoch": 0.8488888888888889, + "grad_norm": 0.05911287012857593, + "learning_rate": 1.6886301676045676e-05, + "loss": 0.0016, "step": 382 }, { - "epoch": 2.0815217391304346, - "grad_norm": 0.0382934899009715, - "learning_rate": 4.631819731284323e-06, - "loss": 0.0011, + "epoch": 0.8511111111111112, + "grad_norm": 0.09567480152257578, + "learning_rate": 1.6868879195132128e-05, + "loss": 0.0022, "step": 383 }, { - "epoch": 2.0869565217391304, - "grad_norm": 
0.013418670608056855, - "learning_rate": 4.58236947300939e-06, - "loss": 0.0006, + "epoch": 0.8533333333333334, + "grad_norm": 0.02923613179690119, + "learning_rate": 1.6851417149678442e-05, + "loss": 0.0009, "step": 384 }, { - "epoch": 2.092391304347826, - "grad_norm": 0.04141147762016092, - "learning_rate": 4.5331060248189924e-06, - "loss": 0.0013, + "epoch": 0.8555555555555555, + "grad_norm": 0.03595737347271202, + "learning_rate": 1.6833915640265485e-05, + "loss": 0.0009, "step": 385 }, { - "epoch": 2.097826086956522, - "grad_norm": 0.029823878767931914, - "learning_rate": 4.4840310854094335e-06, - "loss": 0.001, + "epoch": 0.8577777777777778, + "grad_norm": 0.15377747786158083, + "learning_rate": 1.6816374767701437e-05, + "loss": 0.0018, "step": 386 }, { - "epoch": 2.1032608695652173, - "grad_norm": 0.2181034359186816, - "learning_rate": 4.435146346976873e-06, - "loss": 0.004, + "epoch": 0.86, + "grad_norm": 0.025274103773197356, + "learning_rate": 1.6798794633021192e-05, + "loss": 0.0007, "step": 387 }, { - "epoch": 2.108695652173913, - "grad_norm": 0.36490526428814946, - "learning_rate": 4.386453495158983e-06, - "loss": 0.0042, + "epoch": 0.8622222222222222, + "grad_norm": 0.024364169273204686, + "learning_rate": 1.678117533748581e-05, + "loss": 0.0007, "step": 388 }, { - "epoch": 2.114130434782609, - "grad_norm": 0.0743305865977075, - "learning_rate": 4.33795420897683e-06, - "loss": 0.0011, + "epoch": 0.8644444444444445, + "grad_norm": 3.739051669062766, + "learning_rate": 1.6763516982581905e-05, + "loss": 0.1779, "step": 389 }, { - "epoch": 2.119565217391304, - "grad_norm": 0.3000013681179252, - "learning_rate": 4.289650160776967e-06, - "loss": 0.0046, + "epoch": 0.8666666666666667, + "grad_norm": 0.08684921227588457, + "learning_rate": 1.6745819670021083e-05, + "loss": 0.002, "step": 390 }, { - "epoch": 2.125, - "grad_norm": 0.05973611485866258, - "learning_rate": 4.241543016173778e-06, - "loss": 0.0011, + "epoch": 0.8688888888888889, + "grad_norm": 0.050271751791448493, + "learning_rate": 1.6728083501739333e-05, + "loss": 0.0013, "step": 391 }, { - "epoch": 2.130434782608696, - "grad_norm": 0.02140783876818863, - "learning_rate": 4.19363443399204e-06, - "loss": 0.0008, + "epoch": 0.8711111111111111, + "grad_norm": 0.7587609198992383, + "learning_rate": 1.6710308579896462e-05, + "loss": 0.0192, "step": 392 }, { - "epoch": 2.135869565217391, - "grad_norm": 0.01680791379596923, - "learning_rate": 4.1459260662097235e-06, - "loss": 0.0007, + "epoch": 0.8733333333333333, + "grad_norm": 0.047998101271713715, + "learning_rate": 1.669249500687549e-05, + "loss": 0.0012, "step": 393 }, { - "epoch": 2.141304347826087, - "grad_norm": 0.5362708346340234, - "learning_rate": 4.098419557901036e-06, - "loss": 0.0077, + "epoch": 0.8755555555555555, + "grad_norm": 0.034697222517479344, + "learning_rate": 1.667464288528207e-05, + "loss": 0.0008, "step": 394 }, { - "epoch": 2.1467391304347827, - "grad_norm": 0.016360773071928784, - "learning_rate": 4.051116547179677e-06, - "loss": 0.0007, + "epoch": 0.8777777777777778, + "grad_norm": 0.0763178676668161, + "learning_rate": 1.6656752317943888e-05, + "loss": 0.0016, "step": 395 }, { - "epoch": 2.1521739130434785, - "grad_norm": 0.28985199290673336, - "learning_rate": 4.00401866514238e-06, - "loss": 0.0044, + "epoch": 0.88, + "grad_norm": 1.5090167070768492, + "learning_rate": 1.6638823407910085e-05, + "loss": 0.2079, "step": 396 }, { - "epoch": 2.157608695652174, - "grad_norm": 0.01604718518106245, - "learning_rate": 3.957127535812651e-06, - "loss": 
0.0007, + "epoch": 0.8822222222222222, + "grad_norm": 2.1073689348767513, + "learning_rate": 1.6620856258450652e-05, + "loss": 0.2643, "step": 397 }, { - "epoch": 2.1630434782608696, - "grad_norm": 0.05241001721895836, - "learning_rate": 3.910444776084777e-06, - "loss": 0.0016, + "epoch": 0.8844444444444445, + "grad_norm": 0.9953199907891269, + "learning_rate": 1.6602850973055824e-05, + "loss": 0.0193, "step": 398 }, { - "epoch": 2.1684782608695654, - "grad_norm": 0.02209678496389779, - "learning_rate": 3.8639719956680624e-06, + "epoch": 0.8866666666666667, + "grad_norm": 0.027243332407602957, + "learning_rate": 1.6584807655435528e-05, "loss": 0.0008, "step": 399 }, { - "epoch": 2.1739130434782608, - "grad_norm": 0.020559716878607803, - "learning_rate": 3.817710797031338e-06, - "loss": 0.0008, + "epoch": 0.8888888888888888, + "grad_norm": 1.057519228197808, + "learning_rate": 1.6566726409518722e-05, + "loss": 0.0247, "step": 400 }, { - "epoch": 2.1793478260869565, - "grad_norm": 0.014824391810911752, - "learning_rate": 3.771662775347692e-06, - "loss": 0.0006, + "epoch": 0.8911111111111111, + "grad_norm": 3.5372153773358197, + "learning_rate": 1.6548607339452853e-05, + "loss": 0.2287, "step": 401 }, { - "epoch": 2.1847826086956523, - "grad_norm": 0.015796576868617806, - "learning_rate": 3.7258295184394743e-06, - "loss": 0.0007, + "epoch": 0.8933333333333333, + "grad_norm": 0.8487085761287783, + "learning_rate": 1.6530450549603223e-05, + "loss": 0.0131, "step": 402 }, { - "epoch": 2.1902173913043477, - "grad_norm": 1.9188157660999832, - "learning_rate": 3.680212606723542e-06, - "loss": 0.0306, + "epoch": 0.8955555555555555, + "grad_norm": 0.07166066334256051, + "learning_rate": 1.6512256144552407e-05, + "loss": 0.0016, "step": 403 }, { - "epoch": 2.1956521739130435, - "grad_norm": 0.06391438687127189, - "learning_rate": 3.6348136131567537e-06, - "loss": 0.0019, + "epoch": 0.8977777777777778, + "grad_norm": 0.04349791834431249, + "learning_rate": 1.6494024229099634e-05, + "loss": 0.0011, "step": 404 }, { - "epoch": 2.2010869565217392, - "grad_norm": 0.17262747887734978, - "learning_rate": 3.5896341031817517e-06, - "loss": 0.0036, + "epoch": 0.9, + "grad_norm": 2.1471826502562568, + "learning_rate": 1.64757549082602e-05, + "loss": 0.0504, "step": 405 }, { - "epoch": 2.2065217391304346, - "grad_norm": 0.056665382264410494, - "learning_rate": 3.5446756346729673e-06, - "loss": 0.0012, + "epoch": 0.9022222222222223, + "grad_norm": 0.07434319470839308, + "learning_rate": 1.645744828726484e-05, + "loss": 0.002, "step": 406 }, { - "epoch": 2.2119565217391304, - "grad_norm": 1.9642610912379441, - "learning_rate": 3.4999397578829076e-06, - "loss": 0.037, + "epoch": 0.9044444444444445, + "grad_norm": 1.4838157054513865, + "learning_rate": 1.6439104471559157e-05, + "loss": 0.1745, "step": 407 }, { - "epoch": 2.217391304347826, - "grad_norm": 0.014116778100650137, - "learning_rate": 3.4554280153886967e-06, - "loss": 0.0006, + "epoch": 0.9066666666666666, + "grad_norm": 0.25749488823039385, + "learning_rate": 1.6420723566802982e-05, + "loss": 0.0042, "step": 408 }, { - "epoch": 2.2228260869565215, - "grad_norm": 0.024488008150664965, - "learning_rate": 3.4111419420388904e-06, - "loss": 0.001, + "epoch": 0.9088888888888889, + "grad_norm": 0.10912285122462197, + "learning_rate": 1.640230567886978e-05, + "loss": 0.0028, "step": 409 }, { - "epoch": 2.2282608695652173, - "grad_norm": 0.5674032898921303, - "learning_rate": 3.3670830649005437e-06, - "loss": 0.0041, + "epoch": 0.9111111111111111, + "grad_norm": 
1.7944452725145175, + "learning_rate": 1.6383850913846036e-05, + "loss": 0.0479, "step": 410 }, { - "epoch": 2.233695652173913, - "grad_norm": 0.02286422293729417, - "learning_rate": 3.323252903206562e-06, - "loss": 0.0009, + "epoch": 0.9133333333333333, + "grad_norm": 0.4370178249333847, + "learning_rate": 1.6365359378030654e-05, + "loss": 0.0091, "step": 411 }, { - "epoch": 2.239130434782609, - "grad_norm": 0.27168054236566974, - "learning_rate": 3.279652968303313e-06, - "loss": 0.0043, + "epoch": 0.9155555555555556, + "grad_norm": 1.8067728020030278, + "learning_rate": 1.6346831177934326e-05, + "loss": 0.0639, "step": 412 }, { - "epoch": 2.244565217391304, - "grad_norm": 0.1593898805811067, - "learning_rate": 3.236284763598512e-06, - "loss": 0.0035, + "epoch": 0.9177777777777778, + "grad_norm": 0.5922192517230894, + "learning_rate": 1.632826642027894e-05, + "loss": 0.0157, "step": 413 }, { - "epoch": 2.25, - "grad_norm": 0.013081366094026997, - "learning_rate": 3.1931497845093753e-06, - "loss": 0.0006, + "epoch": 0.92, + "grad_norm": 1.1833496542383064, + "learning_rate": 1.6309665211996936e-05, + "loss": 0.0377, "step": 414 }, { - "epoch": 2.255434782608696, - "grad_norm": 0.012814297915516075, - "learning_rate": 3.150249518411067e-06, - "loss": 0.0006, + "epoch": 0.9222222222222223, + "grad_norm": 0.36033294993841947, + "learning_rate": 1.6291027660230735e-05, + "loss": 0.0063, "step": 415 }, { - "epoch": 2.260869565217391, - "grad_norm": 0.07415100436276072, - "learning_rate": 3.1075854445854093e-06, - "loss": 0.0018, + "epoch": 0.9244444444444444, + "grad_norm": 0.08701178672573401, + "learning_rate": 1.6272353872332075e-05, + "loss": 0.0027, "step": 416 }, { - "epoch": 2.266304347826087, - "grad_norm": 0.027114643295979856, - "learning_rate": 3.0651590341698633e-06, - "loss": 0.0009, + "epoch": 0.9266666666666666, + "grad_norm": 0.23919848416504177, + "learning_rate": 1.625364395586142e-05, + "loss": 0.0045, "step": 417 }, { - "epoch": 2.2717391304347827, - "grad_norm": 0.13722514020501544, - "learning_rate": 3.0229717501068133e-06, - "loss": 0.0023, + "epoch": 0.9288888888888889, + "grad_norm": 0.1793604331408912, + "learning_rate": 1.6234898018587336e-05, + "loss": 0.0043, "step": 418 }, { - "epoch": 2.2771739130434785, - "grad_norm": 0.023053695918606187, - "learning_rate": 2.981025047093118e-06, - "loss": 0.0009, + "epoch": 0.9311111111111111, + "grad_norm": 0.07784604192384029, + "learning_rate": 1.6216116168485864e-05, + "loss": 0.0019, "step": 419 }, { - "epoch": 2.282608695652174, - "grad_norm": 3.7468189613648253, - "learning_rate": 2.9393203715299477e-06, - "loss": 0.0598, + "epoch": 0.9333333333333333, + "grad_norm": 0.08614360634700334, + "learning_rate": 1.61972985137399e-05, + "loss": 0.0025, "step": 420 }, { - "epoch": 2.2880434782608696, - "grad_norm": 0.08634045866789929, - "learning_rate": 2.8978591614729114e-06, - "loss": 0.0015, + "epoch": 0.9355555555555556, + "grad_norm": 0.08346052102710229, + "learning_rate": 1.6178445162738577e-05, + "loss": 0.0021, "step": 421 }, { - "epoch": 2.2934782608695654, - "grad_norm": 0.13994711242571936, - "learning_rate": 2.856642846582469e-06, - "loss": 0.0019, + "epoch": 0.9377777777777778, + "grad_norm": 0.13464240654030102, + "learning_rate": 1.6159556224076637e-05, + "loss": 0.0033, "step": 422 }, { - "epoch": 2.2989130434782608, - "grad_norm": 0.0519996408733201, - "learning_rate": 2.8156728480746386e-06, - "loss": 0.0011, + "epoch": 0.94, + "grad_norm": 0.20610917519969033, + "learning_rate": 1.614063180655381e-05, + 
"loss": 0.0046, "step": 423 }, { - "epoch": 2.3043478260869565, - "grad_norm": 0.01904905289611891, - "learning_rate": 2.77495057867198e-06, - "loss": 0.0007, + "epoch": 0.9422222222222222, + "grad_norm": 0.13282674590776067, + "learning_rate": 1.612167201917417e-05, + "loss": 0.0037, "step": 424 }, { - "epoch": 2.3097826086956523, - "grad_norm": 1.2476206988634295, - "learning_rate": 2.7344774425548917e-06, - "loss": 0.0339, + "epoch": 0.9444444444444444, + "grad_norm": 0.24865554912927337, + "learning_rate": 1.6102676971145543e-05, + "loss": 0.0074, "step": 425 }, { - "epoch": 2.3152173913043477, - "grad_norm": 1.7884596495622582, - "learning_rate": 2.694254835313187e-06, - "loss": 0.1375, + "epoch": 0.9466666666666667, + "grad_norm": 0.15675631759244346, + "learning_rate": 1.6083646771878826e-05, + "loss": 0.004, "step": 426 }, { - "epoch": 2.3206521739130435, - "grad_norm": 0.31025512064642874, - "learning_rate": 2.654284143897976e-06, - "loss": 0.0034, + "epoch": 0.9488888888888889, + "grad_norm": 0.11346738908268483, + "learning_rate": 1.6064581530987408e-05, + "loss": 0.0028, "step": 427 }, { - "epoch": 2.3260869565217392, - "grad_norm": 0.3488873501510679, - "learning_rate": 2.6145667465738333e-06, - "loss": 0.0039, + "epoch": 0.9511111111111111, + "grad_norm": 0.3843436403873271, + "learning_rate": 1.6045481358286516e-05, + "loss": 0.0096, "step": 428 }, { - "epoch": 2.3315217391304346, - "grad_norm": 0.589409734181312, - "learning_rate": 2.57510401287128e-06, - "loss": 0.0044, + "epoch": 0.9533333333333334, + "grad_norm": 0.10926600197674162, + "learning_rate": 1.6026346363792565e-05, + "loss": 0.0024, "step": 429 }, { - "epoch": 2.3369565217391304, - "grad_norm": 0.3987654975780055, - "learning_rate": 2.535897303539554e-06, - "loss": 0.0061, + "epoch": 0.9555555555555556, + "grad_norm": 0.068178019783604, + "learning_rate": 1.6007176657722567e-05, + "loss": 0.0018, "step": 430 }, { - "epoch": 2.342391304347826, - "grad_norm": 0.015719041310887562, - "learning_rate": 2.4969479704996935e-06, - "loss": 0.0006, + "epoch": 0.9577777777777777, + "grad_norm": 0.07594354636814077, + "learning_rate": 1.598797235049345e-05, + "loss": 0.0017, "step": 431 }, { - "epoch": 2.3478260869565215, - "grad_norm": 0.015180271606601303, - "learning_rate": 2.4582573567979196e-06, - "loss": 0.0006, + "epoch": 0.96, + "grad_norm": 0.07593090215178103, + "learning_rate": 1.5968733552721462e-05, + "loss": 0.0017, "step": 432 }, { - "epoch": 2.3532608695652173, - "grad_norm": 0.04482488635397311, - "learning_rate": 2.4198267965593224e-06, - "loss": 0.0011, + "epoch": 0.9622222222222222, + "grad_norm": 1.859359413537719, + "learning_rate": 1.59494603752215e-05, + "loss": 0.1544, "step": 433 }, { - "epoch": 2.358695652173913, - "grad_norm": 0.28160845350626884, - "learning_rate": 2.381657614941858e-06, - "loss": 0.005, + "epoch": 0.9644444444444444, + "grad_norm": 0.43219326692623933, + "learning_rate": 1.5930152929006496e-05, + "loss": 0.0097, "step": 434 }, { - "epoch": 2.364130434782609, - "grad_norm": 0.09873212459265543, - "learning_rate": 2.3437511280906576e-06, - "loss": 0.002, + "epoch": 0.9666666666666667, + "grad_norm": 0.9573986443708047, + "learning_rate": 1.5910811325286768e-05, + "loss": 0.0126, "step": 435 }, { - "epoch": 2.369565217391304, - "grad_norm": 0.028522981368259783, - "learning_rate": 2.306108643092647e-06, - "loss": 0.0008, + "epoch": 0.9688888888888889, + "grad_norm": 0.11451745773452655, + "learning_rate": 1.5891435675469376e-05, + "loss": 0.0022, "step": 436 }, { - "epoch": 
2.375, - "grad_norm": 0.030887088059580514, - "learning_rate": 2.268731457931467e-06, - "loss": 0.001, + "epoch": 0.9711111111111111, + "grad_norm": 0.04409476657894775, + "learning_rate": 1.587202609115749e-05, + "loss": 0.0012, "step": 437 }, { - "epoch": 2.380434782608696, - "grad_norm": 0.2056153085824592, - "learning_rate": 2.2316208614427226e-06, - "loss": 0.003, + "epoch": 0.9733333333333334, + "grad_norm": 0.15395940177625353, + "learning_rate": 1.585258268414974e-05, + "loss": 0.0043, "step": 438 }, { - "epoch": 2.385869565217391, - "grad_norm": 0.03316498797260578, - "learning_rate": 2.1947781332695406e-06, - "loss": 0.001, + "epoch": 0.9755555555555555, + "grad_norm": 0.0389883612209622, + "learning_rate": 1.583310556643957e-05, + "loss": 0.0013, "step": 439 }, { - "epoch": 2.391304347826087, - "grad_norm": 0.020603866879399167, - "learning_rate": 2.1582045438184464e-06, - "loss": 0.0007, + "epoch": 0.9777777777777777, + "grad_norm": 0.3774001997217339, + "learning_rate": 1.58135948502146e-05, + "loss": 0.0065, "step": 440 }, { - "epoch": 2.3967391304347827, - "grad_norm": 0.022416446968247912, - "learning_rate": 2.121901354215553e-06, - "loss": 0.0008, + "epoch": 0.98, + "grad_norm": 0.051435405821458835, + "learning_rate": 1.5794050647855977e-05, + "loss": 0.0014, "step": 441 }, { - "epoch": 2.4021739130434785, - "grad_norm": 1.2759832400444016, - "learning_rate": 2.085869816263081e-06, - "loss": 0.0222, + "epoch": 0.9822222222222222, + "grad_norm": 0.04754404716444526, + "learning_rate": 1.5774473071937725e-05, + "loss": 0.0014, "step": 442 }, { - "epoch": 2.407608695652174, - "grad_norm": 2.7040121657564558, - "learning_rate": 2.050111172396192e-06, - "loss": 0.0472, + "epoch": 0.9844444444444445, + "grad_norm": 1.6566691546756405, + "learning_rate": 1.57548622352261e-05, + "loss": 0.0374, "step": 443 }, { - "epoch": 2.4130434782608696, - "grad_norm": 0.10233992459998235, - "learning_rate": 2.0146266556401405e-06, - "loss": 0.0016, + "epoch": 0.9866666666666667, + "grad_norm": 0.1037981859410034, + "learning_rate": 1.5735218250678944e-05, + "loss": 0.0026, "step": 444 }, { - "epoch": 2.4184782608695654, - "grad_norm": 0.244848209656816, - "learning_rate": 1.97941748956777e-06, - "loss": 0.004, + "epoch": 0.9888888888888889, + "grad_norm": 1.4533098350292877, + "learning_rate": 1.5715541231445018e-05, + "loss": 0.1089, "step": 445 }, { - "epoch": 2.4239130434782608, - "grad_norm": 0.05688444318906805, - "learning_rate": 1.944484888257312e-06, - "loss": 0.0013, + "epoch": 0.9911111111111112, + "grad_norm": 0.015167023971068238, + "learning_rate": 1.5695831290863367e-05, + "loss": 0.0004, "step": 446 }, { - "epoch": 2.4293478260869565, - "grad_norm": 0.5574195380686696, - "learning_rate": 1.9098300562505266e-06, - "loss": 0.0112, + "epoch": 0.9933333333333333, + "grad_norm": 0.0411769322299393, + "learning_rate": 1.567608854246267e-05, + "loss": 0.001, "step": 447 }, { - "epoch": 2.4347826086956523, - "grad_norm": 0.0932057849593417, - "learning_rate": 1.8754541885111631e-06, - "loss": 0.0018, + "epoch": 0.9955555555555555, + "grad_norm": 0.2988173473340702, + "learning_rate": 1.5656313099960564e-05, + "loss": 0.0051, "step": 448 }, { - "epoch": 2.4402173913043477, - "grad_norm": 0.10747253772821316, - "learning_rate": 1.8413584703837618e-06, - "loss": 0.0018, + "epoch": 0.9977777777777778, + "grad_norm": 0.20568756307591282, + "learning_rate": 1.5636505077263017e-05, + "loss": 0.005, "step": 449 }, { - "epoch": 2.4456521739130435, - "grad_norm": 0.39067007335009907, - 
"learning_rate": 1.8075440775527754e-06, - "loss": 0.0063, + "epoch": 1.0, + "grad_norm": 0.13301452077305276, + "learning_rate": 1.561666458846365e-05, + "loss": 0.0022, "step": 450 }, { - "epoch": 2.4510869565217392, - "grad_norm": 0.028328534672816628, - "learning_rate": 1.7740121760020324e-06, - "loss": 0.001, + "epoch": 1.0022222222222221, + "grad_norm": 3.1947231928375497, + "learning_rate": 1.5596791747843083e-05, + "loss": 0.0509, "step": 451 }, { - "epoch": 2.4565217391304346, - "grad_norm": 0.12079880404676811, - "learning_rate": 1.740763921974531e-06, - "loss": 0.0024, + "epoch": 1.0044444444444445, + "grad_norm": 1.3041500941665616, + "learning_rate": 1.5576886669868297e-05, + "loss": 0.1806, "step": 452 }, { - "epoch": 2.4619565217391304, - "grad_norm": 0.10850662346060039, - "learning_rate": 1.7078004619325728e-06, - "loss": 0.0017, + "epoch": 1.0066666666666666, + "grad_norm": 1.834807463526075, + "learning_rate": 1.5556949469191943e-05, + "loss": 0.1873, "step": 453 }, { - "epoch": 2.467391304347826, - "grad_norm": 0.2673103325118139, - "learning_rate": 1.6751229325182194e-06, - "loss": 0.0067, + "epoch": 1.008888888888889, + "grad_norm": 0.3443441271361637, + "learning_rate": 1.5536980260651705e-05, + "loss": 0.0066, "step": 454 }, { - "epoch": 2.4728260869565215, - "grad_norm": 0.20052250560415452, - "learning_rate": 1.6427324605141125e-06, - "loss": 0.0037, + "epoch": 1.011111111111111, + "grad_norm": 2.4491543340704434, + "learning_rate": 1.5516979159269638e-05, + "loss": 0.1749, "step": 455 }, { - "epoch": 2.4782608695652173, - "grad_norm": 0.08452549445673675, - "learning_rate": 1.610630162804615e-06, - "loss": 0.0015, + "epoch": 1.0133333333333334, + "grad_norm": 0.448361022205904, + "learning_rate": 1.5496946280251482e-05, + "loss": 0.0126, "step": 456 }, { - "epoch": 2.483695652173913, - "grad_norm": 0.01638519542637996, - "learning_rate": 1.578817146337297e-06, - "loss": 0.0006, + "epoch": 1.0155555555555555, + "grad_norm": 0.5051463640608859, + "learning_rate": 1.5476881738986037e-05, + "loss": 0.0081, "step": 457 }, { - "epoch": 2.489130434782609, - "grad_norm": 0.03107206330508472, - "learning_rate": 1.5472945080847679e-06, - "loss": 0.0008, + "epoch": 1.0177777777777777, + "grad_norm": 0.05644803186135449, + "learning_rate": 1.545678565104445e-05, + "loss": 0.0013, "step": 458 }, { - "epoch": 2.494565217391304, - "grad_norm": 0.03654411098415488, - "learning_rate": 1.516063335006851e-06, - "loss": 0.0009, + "epoch": 1.02, + "grad_norm": 0.4434329809886952, + "learning_rate": 1.5436658132179602e-05, + "loss": 0.0049, "step": 459 }, { - "epoch": 2.5, - "grad_norm": 0.07287899663917816, - "learning_rate": 1.485124704013101e-06, - "loss": 0.0017, + "epoch": 1.0222222222222221, + "grad_norm": 0.12232415280817052, + "learning_rate": 1.54164992983254e-05, + "loss": 0.0029, "step": 460 }, { - "epoch": 2.505434782608696, - "grad_norm": 0.9588849867572242, - "learning_rate": 1.4544796819256724e-06, - "loss": 0.0086, + "epoch": 1.0244444444444445, + "grad_norm": 0.061237928078889114, + "learning_rate": 1.5396309265596127e-05, + "loss": 0.0014, "step": 461 }, { - "epoch": 2.5108695652173916, - "grad_norm": 0.02467713549047941, - "learning_rate": 1.4241293254425337e-06, - "loss": 0.0007, + "epoch": 1.0266666666666666, + "grad_norm": 0.05548358280836634, + "learning_rate": 1.5376088150285777e-05, + "loss": 0.0018, "step": 462 }, { - "epoch": 2.516304347826087, - "grad_norm": 0.04748495142661645, - "learning_rate": 1.3940746811010297e-06, - "loss": 0.0011, + "epoch": 
1.028888888888889, + "grad_norm": 0.4001290523790067, + "learning_rate": 1.5355836068867365e-05, + "loss": 0.0056, "step": 463 }, { - "epoch": 2.5217391304347827, - "grad_norm": 0.03054669361577949, - "learning_rate": 1.3643167852417894e-06, - "loss": 0.001, + "epoch": 1.031111111111111, + "grad_norm": 0.07407173821562185, + "learning_rate": 1.5335553137992286e-05, + "loss": 0.0021, "step": 464 }, { - "epoch": 2.5271739130434785, - "grad_norm": 0.027111109257002528, - "learning_rate": 1.3348566639730032e-06, - "loss": 0.0011, + "epoch": 1.0333333333333334, + "grad_norm": 0.4415551385855507, + "learning_rate": 1.5315239474489617e-05, + "loss": 0.01, "step": 465 }, { - "epoch": 2.532608695652174, - "grad_norm": 0.04377035701857717, - "learning_rate": 1.3056953331350297e-06, - "loss": 0.001, + "epoch": 1.0355555555555556, + "grad_norm": 1.1105193105944093, + "learning_rate": 1.5294895195365454e-05, + "loss": 0.0248, "step": 466 }, { - "epoch": 2.5380434782608696, - "grad_norm": 0.08382313642398824, - "learning_rate": 1.2768337982653744e-06, - "loss": 0.0014, + "epoch": 1.0377777777777777, + "grad_norm": 0.3852259669673318, + "learning_rate": 1.5274520417802243e-05, + "loss": 0.0109, "step": 467 }, { - "epoch": 2.5434782608695654, - "grad_norm": 0.030219514519134735, - "learning_rate": 1.2482730545640133e-06, - "loss": 0.0011, + "epoch": 1.04, + "grad_norm": 0.26591146053719716, + "learning_rate": 1.5254115259158095e-05, + "loss": 0.0049, "step": 468 }, { - "epoch": 2.5489130434782608, - "grad_norm": 0.42539314485494417, - "learning_rate": 1.2200140868590759e-06, - "loss": 0.0063, + "epoch": 1.0422222222222222, + "grad_norm": 0.4473631698800017, + "learning_rate": 1.5233679836966122e-05, + "loss": 0.011, "step": 469 }, { - "epoch": 2.5543478260869565, - "grad_norm": 0.025687483062924163, - "learning_rate": 1.1920578695728903e-06, - "loss": 0.0009, + "epoch": 1.0444444444444445, + "grad_norm": 0.25419158042751855, + "learning_rate": 1.5213214268933745e-05, + "loss": 0.0051, "step": 470 }, { - "epoch": 2.5597826086956523, - "grad_norm": 0.027491319722765094, - "learning_rate": 1.1644053666883803e-06, - "loss": 0.0009, + "epoch": 1.0466666666666666, + "grad_norm": 0.29350819828975866, + "learning_rate": 1.519271867294203e-05, + "loss": 0.0079, "step": 471 }, { - "epoch": 2.5652173913043477, - "grad_norm": 0.12070804850917503, - "learning_rate": 1.137057531715825e-06, - "loss": 0.0023, + "epoch": 1.048888888888889, + "grad_norm": 0.17566809369977404, + "learning_rate": 1.5172193167045e-05, + "loss": 0.0039, "step": 472 }, { - "epoch": 2.5706521739130435, - "grad_norm": 0.1648819505998384, - "learning_rate": 1.1100153076599862e-06, - "loss": 0.0025, + "epoch": 1.051111111111111, + "grad_norm": 0.11176819325740334, + "learning_rate": 1.515163786946896e-05, + "loss": 0.0024, "step": 473 }, { - "epoch": 2.5760869565217392, - "grad_norm": 0.1168751069545925, - "learning_rate": 1.0832796269875757e-06, - "loss": 0.0023, + "epoch": 1.0533333333333332, + "grad_norm": 0.15130120936643068, + "learning_rate": 1.5131052898611818e-05, + "loss": 0.0025, "step": 474 }, { - "epoch": 2.5815217391304346, - "grad_norm": 0.030968178239974237, - "learning_rate": 1.0568514115951256e-06, - "loss": 0.001, + "epoch": 1.0555555555555556, + "grad_norm": 0.17179626833269843, + "learning_rate": 1.5110438373042384e-05, + "loss": 0.0045, "step": 475 }, { - "epoch": 2.5869565217391304, - "grad_norm": 1.2108714841296098, - "learning_rate": 1.0307315727771806e-06, - "loss": 0.0126, + "epoch": 1.0577777777777777, + "grad_norm": 
0.04802461646598465, + "learning_rate": 1.5089794411499718e-05, + "loss": 0.0013, "step": 476 }, { - "epoch": 2.592391304347826, - "grad_norm": 0.027899777268609836, - "learning_rate": 1.0049210111948815e-06, - "loss": 0.0009, + "epoch": 1.06, + "grad_norm": 0.0836178309207032, + "learning_rate": 1.5069121132892432e-05, + "loss": 0.0019, "step": 477 }, { - "epoch": 2.5978260869565215, - "grad_norm": 0.03180410299281123, - "learning_rate": 9.794206168449127e-07, - "loss": 0.0009, + "epoch": 1.0622222222222222, + "grad_norm": 0.10133395526940109, + "learning_rate": 1.504841865629799e-05, + "loss": 0.0018, "step": 478 }, { - "epoch": 2.6032608695652173, - "grad_norm": 0.033244233145600086, - "learning_rate": 9.542312690288035e-07, - "loss": 0.0009, + "epoch": 1.0644444444444445, + "grad_norm": 0.3271578100660573, + "learning_rate": 1.502768710096204e-05, + "loss": 0.0055, "step": 479 }, { - "epoch": 2.608695652173913, - "grad_norm": 0.03761724722059268, - "learning_rate": 9.293538363226196e-07, - "loss": 0.0013, + "epoch": 1.0666666666666667, + "grad_norm": 0.13347725654073384, + "learning_rate": 1.5006926586297725e-05, + "loss": 0.0026, "step": 480 }, { - "epoch": 2.6141304347826084, - "grad_norm": 0.09136376989366057, - "learning_rate": 9.04789176547004e-07, - "loss": 0.0018, + "epoch": 1.068888888888889, + "grad_norm": 0.33161162156140384, + "learning_rate": 1.4986137231885e-05, + "loss": 0.008, "step": 481 }, { - "epoch": 2.619565217391304, - "grad_norm": 0.18059210345284965, - "learning_rate": 8.80538136737602e-07, + "epoch": 1.0711111111111111, + "grad_norm": 0.17441911966746926, + "learning_rate": 1.4965319157469926e-05, "loss": 0.0029, "step": 482 }, { - "epoch": 2.625, - "grad_norm": 0.030807943380701246, - "learning_rate": 8.566015531158534e-07, - "loss": 0.0008, + "epoch": 1.0733333333333333, + "grad_norm": 0.0195196396607905, + "learning_rate": 1.4944472482963993e-05, + "loss": 0.0005, "step": 483 }, { - "epoch": 2.630434782608696, - "grad_norm": 0.05710411212363332, - "learning_rate": 8.329802510601559e-07, - "loss": 0.0014, + "epoch": 1.0755555555555556, + "grad_norm": 0.015384971479331495, + "learning_rate": 1.4923597328443423e-05, + "loss": 0.0004, "step": 484 }, { - "epoch": 2.6358695652173916, - "grad_norm": 0.061848371459409315, - "learning_rate": 8.096750450774071e-07, - "loss": 0.0016, + "epoch": 1.0777777777777777, + "grad_norm": 2.1317679477326115, + "learning_rate": 1.490269381414849e-05, + "loss": 0.1338, "step": 485 }, { - "epoch": 2.641304347826087, - "grad_norm": 1.0253370343843025, - "learning_rate": 7.866867387749199e-07, - "loss": 0.0166, + "epoch": 1.08, + "grad_norm": 2.6248236476598046, + "learning_rate": 1.4881762060482814e-05, + "loss": 0.0612, "step": 486 }, { - "epoch": 2.6467391304347827, - "grad_norm": 0.029136594892818037, - "learning_rate": 7.640161248327061e-07, - "loss": 0.001, + "epoch": 1.0822222222222222, + "grad_norm": 0.28123549962267874, + "learning_rate": 1.4860802188012677e-05, + "loss": 0.0085, "step": 487 }, { - "epoch": 2.6521739130434785, - "grad_norm": 1.092489264260611, - "learning_rate": 7.416639849761531e-07, - "loss": 0.0248, + "epoch": 1.0844444444444445, + "grad_norm": 0.0729643863878409, + "learning_rate": 1.4839814317466317e-05, + "loss": 0.0015, "step": 488 }, { - "epoch": 2.657608695652174, - "grad_norm": 2.2914238948250363, - "learning_rate": 7.196310899490577e-07, - "loss": 0.0723, + "epoch": 1.0866666666666667, + "grad_norm": 0.1361594493056867, + "learning_rate": 1.4818798569733246e-05, + "loss": 0.0047, "step": 489 }, { - 
"epoch": 2.6630434782608696, - "grad_norm": 0.016249601644455224, - "learning_rate": 6.979181994870587e-07, - "loss": 0.0007, + "epoch": 1.0888888888888888, + "grad_norm": 0.5953763769685275, + "learning_rate": 1.4797755065863553e-05, + "loss": 0.0131, "step": 490 }, { - "epoch": 2.6684782608695654, - "grad_norm": 0.021265124563151435, - "learning_rate": 6.765260622914361e-07, - "loss": 0.0007, + "epoch": 1.0911111111111111, + "grad_norm": 1.3812663452542153, + "learning_rate": 1.4776683927067189e-05, + "loss": 0.0328, "step": 491 }, { - "epoch": 2.6739130434782608, - "grad_norm": 0.03831610583206101, - "learning_rate": 6.554554160032899e-07, - "loss": 0.001, + "epoch": 1.0933333333333333, + "grad_norm": 0.027341548470446866, + "learning_rate": 1.4755585274713289e-05, + "loss": 0.0007, "step": 492 }, { - "epoch": 2.6793478260869565, - "grad_norm": 0.03101608692853337, - "learning_rate": 6.347069871781164e-07, - "loss": 0.0009, + "epoch": 1.0955555555555556, + "grad_norm": 0.016825530024494027, + "learning_rate": 1.473445923032946e-05, + "loss": 0.0005, "step": 493 }, { - "epoch": 2.6847826086956523, - "grad_norm": 0.01978989576112469, - "learning_rate": 6.142814912607409e-07, - "loss": 0.0008, + "epoch": 1.0977777777777777, + "grad_norm": 0.15845020053347977, + "learning_rate": 1.47133059156011e-05, + "loss": 0.0019, "step": 494 }, { - "epoch": 2.6902173913043477, - "grad_norm": 0.3852432741704962, - "learning_rate": 5.941796325606574e-07, - "loss": 0.007, + "epoch": 1.1, + "grad_norm": 0.3120365972287205, + "learning_rate": 1.4692125452370664e-05, + "loss": 0.0044, "step": 495 }, { - "epoch": 2.6956521739130435, - "grad_norm": 0.39628033120487305, - "learning_rate": 5.744021042277437e-07, - "loss": 0.0052, + "epoch": 1.1022222222222222, + "grad_norm": 0.054130706399276404, + "learning_rate": 1.4670917962636997e-05, + "loss": 0.0009, "step": 496 }, { - "epoch": 2.7010869565217392, - "grad_norm": 0.09815745867450933, - "learning_rate": 5.549495882283528e-07, - "loss": 0.0019, + "epoch": 1.1044444444444443, + "grad_norm": 2.4116947263228545, + "learning_rate": 1.4649683568554604e-05, + "loss": 0.1431, "step": 497 }, { - "epoch": 2.7065217391304346, - "grad_norm": 2.2778045886314655, - "learning_rate": 5.358227553218031e-07, - "loss": 0.0699, + "epoch": 1.1066666666666667, + "grad_norm": 0.1614746297650992, + "learning_rate": 1.4628422392432969e-05, + "loss": 0.0027, "step": 498 }, { - "epoch": 2.7119565217391304, - "grad_norm": 0.027783255312989117, - "learning_rate": 5.17022265037247e-07, - "loss": 0.0009, + "epoch": 1.1088888888888888, + "grad_norm": 0.013815181647335199, + "learning_rate": 1.4607134556735836e-05, + "loss": 0.0004, "step": 499 }, { - "epoch": 2.717391304347826, - "grad_norm": 0.04524039432637041, - "learning_rate": 4.985487656509313e-07, - "loss": 0.0013, + "epoch": 1.1111111111111112, + "grad_norm": 0.021380461855217348, + "learning_rate": 1.4585820184080502e-05, + "loss": 0.0006, "step": 500 }, { - "epoch": 2.7228260869565215, - "grad_norm": 1.8660426088847626, - "learning_rate": 4.804028941638405e-07, - "loss": 0.0379, + "epoch": 1.1133333333333333, + "grad_norm": 0.04989118438417548, + "learning_rate": 1.4564479397237124e-05, + "loss": 0.0009, "step": 501 }, { - "epoch": 2.7282608695652173, - "grad_norm": 0.05194490259797287, - "learning_rate": 4.6258527627973446e-07, - "loss": 0.0011, + "epoch": 1.1155555555555556, + "grad_norm": 0.10792532216546843, + "learning_rate": 1.4543112319127997e-05, + "loss": 0.0015, "step": 502 }, { - "epoch": 2.733695652173913, - 
"grad_norm": 0.5524275731086881, - "learning_rate": 4.450965263835694e-07, - "loss": 0.0059, + "epoch": 1.1177777777777778, + "grad_norm": 0.026565817960016617, + "learning_rate": 1.4521719072826858e-05, + "loss": 0.0005, "step": 503 }, { - "epoch": 2.7391304347826084, - "grad_norm": 0.09638176861935786, - "learning_rate": 4.2793724752031807e-07, - "loss": 0.0014, + "epoch": 1.12, + "grad_norm": 2.1178422069882292, + "learning_rate": 1.450029978155817e-05, + "loss": 0.0378, "step": 504 }, { - "epoch": 2.744565217391304, - "grad_norm": 1.5902794253403654, - "learning_rate": 4.111080313741711e-07, - "loss": 0.0265, + "epoch": 1.1222222222222222, + "grad_norm": 0.2138131174968219, + "learning_rate": 1.4478854568696419e-05, + "loss": 0.0024, "step": 505 }, { - "epoch": 2.75, - "grad_norm": 0.027472533837749617, - "learning_rate": 3.9460945824813635e-07, - "loss": 0.0007, + "epoch": 1.1244444444444444, + "grad_norm": 0.010603185720887814, + "learning_rate": 1.4457383557765385e-05, + "loss": 0.0003, "step": 506 }, { - "epoch": 2.755434782608696, - "grad_norm": 0.1279143225656888, - "learning_rate": 3.7844209704403055e-07, - "loss": 0.0029, + "epoch": 1.1266666666666667, + "grad_norm": 0.33637528877515427, + "learning_rate": 1.4435886872437456e-05, + "loss": 0.0034, "step": 507 }, { - "epoch": 2.7608695652173916, - "grad_norm": 0.026463459883835142, - "learning_rate": 3.626065052428551e-07, - "loss": 0.0008, + "epoch": 1.1288888888888888, + "grad_norm": 1.9510352800962318, + "learning_rate": 1.4414364636532909e-05, + "loss": 0.061, "step": 508 }, { - "epoch": 2.766304347826087, - "grad_norm": 0.27505638314757236, - "learning_rate": 3.471032288855869e-07, - "loss": 0.0041, + "epoch": 1.1311111111111112, + "grad_norm": 0.12344891206480194, + "learning_rate": 1.4392816974019176e-05, + "loss": 0.0014, "step": 509 }, { - "epoch": 2.7717391304347827, - "grad_norm": 0.03755249242727417, - "learning_rate": 3.3193280255433556e-07, - "loss": 0.0011, + "epoch": 1.1333333333333333, + "grad_norm": 0.009054802593147422, + "learning_rate": 1.437124400901015e-05, + "loss": 0.0002, "step": 510 }, { - "epoch": 2.7771739130434785, - "grad_norm": 1.3351363822022542, - "learning_rate": 3.170957493539195e-07, - "loss": 0.0158, + "epoch": 1.1355555555555557, + "grad_norm": 3.5207246391869402, + "learning_rate": 1.4349645865765476e-05, + "loss": 0.0684, "step": 511 }, { - "epoch": 2.782608695652174, - "grad_norm": 0.02416580008302714, - "learning_rate": 3.0259258089382236e-07, - "loss": 0.0009, + "epoch": 1.1377777777777778, + "grad_norm": 1.0857080956908496, + "learning_rate": 1.4328022668689816e-05, + "loss": 0.0145, "step": 512 }, { - "epoch": 2.7880434782608696, - "grad_norm": 0.24305894735810873, - "learning_rate": 2.88423797270555e-07, - "loss": 0.0033, + "epoch": 1.1400000000000001, + "grad_norm": 0.06020887004857472, + "learning_rate": 1.4306374542332141e-05, + "loss": 0.0015, "step": 513 }, { - "epoch": 2.7934782608695654, - "grad_norm": 0.0170002796253045, - "learning_rate": 2.745898870504116e-07, - "loss": 0.0006, + "epoch": 1.1422222222222222, + "grad_norm": 0.4447625281941771, + "learning_rate": 1.4284701611385015e-05, + "loss": 0.0088, "step": 514 }, { - "epoch": 2.7989130434782608, - "grad_norm": 0.07161898082689806, - "learning_rate": 2.6109132725262166e-07, - "loss": 0.0017, + "epoch": 1.1444444444444444, + "grad_norm": 0.020457031404331416, + "learning_rate": 1.4263004000683877e-05, + "loss": 0.0005, "step": 515 }, { - "epoch": 2.8043478260869565, - "grad_norm": 1.0122242308252756, - "learning_rate": 
2.479285833329015e-07, - "loss": 0.0147, + "epoch": 1.1466666666666667, + "grad_norm": 0.15427187039051454, + "learning_rate": 1.4241281835206323e-05, + "loss": 0.0024, "step": 516 }, { - "epoch": 2.8097826086956523, - "grad_norm": 0.32571610548502183, - "learning_rate": 2.351021091674044e-07, - "loss": 0.0056, + "epoch": 1.1488888888888888, + "grad_norm": 0.11918926923199938, + "learning_rate": 1.4219535240071378e-05, + "loss": 0.0012, "step": 517 }, { - "epoch": 2.8152173913043477, - "grad_norm": 0.04130977709089724, - "learning_rate": 2.226123470370689e-07, - "loss": 0.0012, + "epoch": 1.1511111111111112, + "grad_norm": 0.25092264965378125, + "learning_rate": 1.4197764340538786e-05, + "loss": 0.0042, "step": 518 }, { - "epoch": 2.8206521739130435, - "grad_norm": 1.7181531921107351, - "learning_rate": 2.104597276123721e-07, - "loss": 0.0401, + "epoch": 1.1533333333333333, + "grad_norm": 0.13202886320547122, + "learning_rate": 1.417596926200828e-05, + "loss": 0.0024, "step": 519 }, { - "epoch": 2.8260869565217392, - "grad_norm": 0.02705959014069028, - "learning_rate": 1.9864466993847808e-07, - "loss": 0.0009, + "epoch": 1.1555555555555554, + "grad_norm": 0.07283357320502323, + "learning_rate": 1.4154150130018867e-05, + "loss": 0.0016, "step": 520 }, { - "epoch": 2.8315217391304346, - "grad_norm": 0.018768961604310398, - "learning_rate": 1.8716758142078295e-07, + "epoch": 1.1577777777777778, + "grad_norm": 0.03601087932870431, + "learning_rate": 1.4132307070248094e-05, "loss": 0.0007, "step": 521 }, { - "epoch": 2.8369565217391304, - "grad_norm": 0.018345985510552047, - "learning_rate": 1.7602885781087486e-07, - "loss": 0.0008, + "epoch": 1.16, + "grad_norm": 0.029098623820126967, + "learning_rate": 1.4110440208511345e-05, + "loss": 0.0007, "step": 522 }, { - "epoch": 2.842391304347826, - "grad_norm": 0.02605179058078712, - "learning_rate": 1.6522888319288166e-07, - "loss": 0.0009, + "epoch": 1.1622222222222223, + "grad_norm": 0.14571457254521622, + "learning_rate": 1.4088549670761084e-05, + "loss": 0.0034, "step": 523 }, { - "epoch": 2.8478260869565215, - "grad_norm": 0.03616601605859018, - "learning_rate": 1.5476802997022812e-07, - "loss": 0.001, + "epoch": 1.1644444444444444, + "grad_norm": 0.0331593028578858, + "learning_rate": 1.4066635583086167e-05, + "loss": 0.0007, "step": 524 }, { - "epoch": 2.8532608695652173, - "grad_norm": 0.025004543897905514, - "learning_rate": 1.4464665885279948e-07, - "loss": 0.0008, + "epoch": 1.1666666666666667, + "grad_norm": 0.07897888237311063, + "learning_rate": 1.4044698071711082e-05, + "loss": 0.0021, "step": 525 }, { - "epoch": 2.858695652173913, - "grad_norm": 2.0898366886384756, - "learning_rate": 1.3486511884449827e-07, - "loss": 0.0181, + "epoch": 1.1688888888888889, + "grad_norm": 0.10335450945542281, + "learning_rate": 1.4022737262995248e-05, + "loss": 0.0018, "step": 526 }, { - "epoch": 2.8641304347826084, - "grad_norm": 1.4763355304020689, - "learning_rate": 1.254237472312092e-07, - "loss": 0.0246, + "epoch": 1.1711111111111112, + "grad_norm": 0.07221657576269722, + "learning_rate": 1.4000753283432267e-05, + "loss": 0.0013, "step": 527 }, { - "epoch": 2.869565217391304, - "grad_norm": 0.012470194296572264, - "learning_rate": 1.1632286956917427e-07, - "loss": 0.0006, + "epoch": 1.1733333333333333, + "grad_norm": 0.4460732311254856, + "learning_rate": 1.397874625964921e-05, + "loss": 0.0069, "step": 528 }, { - "epoch": 2.875, - "grad_norm": 0.01829002489612097, - "learning_rate": 1.075627996737627e-07, - "loss": 0.0008, + "epoch": 
1.1755555555555555, + "grad_norm": 0.15752245816209398, + "learning_rate": 1.395671631840588e-05, + "loss": 0.0025, "step": 529 }, { - "epoch": 2.880434782608696, - "grad_norm": 0.38603420185863435, - "learning_rate": 9.914383960865081e-08, - "loss": 0.0047, + "epoch": 1.1777777777777778, + "grad_norm": 0.041671902787855035, + "learning_rate": 1.3934663586594086e-05, + "loss": 0.0007, "step": 530 }, { - "epoch": 2.8858695652173916, - "grad_norm": 0.19367430087338477, - "learning_rate": 9.106627967540915e-08, - "loss": 0.0024, + "epoch": 1.18, + "grad_norm": 0.015926239666124373, + "learning_rate": 1.3912588191236904e-05, + "loss": 0.0004, "step": 531 }, { - "epoch": 2.891304347826087, - "grad_norm": 0.4411181370450418, - "learning_rate": 8.333039840348833e-08, - "loss": 0.0042, + "epoch": 1.1822222222222223, + "grad_norm": 0.00991790987304988, + "learning_rate": 1.3890490259487957e-05, + "loss": 0.0003, "step": 532 }, { - "epoch": 2.8967391304347827, - "grad_norm": 0.012209939294930026, - "learning_rate": 7.593646254061448e-08, - "loss": 0.0006, + "epoch": 1.1844444444444444, + "grad_norm": 1.8312229655908008, + "learning_rate": 1.3868369918630675e-05, + "loss": 0.0799, "step": 533 }, { - "epoch": 2.9021739130434785, - "grad_norm": 0.330056623962809, - "learning_rate": 6.888472704359661e-08, - "loss": 0.006, + "epoch": 1.1866666666666668, + "grad_norm": 0.8104600909061084, + "learning_rate": 1.3846227296077568e-05, + "loss": 0.0089, "step": 534 }, { - "epoch": 2.907608695652174, - "grad_norm": 0.034219507512194006, - "learning_rate": 6.217543506952916e-08, - "loss": 0.001, + "epoch": 1.1888888888888889, + "grad_norm": 0.005401196033189337, + "learning_rate": 1.3824062519369483e-05, + "loss": 0.0002, "step": 535 }, { - "epoch": 2.9130434782608696, - "grad_norm": 0.018554429841025816, - "learning_rate": 5.580881796741322e-08, - "loss": 0.0007, + "epoch": 1.1911111111111112, + "grad_norm": 0.31209215523074685, + "learning_rate": 1.3801875716174874e-05, + "loss": 0.009, "step": 536 }, { - "epoch": 2.9184782608695654, - "grad_norm": 0.03648100584653491, - "learning_rate": 4.978509527017283e-08, + "epoch": 1.1933333333333334, + "grad_norm": 0.03736701128820483, + "learning_rate": 1.3779667014289067e-05, "loss": 0.0009, "step": 537 }, { - "epoch": 2.9239130434782608, - "grad_norm": 0.024703372477637507, - "learning_rate": 4.410447468709001e-08, - "loss": 0.001, + "epoch": 1.1955555555555555, + "grad_norm": 0.022968933697137164, + "learning_rate": 1.3757436541633529e-05, + "loss": 0.0005, "step": 538 }, { - "epoch": 2.9293478260869565, - "grad_norm": 0.04043268074636393, - "learning_rate": 3.8767152096641504e-08, - "loss": 0.001, + "epoch": 1.1977777777777778, + "grad_norm": 0.01322352747670289, + "learning_rate": 1.3735184426255117e-05, + "loss": 0.0004, "step": 539 }, { - "epoch": 2.9347826086956523, - "grad_norm": 0.046268704953150705, - "learning_rate": 3.377331153974206e-08, - "loss": 0.0015, + "epoch": 1.2, + "grad_norm": 0.013658338792694062, + "learning_rate": 1.371291079632536e-05, + "loss": 0.0004, "step": 540 }, { - "epoch": 2.9402173913043477, - "grad_norm": 0.06556550271817785, - "learning_rate": 2.912312521340277e-08, - "loss": 0.001, + "epoch": 1.2022222222222223, + "grad_norm": 0.09535965672015695, + "learning_rate": 1.3690615780139703e-05, + "loss": 0.0018, "step": 541 }, { - "epoch": 2.9456521739130435, - "grad_norm": 0.11585425133268303, - "learning_rate": 2.4816753464789177e-08, - "loss": 0.0018, + "epoch": 1.2044444444444444, + "grad_norm": 0.006426707615038104, + 
"learning_rate": 1.3668299506116772e-05, + "loss": 0.0002, "step": 542 }, { - "epoch": 2.9510869565217392, - "grad_norm": 0.07767672609852741, - "learning_rate": 2.0854344785694593e-08, - "loss": 0.0016, + "epoch": 1.2066666666666666, + "grad_norm": 0.006224410852098417, + "learning_rate": 1.364596210279765e-05, + "loss": 0.0002, "step": 543 }, { - "epoch": 2.9565217391304346, - "grad_norm": 0.4371414896745222, - "learning_rate": 1.7236035807416397e-08, - "loss": 0.0058, + "epoch": 1.208888888888889, + "grad_norm": 0.0065160521778654485, + "learning_rate": 1.3623603698845115e-05, + "loss": 0.0002, "step": 544 }, { - "epoch": 2.9619565217391304, - "grad_norm": 0.05437143993551097, - "learning_rate": 1.3961951296053156e-08, - "loss": 0.0012, + "epoch": 1.211111111111111, + "grad_norm": 3.228782499639943, + "learning_rate": 1.3601224423042906e-05, + "loss": 0.1974, "step": 545 }, { - "epoch": 2.967391304347826, - "grad_norm": 0.08967830928285274, - "learning_rate": 1.1032204148191395e-08, - "loss": 0.0015, + "epoch": 1.2133333333333334, + "grad_norm": 0.010326527745100948, + "learning_rate": 1.357882440429499e-05, + "loss": 0.0003, "step": 546 }, { - "epoch": 2.9728260869565215, - "grad_norm": 0.0632058201324481, - "learning_rate": 8.446895387019815e-09, - "loss": 0.0013, + "epoch": 1.2155555555555555, + "grad_norm": 0.1475986589505389, + "learning_rate": 1.3556403771624809e-05, + "loss": 0.0014, "step": 547 }, { - "epoch": 2.9782608695652173, - "grad_norm": 0.5781101930402831, - "learning_rate": 6.206114158845422e-09, - "loss": 0.0104, + "epoch": 1.2177777777777778, + "grad_norm": 0.22019800909218434, + "learning_rate": 1.3533962654174542e-05, + "loss": 0.0046, "step": 548 }, { - "epoch": 2.983695652173913, - "grad_norm": 0.03599473788037359, - "learning_rate": 4.309937730015978e-09, - "loss": 0.0009, + "epoch": 1.22, + "grad_norm": 3.9812677291350735, + "learning_rate": 1.3511501181204354e-05, + "loss": 0.0699, "step": 549 }, { - "epoch": 2.9891304347826084, - "grad_norm": 0.029545010274494552, - "learning_rate": 2.758431484259916e-09, - "loss": 0.001, + "epoch": 1.2222222222222223, + "grad_norm": 0.044939429672609374, + "learning_rate": 1.348901948209167e-05, + "loss": 0.0009, "step": 550 }, { - "epoch": 2.994565217391304, - "grad_norm": 0.04112363341260623, - "learning_rate": 1.5516489204303598e-09, - "loss": 0.001, + "epoch": 1.2244444444444444, + "grad_norm": 0.011417247754415792, + "learning_rate": 1.3466517686330401e-05, + "loss": 0.0003, "step": 551 }, { - "epoch": 3.0, - "grad_norm": 0.28259585170188845, - "learning_rate": 6.896316506554979e-10, - "loss": 0.0056, + "epoch": 1.2266666666666666, + "grad_norm": 1.1622153868950391, + "learning_rate": 1.344399592353023e-05, + "loss": 0.0139, "step": 552 }, + { + "epoch": 1.228888888888889, + "grad_norm": 2.2795043498639265, + "learning_rate": 1.3421454323415837e-05, + "loss": 0.0872, + "step": 553 + }, + { + "epoch": 1.231111111111111, + "grad_norm": 0.16308629593450735, + "learning_rate": 1.3398893015826166e-05, + "loss": 0.0012, + "step": 554 + }, + { + "epoch": 1.2333333333333334, + "grad_norm": 0.35156823369282647, + "learning_rate": 1.337631213071369e-05, + "loss": 0.0031, + "step": 555 + }, + { + "epoch": 1.2355555555555555, + "grad_norm": 1.3157301521028126, + "learning_rate": 1.3353711798143624e-05, + "loss": 0.016, + "step": 556 + }, + { + "epoch": 1.2377777777777779, + "grad_norm": 0.13415185911508962, + "learning_rate": 1.333109214829322e-05, + "loss": 0.0012, + "step": 557 + }, + { + "epoch": 1.24, + "grad_norm": 
0.28693593486521957, + "learning_rate": 1.3308453311450987e-05, + "loss": 0.0031, + "step": 558 + }, + { + "epoch": 1.2422222222222223, + "grad_norm": 0.3891494397645464, + "learning_rate": 1.328579541801595e-05, + "loss": 0.0037, + "step": 559 + }, + { + "epoch": 1.2444444444444445, + "grad_norm": 0.05185905167840212, + "learning_rate": 1.3263118598496905e-05, + "loss": 0.0009, + "step": 560 + }, + { + "epoch": 1.2466666666666666, + "grad_norm": 0.10559692092896959, + "learning_rate": 1.324042298351166e-05, + "loss": 0.0014, + "step": 561 + }, + { + "epoch": 1.248888888888889, + "grad_norm": 0.035661359215757735, + "learning_rate": 1.321770870378628e-05, + "loss": 0.0008, + "step": 562 + }, + { + "epoch": 1.251111111111111, + "grad_norm": 0.09774004849246036, + "learning_rate": 1.3194975890154344e-05, + "loss": 0.002, + "step": 563 + }, + { + "epoch": 1.2533333333333334, + "grad_norm": 0.8141104558926184, + "learning_rate": 1.3172224673556186e-05, + "loss": 0.0105, + "step": 564 + }, + { + "epoch": 1.2555555555555555, + "grad_norm": 0.014325325998108308, + "learning_rate": 1.3149455185038132e-05, + "loss": 0.0002, + "step": 565 + }, + { + "epoch": 1.2577777777777777, + "grad_norm": 0.07328296772938435, + "learning_rate": 1.3126667555751761e-05, + "loss": 0.0009, + "step": 566 + }, + { + "epoch": 1.26, + "grad_norm": 0.09064660098721963, + "learning_rate": 1.3103861916953142e-05, + "loss": 0.0016, + "step": 567 + }, + { + "epoch": 1.2622222222222224, + "grad_norm": 1.0745067672200206, + "learning_rate": 1.3081038400002078e-05, + "loss": 0.0161, + "step": 568 + }, + { + "epoch": 1.2644444444444445, + "grad_norm": 1.9317836665585848, + "learning_rate": 1.3058197136361344e-05, + "loss": 0.0253, + "step": 569 + }, + { + "epoch": 1.2666666666666666, + "grad_norm": 3.1838116030065273, + "learning_rate": 1.3035338257595946e-05, + "loss": 0.0462, + "step": 570 + }, + { + "epoch": 1.268888888888889, + "grad_norm": 0.003826594957003479, + "learning_rate": 1.3012461895372343e-05, + "loss": 0.0001, + "step": 571 + }, + { + "epoch": 1.271111111111111, + "grad_norm": 0.01586748353148232, + "learning_rate": 1.2989568181457704e-05, + "loss": 0.0004, + "step": 572 + }, + { + "epoch": 1.2733333333333334, + "grad_norm": 1.9699305781332712, + "learning_rate": 1.296665724771914e-05, + "loss": 0.1636, + "step": 573 + }, + { + "epoch": 1.2755555555555556, + "grad_norm": 0.04864348143736564, + "learning_rate": 1.2943729226122952e-05, + "loss": 0.0008, + "step": 574 + }, + { + "epoch": 1.2777777777777777, + "grad_norm": 0.03964630013093727, + "learning_rate": 1.2920784248733857e-05, + "loss": 0.0008, + "step": 575 + }, + { + "epoch": 1.28, + "grad_norm": 0.45589333744745353, + "learning_rate": 1.2897822447714247e-05, + "loss": 0.008, + "step": 576 + }, + { + "epoch": 1.2822222222222222, + "grad_norm": 0.2606137931083498, + "learning_rate": 1.2874843955323418e-05, + "loss": 0.0033, + "step": 577 + }, + { + "epoch": 1.2844444444444445, + "grad_norm": 0.04219156153436745, + "learning_rate": 1.2851848903916792e-05, + "loss": 0.0008, + "step": 578 + }, + { + "epoch": 1.2866666666666666, + "grad_norm": 0.21814672236586619, + "learning_rate": 1.2828837425945193e-05, + "loss": 0.0045, + "step": 579 + }, + { + "epoch": 1.2888888888888888, + "grad_norm": 0.0107509732377622, + "learning_rate": 1.2805809653954045e-05, + "loss": 0.0003, + "step": 580 + }, + { + "epoch": 1.291111111111111, + "grad_norm": 2.7447869866857495, + "learning_rate": 1.2782765720582634e-05, + "loss": 0.1308, + "step": 581 + }, + { + "epoch": 
1.2933333333333334, + "grad_norm": 0.23215347952976342, + "learning_rate": 1.275970575856333e-05, + "loss": 0.0017, + "step": 582 + }, + { + "epoch": 1.2955555555555556, + "grad_norm": 0.05685995468857396, + "learning_rate": 1.2736629900720832e-05, + "loss": 0.0018, + "step": 583 + }, + { + "epoch": 1.2977777777777777, + "grad_norm": 0.027802071867143598, + "learning_rate": 1.271353827997139e-05, + "loss": 0.0006, + "step": 584 + }, + { + "epoch": 1.3, + "grad_norm": 0.07684213107408684, + "learning_rate": 1.2690431029322057e-05, + "loss": 0.0013, + "step": 585 + }, + { + "epoch": 1.3022222222222222, + "grad_norm": 0.16632273876889497, + "learning_rate": 1.266730828186991e-05, + "loss": 0.0026, + "step": 586 + }, + { + "epoch": 1.3044444444444445, + "grad_norm": 0.17259688722365182, + "learning_rate": 1.2644170170801288e-05, + "loss": 0.0018, + "step": 587 + }, + { + "epoch": 1.3066666666666666, + "grad_norm": 0.1544085517068672, + "learning_rate": 1.2621016829391022e-05, + "loss": 0.0043, + "step": 588 + }, + { + "epoch": 1.3088888888888888, + "grad_norm": 0.04345274707415049, + "learning_rate": 1.2597848391001675e-05, + "loss": 0.0008, + "step": 589 + }, + { + "epoch": 1.3111111111111111, + "grad_norm": 0.0637365420573487, + "learning_rate": 1.257466498908276e-05, + "loss": 0.0008, + "step": 590 + }, + { + "epoch": 1.3133333333333335, + "grad_norm": 0.3638123221320406, + "learning_rate": 1.2551466757169984e-05, + "loss": 0.0081, + "step": 591 + }, + { + "epoch": 1.3155555555555556, + "grad_norm": 0.3829401682850741, + "learning_rate": 1.2528253828884473e-05, + "loss": 0.0052, + "step": 592 + }, + { + "epoch": 1.3177777777777777, + "grad_norm": 0.35769957275528813, + "learning_rate": 1.2505026337932005e-05, + "loss": 0.0082, + "step": 593 + }, + { + "epoch": 1.32, + "grad_norm": 1.358635509256384, + "learning_rate": 1.248178441810224e-05, + "loss": 0.0507, + "step": 594 + }, + { + "epoch": 1.3222222222222222, + "grad_norm": 0.029220498098318323, + "learning_rate": 1.2458528203267945e-05, + "loss": 0.0007, + "step": 595 + }, + { + "epoch": 1.3244444444444445, + "grad_norm": 0.6866544711130306, + "learning_rate": 1.2435257827384224e-05, + "loss": 0.011, + "step": 596 + }, + { + "epoch": 1.3266666666666667, + "grad_norm": 0.2593707902898727, + "learning_rate": 1.2411973424487751e-05, + "loss": 0.0038, + "step": 597 + }, + { + "epoch": 1.3288888888888888, + "grad_norm": 0.011239889982804811, + "learning_rate": 1.2388675128696001e-05, + "loss": 0.0003, + "step": 598 + }, + { + "epoch": 1.3311111111111111, + "grad_norm": 0.022691155037312498, + "learning_rate": 1.236536307420646e-05, + "loss": 0.0005, + "step": 599 + }, + { + "epoch": 1.3333333333333333, + "grad_norm": 0.24824465485289093, + "learning_rate": 1.2342037395295871e-05, + "loss": 0.0059, + "step": 600 + }, + { + "epoch": 1.3355555555555556, + "grad_norm": 0.07573052707956424, + "learning_rate": 1.2318698226319452e-05, + "loss": 0.0011, + "step": 601 + }, + { + "epoch": 1.3377777777777777, + "grad_norm": 0.08903811478710244, + "learning_rate": 1.2295345701710124e-05, + "loss": 0.0014, + "step": 602 + }, + { + "epoch": 1.34, + "grad_norm": 0.03018580159131135, + "learning_rate": 1.2271979955977733e-05, + "loss": 0.0007, + "step": 603 + }, + { + "epoch": 1.3422222222222222, + "grad_norm": 0.03441284031799072, + "learning_rate": 1.2248601123708279e-05, + "loss": 0.0007, + "step": 604 + }, + { + "epoch": 1.3444444444444446, + "grad_norm": 0.02410550657954675, + "learning_rate": 1.2225209339563144e-05, + "loss": 0.0005, + "step": 605 + }, 
+ { + "epoch": 1.3466666666666667, + "grad_norm": 0.044407497291730456, + "learning_rate": 1.2201804738278311e-05, + "loss": 0.0005, + "step": 606 + }, + { + "epoch": 1.3488888888888888, + "grad_norm": 0.012542803938978902, + "learning_rate": 1.2178387454663587e-05, + "loss": 0.0003, + "step": 607 + }, + { + "epoch": 1.3511111111111112, + "grad_norm": 0.04671297099539908, + "learning_rate": 1.2154957623601831e-05, + "loss": 0.0008, + "step": 608 + }, + { + "epoch": 1.3533333333333333, + "grad_norm": 0.07040331238246046, + "learning_rate": 1.2131515380048171e-05, + "loss": 0.0009, + "step": 609 + }, + { + "epoch": 1.3555555555555556, + "grad_norm": 0.010470686345511897, + "learning_rate": 1.2108060859029233e-05, + "loss": 0.0003, + "step": 610 + }, + { + "epoch": 1.3577777777777778, + "grad_norm": 0.06752888434009897, + "learning_rate": 1.2084594195642367e-05, + "loss": 0.0009, + "step": 611 + }, + { + "epoch": 1.3599999999999999, + "grad_norm": 0.00955281351645415, + "learning_rate": 1.2061115525054855e-05, + "loss": 0.0002, + "step": 612 + }, + { + "epoch": 1.3622222222222222, + "grad_norm": 0.01571339217707317, + "learning_rate": 1.2037624982503135e-05, + "loss": 0.0004, + "step": 613 + }, + { + "epoch": 1.3644444444444446, + "grad_norm": 0.019085097817798727, + "learning_rate": 1.2014122703292047e-05, + "loss": 0.0005, + "step": 614 + }, + { + "epoch": 1.3666666666666667, + "grad_norm": 0.1738137034687499, + "learning_rate": 1.1990608822794007e-05, + "loss": 0.0033, + "step": 615 + }, + { + "epoch": 1.3688888888888888, + "grad_norm": 0.012764536735770715, + "learning_rate": 1.1967083476448282e-05, + "loss": 0.0003, + "step": 616 + }, + { + "epoch": 1.3711111111111112, + "grad_norm": 1.2778573209406405, + "learning_rate": 1.1943546799760161e-05, + "loss": 0.0094, + "step": 617 + }, + { + "epoch": 1.3733333333333333, + "grad_norm": 0.015222687647785218, + "learning_rate": 1.1919998928300203e-05, + "loss": 0.0004, + "step": 618 + }, + { + "epoch": 1.3755555555555556, + "grad_norm": 0.01414115744440498, + "learning_rate": 1.1896439997703446e-05, + "loss": 0.0003, + "step": 619 + }, + { + "epoch": 1.3777777777777778, + "grad_norm": 0.008242605788693125, + "learning_rate": 1.1872870143668635e-05, + "loss": 0.0002, + "step": 620 + }, + { + "epoch": 1.38, + "grad_norm": 0.16196735490392988, + "learning_rate": 1.1849289501957429e-05, + "loss": 0.0039, + "step": 621 + }, + { + "epoch": 1.3822222222222222, + "grad_norm": 0.049102594286280506, + "learning_rate": 1.182569820839362e-05, + "loss": 0.0009, + "step": 622 + }, + { + "epoch": 1.3844444444444444, + "grad_norm": 0.010023338316956914, + "learning_rate": 1.1802096398862359e-05, + "loss": 0.0002, + "step": 623 + }, + { + "epoch": 1.3866666666666667, + "grad_norm": 0.05530939028984656, + "learning_rate": 1.1778484209309368e-05, + "loss": 0.0013, + "step": 624 + }, + { + "epoch": 1.3888888888888888, + "grad_norm": 0.35424446781633123, + "learning_rate": 1.1754861775740163e-05, + "loss": 0.0055, + "step": 625 + }, + { + "epoch": 1.3911111111111112, + "grad_norm": 0.06762069046289652, + "learning_rate": 1.1731229234219253e-05, + "loss": 0.0011, + "step": 626 + }, + { + "epoch": 1.3933333333333333, + "grad_norm": 0.009247773772932856, + "learning_rate": 1.1707586720869375e-05, + "loss": 0.0003, + "step": 627 + }, + { + "epoch": 1.3955555555555557, + "grad_norm": 0.01452987805532771, + "learning_rate": 1.168393437187071e-05, + "loss": 0.0003, + "step": 628 + }, + { + "epoch": 1.3977777777777778, + "grad_norm": 0.828597506148214, + "learning_rate": 
1.166027232346008e-05, + "loss": 0.0097, + "step": 629 + }, + { + "epoch": 1.4, + "grad_norm": 0.010718939360474211, + "learning_rate": 1.1636600711930184e-05, + "loss": 0.0003, + "step": 630 + }, + { + "epoch": 1.4022222222222223, + "grad_norm": 0.0757637666190123, + "learning_rate": 1.1612919673628798e-05, + "loss": 0.0006, + "step": 631 + }, + { + "epoch": 1.4044444444444444, + "grad_norm": 0.005293326798975234, + "learning_rate": 1.1589229344958e-05, + "loss": 0.0001, + "step": 632 + }, + { + "epoch": 1.4066666666666667, + "grad_norm": 0.0038650344970012573, + "learning_rate": 1.1565529862373382e-05, + "loss": 0.0001, + "step": 633 + }, + { + "epoch": 1.4088888888888889, + "grad_norm": 0.06807433650458707, + "learning_rate": 1.154182136238326e-05, + "loss": 0.0012, + "step": 634 + }, + { + "epoch": 1.411111111111111, + "grad_norm": 0.006973771055011823, + "learning_rate": 1.1518103981547889e-05, + "loss": 0.0002, + "step": 635 + }, + { + "epoch": 1.4133333333333333, + "grad_norm": 0.009862019588853702, + "learning_rate": 1.1494377856478674e-05, + "loss": 0.0002, + "step": 636 + }, + { + "epoch": 1.4155555555555557, + "grad_norm": 0.005630703511541077, + "learning_rate": 1.1470643123837395e-05, + "loss": 0.0002, + "step": 637 + }, + { + "epoch": 1.4177777777777778, + "grad_norm": 0.0036995121849795224, + "learning_rate": 1.1446899920335407e-05, + "loss": 0.0001, + "step": 638 + }, + { + "epoch": 1.42, + "grad_norm": 0.036774788522589814, + "learning_rate": 1.1423148382732854e-05, + "loss": 0.0005, + "step": 639 + }, + { + "epoch": 1.4222222222222223, + "grad_norm": 0.024359443567447205, + "learning_rate": 1.1399388647837888e-05, + "loss": 0.0005, + "step": 640 + }, + { + "epoch": 1.4244444444444444, + "grad_norm": 0.0029742558317965057, + "learning_rate": 1.1375620852505878e-05, + "loss": 0.0001, + "step": 641 + }, + { + "epoch": 1.4266666666666667, + "grad_norm": 0.018680318608905518, + "learning_rate": 1.135184513363862e-05, + "loss": 0.0003, + "step": 642 + }, + { + "epoch": 1.4288888888888889, + "grad_norm": 0.006053504031972038, + "learning_rate": 1.1328061628183546e-05, + "loss": 0.0002, + "step": 643 + }, + { + "epoch": 1.431111111111111, + "grad_norm": 0.0073340025905183565, + "learning_rate": 1.130427047313294e-05, + "loss": 0.0002, + "step": 644 + }, + { + "epoch": 1.4333333333333333, + "grad_norm": 0.0037014816762071428, + "learning_rate": 1.1280471805523153e-05, + "loss": 0.0001, + "step": 645 + }, + { + "epoch": 1.4355555555555555, + "grad_norm": 0.004685604342327909, + "learning_rate": 1.1256665762433798e-05, + "loss": 0.0001, + "step": 646 + }, + { + "epoch": 1.4377777777777778, + "grad_norm": 0.10314596200459762, + "learning_rate": 1.123285248098698e-05, + "loss": 0.0011, + "step": 647 + }, + { + "epoch": 1.44, + "grad_norm": 3.7415363617504487, + "learning_rate": 1.1209032098346493e-05, + "loss": 0.1342, + "step": 648 + }, + { + "epoch": 1.4422222222222223, + "grad_norm": 0.0033335508221702904, + "learning_rate": 1.118520475171703e-05, + "loss": 0.0001, + "step": 649 + }, + { + "epoch": 1.4444444444444444, + "grad_norm": 0.0038837631587483098, + "learning_rate": 1.1161370578343398e-05, + "loss": 0.0001, + "step": 650 + }, + { + "epoch": 1.4466666666666668, + "grad_norm": 0.017613850604919906, + "learning_rate": 1.1137529715509736e-05, + "loss": 0.0003, + "step": 651 + }, + { + "epoch": 1.448888888888889, + "grad_norm": 0.0019730528535085124, + "learning_rate": 1.1113682300538702e-05, + "loss": 0.0001, + "step": 652 + }, + { + "epoch": 1.451111111111111, + "grad_norm": 
0.1007497324364346, + "learning_rate": 1.1089828470790694e-05, + "loss": 0.0012, + "step": 653 + }, + { + "epoch": 1.4533333333333334, + "grad_norm": 1.3565848898645916, + "learning_rate": 1.1065968363663069e-05, + "loss": 0.0165, + "step": 654 + }, + { + "epoch": 1.4555555555555555, + "grad_norm": 0.10702620133801824, + "learning_rate": 1.1042102116589331e-05, + "loss": 0.0016, + "step": 655 + }, + { + "epoch": 1.4577777777777778, + "grad_norm": 0.6428937384338443, + "learning_rate": 1.1018229867038358e-05, + "loss": 0.0053, + "step": 656 + }, + { + "epoch": 1.46, + "grad_norm": 0.007033494299504707, + "learning_rate": 1.0994351752513593e-05, + "loss": 0.0002, + "step": 657 + }, + { + "epoch": 1.462222222222222, + "grad_norm": 0.07568813353340668, + "learning_rate": 1.0970467910552267e-05, + "loss": 0.0011, + "step": 658 + }, + { + "epoch": 1.4644444444444444, + "grad_norm": 0.006216197672565887, + "learning_rate": 1.0946578478724603e-05, + "loss": 0.0001, + "step": 659 + }, + { + "epoch": 1.4666666666666668, + "grad_norm": 0.006290084916689003, + "learning_rate": 1.092268359463302e-05, + "loss": 0.0002, + "step": 660 + }, + { + "epoch": 1.468888888888889, + "grad_norm": 0.008720497349279878, + "learning_rate": 1.0898783395911341e-05, + "loss": 0.0002, + "step": 661 + }, + { + "epoch": 1.471111111111111, + "grad_norm": 0.0057602106228413635, + "learning_rate": 1.0874878020223994e-05, + "loss": 0.0002, + "step": 662 + }, + { + "epoch": 1.4733333333333334, + "grad_norm": 0.2740229303868954, + "learning_rate": 1.085096760526524e-05, + "loss": 0.004, + "step": 663 + }, + { + "epoch": 1.4755555555555555, + "grad_norm": 0.7778502397800938, + "learning_rate": 1.0827052288758357e-05, + "loss": 0.0124, + "step": 664 + }, + { + "epoch": 1.4777777777777779, + "grad_norm": 0.005870117614021594, + "learning_rate": 1.0803132208454858e-05, + "loss": 0.0002, + "step": 665 + }, + { + "epoch": 1.48, + "grad_norm": 0.008306183150058374, + "learning_rate": 1.077920750213369e-05, + "loss": 0.0002, + "step": 666 + }, + { + "epoch": 1.482222222222222, + "grad_norm": 0.16653585759429312, + "learning_rate": 1.0755278307600459e-05, + "loss": 0.001, + "step": 667 + }, + { + "epoch": 1.4844444444444445, + "grad_norm": 4.770868893131752, + "learning_rate": 1.0731344762686606e-05, + "loss": 0.05, + "step": 668 + }, + { + "epoch": 1.4866666666666668, + "grad_norm": 0.012031490283950867, + "learning_rate": 1.0707407005248647e-05, + "loss": 0.0002, + "step": 669 + }, + { + "epoch": 1.488888888888889, + "grad_norm": 0.6110919104937079, + "learning_rate": 1.068346517316735e-05, + "loss": 0.0116, + "step": 670 + }, + { + "epoch": 1.491111111111111, + "grad_norm": 1.226161614585195, + "learning_rate": 1.0659519404346955e-05, + "loss": 0.0132, + "step": 671 + }, + { + "epoch": 1.4933333333333334, + "grad_norm": 0.14901656263992644, + "learning_rate": 1.0635569836714384e-05, + "loss": 0.003, + "step": 672 + }, + { + "epoch": 1.4955555555555555, + "grad_norm": 3.655641469627834, + "learning_rate": 1.0611616608218429e-05, + "loss": 0.2046, + "step": 673 + }, + { + "epoch": 1.4977777777777779, + "grad_norm": 9.020498449790125, + "learning_rate": 1.058765985682898e-05, + "loss": 0.0744, + "step": 674 + }, + { + "epoch": 1.5, + "grad_norm": 0.11112151149192631, + "learning_rate": 1.0563699720536209e-05, + "loss": 0.0006, + "step": 675 + }, + { + "epoch": 1.5022222222222221, + "grad_norm": 0.027242358729667567, + "learning_rate": 1.0539736337349792e-05, + "loss": 0.0005, + "step": 676 + }, + { + "epoch": 1.5044444444444445, + 
"grad_norm": 0.01494205339092187, + "learning_rate": 1.0515769845298106e-05, + "loss": 0.0003, + "step": 677 + }, + { + "epoch": 1.5066666666666668, + "grad_norm": 0.1409762748004615, + "learning_rate": 1.0491800382427429e-05, + "loss": 0.0012, + "step": 678 + }, + { + "epoch": 1.508888888888889, + "grad_norm": 0.11538707659739494, + "learning_rate": 1.0467828086801158e-05, + "loss": 0.001, + "step": 679 + }, + { + "epoch": 1.511111111111111, + "grad_norm": 0.04070360794967824, + "learning_rate": 1.0443853096499e-05, + "loss": 0.0007, + "step": 680 + }, + { + "epoch": 1.5133333333333332, + "grad_norm": 0.01871689843015799, + "learning_rate": 1.0419875549616196e-05, + "loss": 0.0004, + "step": 681 + }, + { + "epoch": 1.5155555555555555, + "grad_norm": 0.05123157746090478, + "learning_rate": 1.0395895584262696e-05, + "loss": 0.0009, + "step": 682 + }, + { + "epoch": 1.517777777777778, + "grad_norm": 0.21881223368268174, + "learning_rate": 1.0371913338562391e-05, + "loss": 0.0023, + "step": 683 + }, + { + "epoch": 1.52, + "grad_norm": 0.15898779905859248, + "learning_rate": 1.03479289506523e-05, + "loss": 0.0018, + "step": 684 + }, + { + "epoch": 1.5222222222222221, + "grad_norm": 0.08650153259070616, + "learning_rate": 1.032394255868179e-05, + "loss": 0.0014, + "step": 685 + }, + { + "epoch": 1.5244444444444445, + "grad_norm": 0.2926167131176993, + "learning_rate": 1.0299954300811763e-05, + "loss": 0.0031, + "step": 686 + }, + { + "epoch": 1.5266666666666666, + "grad_norm": 2.0363754437897104, + "learning_rate": 1.0275964315213873e-05, + "loss": 0.1389, + "step": 687 + }, + { + "epoch": 1.528888888888889, + "grad_norm": 0.06648367684121505, + "learning_rate": 1.0251972740069724e-05, + "loss": 0.0013, + "step": 688 + }, + { + "epoch": 1.531111111111111, + "grad_norm": 0.14397126691639991, + "learning_rate": 1.022797971357008e-05, + "loss": 0.002, + "step": 689 + }, + { + "epoch": 1.5333333333333332, + "grad_norm": 0.39653657931505837, + "learning_rate": 1.0203985373914056e-05, + "loss": 0.0055, + "step": 690 + }, + { + "epoch": 1.5355555555555556, + "grad_norm": 0.09043983057933398, + "learning_rate": 1.0179989859308337e-05, + "loss": 0.0016, + "step": 691 + }, + { + "epoch": 1.537777777777778, + "grad_norm": 0.041824481747261125, + "learning_rate": 1.0155993307966372e-05, + "loss": 0.001, + "step": 692 + }, + { + "epoch": 1.54, + "grad_norm": 0.04487245582263562, + "learning_rate": 1.013199585810759e-05, + "loss": 0.0008, + "step": 693 + }, + { + "epoch": 1.5422222222222222, + "grad_norm": 0.0732635491238001, + "learning_rate": 1.0107997647956587e-05, + "loss": 0.0012, + "step": 694 + }, + { + "epoch": 1.5444444444444443, + "grad_norm": 0.41128277752017367, + "learning_rate": 1.0083998815742335e-05, + "loss": 0.0052, + "step": 695 + }, + { + "epoch": 1.5466666666666666, + "grad_norm": 0.4611056403705125, + "learning_rate": 1.0059999499697403e-05, + "loss": 0.0041, + "step": 696 + }, + { + "epoch": 1.548888888888889, + "grad_norm": 0.07192286262550385, + "learning_rate": 1.0035999838057133e-05, + "loss": 0.0013, + "step": 697 + }, + { + "epoch": 1.551111111111111, + "grad_norm": 1.0267918998156909, + "learning_rate": 1.0011999969058867e-05, + "loss": 0.017, + "step": 698 + }, + { + "epoch": 1.5533333333333332, + "grad_norm": 3.1782228389299507, + "learning_rate": 9.988000030941134e-06, + "loss": 0.0413, + "step": 699 + }, + { + "epoch": 1.5555555555555556, + "grad_norm": 0.04719594561332705, + "learning_rate": 9.964000161942867e-06, + "loss": 0.0008, + "step": 700 + }, + { + "epoch": 
1.557777777777778, + "grad_norm": 0.32506687857018934, + "learning_rate": 9.940000500302599e-06, + "loss": 0.0064, + "step": 701 + }, + { + "epoch": 1.56, + "grad_norm": 0.07253602489629701, + "learning_rate": 9.916001184257668e-06, + "loss": 0.0011, + "step": 702 + }, + { + "epoch": 1.5622222222222222, + "grad_norm": 4.151421126179543, + "learning_rate": 9.892002352043417e-06, + "loss": 0.0832, + "step": 703 + }, + { + "epoch": 1.5644444444444443, + "grad_norm": 0.049528938860719386, + "learning_rate": 9.868004141892412e-06, + "loss": 0.0009, + "step": 704 + }, + { + "epoch": 1.5666666666666667, + "grad_norm": 0.14865109809262217, + "learning_rate": 9.84400669203363e-06, + "loss": 0.0014, + "step": 705 + }, + { + "epoch": 1.568888888888889, + "grad_norm": 0.06033572078309496, + "learning_rate": 9.820010140691668e-06, + "loss": 0.0012, + "step": 706 + }, + { + "epoch": 1.5711111111111111, + "grad_norm": 0.6535764809555968, + "learning_rate": 9.79601462608595e-06, + "loss": 0.0132, + "step": 707 + }, + { + "epoch": 1.5733333333333333, + "grad_norm": 0.030929282054774264, + "learning_rate": 9.772020286429922e-06, + "loss": 0.0007, + "step": 708 + }, + { + "epoch": 1.5755555555555556, + "grad_norm": 0.17462870025876087, + "learning_rate": 9.748027259930276e-06, + "loss": 0.0016, + "step": 709 + }, + { + "epoch": 1.5777777777777777, + "grad_norm": 1.4888788322698738, + "learning_rate": 9.72403568478613e-06, + "loss": 0.1493, + "step": 710 + }, + { + "epoch": 1.58, + "grad_norm": 1.7833018552897537, + "learning_rate": 9.70004569918824e-06, + "loss": 0.0876, + "step": 711 + }, + { + "epoch": 1.5822222222222222, + "grad_norm": 0.016903977543855465, + "learning_rate": 9.676057441318212e-06, + "loss": 0.0004, + "step": 712 + }, + { + "epoch": 1.5844444444444443, + "grad_norm": 1.0540602509090238, + "learning_rate": 9.652071049347703e-06, + "loss": 0.0208, + "step": 713 + }, + { + "epoch": 1.5866666666666667, + "grad_norm": 0.04369111302987414, + "learning_rate": 9.628086661437615e-06, + "loss": 0.0008, + "step": 714 + }, + { + "epoch": 1.588888888888889, + "grad_norm": 0.15205831960110472, + "learning_rate": 9.604104415737309e-06, + "loss": 0.0035, + "step": 715 + }, + { + "epoch": 1.5911111111111111, + "grad_norm": 0.3511971112139258, + "learning_rate": 9.580124450383804e-06, + "loss": 0.0089, + "step": 716 + }, + { + "epoch": 1.5933333333333333, + "grad_norm": 0.17788014004888028, + "learning_rate": 9.556146903500997e-06, + "loss": 0.0026, + "step": 717 + }, + { + "epoch": 1.5955555555555554, + "grad_norm": 0.011752152963998678, + "learning_rate": 9.532171913198844e-06, + "loss": 0.0003, + "step": 718 + }, + { + "epoch": 1.5977777777777777, + "grad_norm": 0.021113250528307433, + "learning_rate": 9.508199617572574e-06, + "loss": 0.0006, + "step": 719 + }, + { + "epoch": 1.6, + "grad_norm": 0.022412804470793496, + "learning_rate": 9.4842301547019e-06, + "loss": 0.0004, + "step": 720 + }, + { + "epoch": 1.6022222222222222, + "grad_norm": 0.05292966257545496, + "learning_rate": 9.460263662650209e-06, + "loss": 0.0012, + "step": 721 + }, + { + "epoch": 1.6044444444444443, + "grad_norm": 0.015788712677888526, + "learning_rate": 9.436300279463794e-06, + "loss": 0.0004, + "step": 722 + }, + { + "epoch": 1.6066666666666667, + "grad_norm": 0.03410745277923201, + "learning_rate": 9.412340143171025e-06, + "loss": 0.001, + "step": 723 + }, + { + "epoch": 1.608888888888889, + "grad_norm": 0.045514090409174625, + "learning_rate": 9.388383391781576e-06, + "loss": 0.0009, + "step": 724 + }, + { + "epoch": 
1.6111111111111112, + "grad_norm": 0.32101783191976146, + "learning_rate": 9.364430163285618e-06, + "loss": 0.0041, + "step": 725 + }, + { + "epoch": 1.6133333333333333, + "grad_norm": 0.029572487281167136, + "learning_rate": 9.340480595653047e-06, + "loss": 0.0007, + "step": 726 + }, + { + "epoch": 1.6155555555555554, + "grad_norm": 0.025433597527066996, + "learning_rate": 9.316534826832652e-06, + "loss": 0.0006, + "step": 727 + }, + { + "epoch": 1.6177777777777778, + "grad_norm": 0.09311745059791464, + "learning_rate": 9.292592994751356e-06, + "loss": 0.0013, + "step": 728 + }, + { + "epoch": 1.62, + "grad_norm": 0.012074255664018456, + "learning_rate": 9.268655237313397e-06, + "loss": 0.0003, + "step": 729 + }, + { + "epoch": 1.6222222222222222, + "grad_norm": 0.012997333181243298, + "learning_rate": 9.244721692399545e-06, + "loss": 0.0003, + "step": 730 + }, + { + "epoch": 1.6244444444444444, + "grad_norm": 0.01818383924441848, + "learning_rate": 9.220792497866313e-06, + "loss": 0.0003, + "step": 731 + }, + { + "epoch": 1.6266666666666667, + "grad_norm": 0.18780373065583086, + "learning_rate": 9.196867791545148e-06, + "loss": 0.0041, + "step": 732 + }, + { + "epoch": 1.628888888888889, + "grad_norm": 0.1560785907528772, + "learning_rate": 9.172947711241648e-06, + "loss": 0.0044, + "step": 733 + }, + { + "epoch": 1.6311111111111112, + "grad_norm": 0.012696502776696289, + "learning_rate": 9.14903239473476e-06, + "loss": 0.0003, + "step": 734 + }, + { + "epoch": 1.6333333333333333, + "grad_norm": 0.037853671190872275, + "learning_rate": 9.125121979776006e-06, + "loss": 0.0008, + "step": 735 + }, + { + "epoch": 1.6355555555555554, + "grad_norm": 0.0979786997705931, + "learning_rate": 9.101216604088662e-06, + "loss": 0.0011, + "step": 736 + }, + { + "epoch": 1.6377777777777778, + "grad_norm": 0.0623613704253322, + "learning_rate": 9.07731640536698e-06, + "loss": 0.0013, + "step": 737 + }, + { + "epoch": 1.6400000000000001, + "grad_norm": 0.14435597908685496, + "learning_rate": 9.0534215212754e-06, + "loss": 0.0036, + "step": 738 + }, + { + "epoch": 1.6422222222222222, + "grad_norm": 0.02978797023965623, + "learning_rate": 9.029532089447736e-06, + "loss": 0.0007, + "step": 739 + }, + { + "epoch": 1.6444444444444444, + "grad_norm": 0.014868962313862664, + "learning_rate": 9.005648247486412e-06, + "loss": 0.0003, + "step": 740 + }, + { + "epoch": 1.6466666666666665, + "grad_norm": 0.014059777886604851, + "learning_rate": 8.981770132961649e-06, + "loss": 0.0004, + "step": 741 + }, + { + "epoch": 1.6488888888888888, + "grad_norm": 0.015956510299049948, + "learning_rate": 8.957897883410669e-06, + "loss": 0.0003, + "step": 742 + }, + { + "epoch": 1.6511111111111112, + "grad_norm": 0.5905592698287212, + "learning_rate": 8.934031636336931e-06, + "loss": 0.0121, + "step": 743 + }, + { + "epoch": 1.6533333333333333, + "grad_norm": 0.02227491259130784, + "learning_rate": 8.910171529209306e-06, + "loss": 0.0004, + "step": 744 + }, + { + "epoch": 1.6555555555555554, + "grad_norm": 1.7506973235796142, + "learning_rate": 8.886317699461302e-06, + "loss": 0.031, + "step": 745 + }, + { + "epoch": 1.6577777777777778, + "grad_norm": 0.010901747490044121, + "learning_rate": 8.862470284490266e-06, + "loss": 0.0003, + "step": 746 + }, + { + "epoch": 1.6600000000000001, + "grad_norm": 1.5438709665002142, + "learning_rate": 8.838629421656604e-06, + "loss": 0.1237, + "step": 747 + }, + { + "epoch": 1.6622222222222223, + "grad_norm": 0.048152832873583136, + "learning_rate": 8.814795248282974e-06, + "loss": 0.0007, + 
"step": 748 + }, + { + "epoch": 1.6644444444444444, + "grad_norm": 0.00831518529886393, + "learning_rate": 8.790967901653512e-06, + "loss": 0.0002, + "step": 749 + }, + { + "epoch": 1.6666666666666665, + "grad_norm": 0.03747955518762971, + "learning_rate": 8.767147519013024e-06, + "loss": 0.0006, + "step": 750 + }, + { + "epoch": 1.6688888888888889, + "grad_norm": 0.07657351651376891, + "learning_rate": 8.743334237566202e-06, + "loss": 0.0013, + "step": 751 + }, + { + "epoch": 1.6711111111111112, + "grad_norm": 0.009249458606825666, + "learning_rate": 8.719528194476849e-06, + "loss": 0.0002, + "step": 752 + }, + { + "epoch": 1.6733333333333333, + "grad_norm": 0.009284298802179751, + "learning_rate": 8.695729526867061e-06, + "loss": 0.0003, + "step": 753 + }, + { + "epoch": 1.6755555555555555, + "grad_norm": 0.01892472297941197, + "learning_rate": 8.671938371816457e-06, + "loss": 0.0004, + "step": 754 + }, + { + "epoch": 1.6777777777777778, + "grad_norm": 0.014409642565937627, + "learning_rate": 8.648154866361384e-06, + "loss": 0.0003, + "step": 755 + }, + { + "epoch": 1.6800000000000002, + "grad_norm": 0.008171506393222436, + "learning_rate": 8.624379147494126e-06, + "loss": 0.0002, + "step": 756 + }, + { + "epoch": 1.6822222222222223, + "grad_norm": 0.018463449448706894, + "learning_rate": 8.600611352162115e-06, + "loss": 0.0004, + "step": 757 + }, + { + "epoch": 1.6844444444444444, + "grad_norm": 0.04419329195025522, + "learning_rate": 8.576851617267151e-06, + "loss": 0.0006, + "step": 758 + }, + { + "epoch": 1.6866666666666665, + "grad_norm": 0.02163499768068382, + "learning_rate": 8.553100079664598e-06, + "loss": 0.0004, + "step": 759 + }, + { + "epoch": 1.6888888888888889, + "grad_norm": 0.013187526457943157, + "learning_rate": 8.529356876162606e-06, + "loss": 0.0003, + "step": 760 + }, + { + "epoch": 1.6911111111111112, + "grad_norm": 0.43042377213432265, + "learning_rate": 8.505622143521327e-06, + "loss": 0.0083, + "step": 761 + }, + { + "epoch": 1.6933333333333334, + "grad_norm": 1.9277373954839263, + "learning_rate": 8.481896018452115e-06, + "loss": 0.0684, + "step": 762 + }, + { + "epoch": 1.6955555555555555, + "grad_norm": 0.0041233811816627506, + "learning_rate": 8.458178637616743e-06, + "loss": 0.0001, + "step": 763 + }, + { + "epoch": 1.6977777777777778, + "grad_norm": 0.07677591330405917, + "learning_rate": 8.43447013762662e-06, + "loss": 0.0016, + "step": 764 + }, + { + "epoch": 1.7, + "grad_norm": 0.023700237021973214, + "learning_rate": 8.410770655042003e-06, + "loss": 0.0005, + "step": 765 + }, + { + "epoch": 1.7022222222222223, + "grad_norm": 0.5539985642982581, + "learning_rate": 8.387080326371207e-06, + "loss": 0.0084, + "step": 766 + }, + { + "epoch": 1.7044444444444444, + "grad_norm": 0.011038549981672125, + "learning_rate": 8.363399288069821e-06, + "loss": 0.0003, + "step": 767 + }, + { + "epoch": 1.7066666666666666, + "grad_norm": 0.006588286548204523, + "learning_rate": 8.33972767653992e-06, + "loss": 0.0002, + "step": 768 + }, + { + "epoch": 1.708888888888889, + "grad_norm": 0.008394306829172917, + "learning_rate": 8.31606562812929e-06, + "loss": 0.0002, + "step": 769 + }, + { + "epoch": 1.7111111111111112, + "grad_norm": 0.01074870384603595, + "learning_rate": 8.292413279130625e-06, + "loss": 0.0002, + "step": 770 + }, + { + "epoch": 1.7133333333333334, + "grad_norm": 2.1420364989007967, + "learning_rate": 8.26877076578075e-06, + "loss": 0.0871, + "step": 771 + }, + { + "epoch": 1.7155555555555555, + "grad_norm": 0.004847317779493671, + "learning_rate": 
8.24513822425984e-06, + "loss": 0.0002, + "step": 772 + }, + { + "epoch": 1.7177777777777776, + "grad_norm": 0.006971673915997941, + "learning_rate": 8.221515790690633e-06, + "loss": 0.0002, + "step": 773 + }, + { + "epoch": 1.72, + "grad_norm": 0.029595353140890852, + "learning_rate": 8.197903601137644e-06, + "loss": 0.0006, + "step": 774 + }, + { + "epoch": 1.7222222222222223, + "grad_norm": 0.03537726103937756, + "learning_rate": 8.174301791606384e-06, + "loss": 0.0007, + "step": 775 + }, + { + "epoch": 1.7244444444444444, + "grad_norm": 0.773498612025268, + "learning_rate": 8.150710498042576e-06, + "loss": 0.0142, + "step": 776 + }, + { + "epoch": 1.7266666666666666, + "grad_norm": 0.005870350562298961, + "learning_rate": 8.127129856331365e-06, + "loss": 0.0002, + "step": 777 + }, + { + "epoch": 1.728888888888889, + "grad_norm": 0.15089341512995286, + "learning_rate": 8.103560002296554e-06, + "loss": 0.0025, + "step": 778 + }, + { + "epoch": 1.7311111111111113, + "grad_norm": 1.8656046741329817, + "learning_rate": 8.0800010716998e-06, + "loss": 0.028, + "step": 779 + }, + { + "epoch": 1.7333333333333334, + "grad_norm": 0.08038490323196622, + "learning_rate": 8.056453200239842e-06, + "loss": 0.0011, + "step": 780 + }, + { + "epoch": 1.7355555555555555, + "grad_norm": 0.0495912268082433, + "learning_rate": 8.03291652355172e-06, + "loss": 0.0011, + "step": 781 + }, + { + "epoch": 1.7377777777777776, + "grad_norm": 0.06651075580250962, + "learning_rate": 8.009391177205995e-06, + "loss": 0.0009, + "step": 782 + }, + { + "epoch": 1.74, + "grad_norm": 1.573685389824864, + "learning_rate": 7.985877296707958e-06, + "loss": 0.0061, + "step": 783 + }, + { + "epoch": 1.7422222222222223, + "grad_norm": 0.006287502144298676, + "learning_rate": 7.962375017496867e-06, + "loss": 0.0002, + "step": 784 + }, + { + "epoch": 1.7444444444444445, + "grad_norm": 0.1479423457355862, + "learning_rate": 7.93888447494515e-06, + "loss": 0.0023, + "step": 785 + }, + { + "epoch": 1.7466666666666666, + "grad_norm": 0.014097473300959758, + "learning_rate": 7.915405804357632e-06, + "loss": 0.0003, + "step": 786 + }, + { + "epoch": 1.748888888888889, + "grad_norm": 0.00371459171510088, + "learning_rate": 7.891939140970767e-06, + "loss": 0.0001, + "step": 787 + }, + { + "epoch": 1.751111111111111, + "grad_norm": 0.0546553947101097, + "learning_rate": 7.868484619951832e-06, + "loss": 0.0009, + "step": 788 + }, + { + "epoch": 1.7533333333333334, + "grad_norm": 0.04784189798421328, + "learning_rate": 7.845042376398174e-06, + "loss": 0.0007, + "step": 789 + }, + { + "epoch": 1.7555555555555555, + "grad_norm": 0.008815695752755794, + "learning_rate": 7.821612545336416e-06, + "loss": 0.0002, + "step": 790 + }, + { + "epoch": 1.7577777777777777, + "grad_norm": 0.04234116904138505, + "learning_rate": 7.798195261721692e-06, + "loss": 0.0006, + "step": 791 + }, + { + "epoch": 1.76, + "grad_norm": 0.009489613696535294, + "learning_rate": 7.774790660436857e-06, + "loss": 0.0003, + "step": 792 + }, + { + "epoch": 1.7622222222222224, + "grad_norm": 2.344699241718399, + "learning_rate": 7.751398876291725e-06, + "loss": 0.0512, + "step": 793 + }, + { + "epoch": 1.7644444444444445, + "grad_norm": 0.006355089570008847, + "learning_rate": 7.72802004402227e-06, + "loss": 0.0002, + "step": 794 + }, + { + "epoch": 1.7666666666666666, + "grad_norm": 0.008753264449460613, + "learning_rate": 7.704654298289878e-06, + "loss": 0.0002, + "step": 795 + }, + { + "epoch": 1.7688888888888887, + "grad_norm": 2.3630538485092822, + "learning_rate": 
7.681301773680548e-06, + "loss": 0.0237, + "step": 796 + }, + { + "epoch": 1.771111111111111, + "grad_norm": 0.22494219045677474, + "learning_rate": 7.65796260470413e-06, + "loss": 0.0058, + "step": 797 + }, + { + "epoch": 1.7733333333333334, + "grad_norm": 0.009927611249313266, + "learning_rate": 7.634636925793542e-06, + "loss": 0.0002, + "step": 798 + }, + { + "epoch": 1.7755555555555556, + "grad_norm": 0.03964292507611221, + "learning_rate": 7.611324871304002e-06, + "loss": 0.0009, + "step": 799 + }, + { + "epoch": 1.7777777777777777, + "grad_norm": 0.13074589279128435, + "learning_rate": 7.58802657551225e-06, + "loss": 0.0021, + "step": 800 + }, + { + "epoch": 1.78, + "grad_norm": 0.10692847688238526, + "learning_rate": 7.56474217261578e-06, + "loss": 0.0014, + "step": 801 + }, + { + "epoch": 1.7822222222222224, + "grad_norm": 1.3499196630039079, + "learning_rate": 7.54147179673206e-06, + "loss": 0.0292, + "step": 802 + }, + { + "epoch": 1.7844444444444445, + "grad_norm": 0.011232578121657228, + "learning_rate": 7.518215581897763e-06, + "loss": 0.0003, + "step": 803 + }, + { + "epoch": 1.7866666666666666, + "grad_norm": 0.08032609607048344, + "learning_rate": 7.494973662067996e-06, + "loss": 0.0015, + "step": 804 + }, + { + "epoch": 1.7888888888888888, + "grad_norm": 0.09875077945100784, + "learning_rate": 7.471746171115529e-06, + "loss": 0.0014, + "step": 805 + }, + { + "epoch": 1.791111111111111, + "grad_norm": 0.03154710195267777, + "learning_rate": 7.44853324283002e-06, + "loss": 0.0004, + "step": 806 + }, + { + "epoch": 1.7933333333333334, + "grad_norm": 0.03151288396588709, + "learning_rate": 7.425335010917244e-06, + "loss": 0.0002, + "step": 807 + }, + { + "epoch": 1.7955555555555556, + "grad_norm": 0.15327886584358602, + "learning_rate": 7.402151608998329e-06, + "loss": 0.0021, + "step": 808 + }, + { + "epoch": 1.7977777777777777, + "grad_norm": 0.2853885208960501, + "learning_rate": 7.378983170608982e-06, + "loss": 0.0047, + "step": 809 + }, + { + "epoch": 1.8, + "grad_norm": 0.08952741046469444, + "learning_rate": 7.355829829198715e-06, + "loss": 0.0015, + "step": 810 + }, + { + "epoch": 1.8022222222222222, + "grad_norm": 0.010189367077971101, + "learning_rate": 7.332691718130094e-06, + "loss": 0.0002, + "step": 811 + }, + { + "epoch": 1.8044444444444445, + "grad_norm": 1.9960241486188304, + "learning_rate": 7.3095689706779476e-06, + "loss": 0.0706, + "step": 812 + }, + { + "epoch": 1.8066666666666666, + "grad_norm": 0.450682853998699, + "learning_rate": 7.2864617200286124e-06, + "loss": 0.0088, + "step": 813 + }, + { + "epoch": 1.8088888888888888, + "grad_norm": 0.006640031606227429, + "learning_rate": 7.263370099279173e-06, + "loss": 0.0002, + "step": 814 + }, + { + "epoch": 1.8111111111111111, + "grad_norm": 0.004702757525352642, + "learning_rate": 7.2402942414366714e-06, + "loss": 0.0001, + "step": 815 + }, + { + "epoch": 1.8133333333333335, + "grad_norm": 1.6581967366616446, + "learning_rate": 7.217234279417369e-06, + "loss": 0.0883, + "step": 816 + }, + { + "epoch": 1.8155555555555556, + "grad_norm": 1.1252685844267665, + "learning_rate": 7.1941903460459575e-06, + "loss": 0.0241, + "step": 817 + }, + { + "epoch": 1.8177777777777777, + "grad_norm": 0.027757676523885715, + "learning_rate": 7.1711625740548115e-06, + "loss": 0.0004, + "step": 818 + }, + { + "epoch": 1.8199999999999998, + "grad_norm": 0.0028384645513604475, + "learning_rate": 7.148151096083211e-06, + "loss": 0.0001, + "step": 819 + }, + { + "epoch": 1.8222222222222222, + "grad_norm": 0.009575553089773836, + 
"learning_rate": 7.125156044676586e-06, + "loss": 0.0002, + "step": 820 + }, + { + "epoch": 1.8244444444444445, + "grad_norm": 0.008316787322784888, + "learning_rate": 7.102177552285753e-06, + "loss": 0.0003, + "step": 821 + }, + { + "epoch": 1.8266666666666667, + "grad_norm": 0.07540660769082985, + "learning_rate": 7.0792157512661445e-06, + "loss": 0.0011, + "step": 822 + }, + { + "epoch": 1.8288888888888888, + "grad_norm": 1.1424548709778362, + "learning_rate": 7.056270773877051e-06, + "loss": 0.0157, + "step": 823 + }, + { + "epoch": 1.8311111111111111, + "grad_norm": 0.003990868612157059, + "learning_rate": 7.033342752280861e-06, + "loss": 0.0001, + "step": 824 + }, + { + "epoch": 1.8333333333333335, + "grad_norm": 0.006575882570125605, + "learning_rate": 7.010431818542298e-06, + "loss": 0.0002, + "step": 825 + }, + { + "epoch": 1.8355555555555556, + "grad_norm": 0.21046307731754385, + "learning_rate": 6.9875381046276605e-06, + "loss": 0.0043, + "step": 826 + }, + { + "epoch": 1.8377777777777777, + "grad_norm": 0.6091612970961959, + "learning_rate": 6.964661742404058e-06, + "loss": 0.0069, + "step": 827 + }, + { + "epoch": 1.8399999999999999, + "grad_norm": 0.0075213525276237375, + "learning_rate": 6.9418028636386595e-06, + "loss": 0.0002, + "step": 828 + }, + { + "epoch": 1.8422222222222222, + "grad_norm": 0.0938972485961705, + "learning_rate": 6.918961599997926e-06, + "loss": 0.0008, + "step": 829 + }, + { + "epoch": 1.8444444444444446, + "grad_norm": 0.06163385239519462, + "learning_rate": 6.89613808304686e-06, + "loss": 0.0006, + "step": 830 + }, + { + "epoch": 1.8466666666666667, + "grad_norm": 0.0760322640469355, + "learning_rate": 6.873332444248241e-06, + "loss": 0.0012, + "step": 831 + }, + { + "epoch": 1.8488888888888888, + "grad_norm": 0.0068684737447744565, + "learning_rate": 6.85054481496187e-06, + "loss": 0.0002, + "step": 832 + }, + { + "epoch": 1.8511111111111112, + "grad_norm": 0.029850294502271224, + "learning_rate": 6.827775326443817e-06, + "loss": 0.0007, + "step": 833 + }, + { + "epoch": 1.8533333333333335, + "grad_norm": 0.005421834903518907, + "learning_rate": 6.805024109845657e-06, + "loss": 0.0001, + "step": 834 + }, + { + "epoch": 1.8555555555555556, + "grad_norm": 0.003201923710789719, + "learning_rate": 6.7822912962137225e-06, + "loss": 0.0001, + "step": 835 + }, + { + "epoch": 1.8577777777777778, + "grad_norm": 0.013857079342110726, + "learning_rate": 6.759577016488343e-06, + "loss": 0.0003, + "step": 836 + }, + { + "epoch": 1.8599999999999999, + "grad_norm": 0.0033745512676321607, + "learning_rate": 6.736881401503097e-06, + "loss": 0.0001, + "step": 837 + }, + { + "epoch": 1.8622222222222222, + "grad_norm": 0.006987265736735446, + "learning_rate": 6.714204581984052e-06, + "loss": 0.0002, + "step": 838 + }, + { + "epoch": 1.8644444444444446, + "grad_norm": 0.7068230946379546, + "learning_rate": 6.691546688549016e-06, + "loss": 0.0155, + "step": 839 + }, + { + "epoch": 1.8666666666666667, + "grad_norm": 0.034064935487275724, + "learning_rate": 6.668907851706782e-06, + "loss": 0.0006, + "step": 840 + }, + { + "epoch": 1.8688888888888888, + "grad_norm": 2.197594101226526, + "learning_rate": 6.646288201856377e-06, + "loss": 0.2022, + "step": 841 + }, + { + "epoch": 1.871111111111111, + "grad_norm": 0.01311843064796543, + "learning_rate": 6.623687869286314e-06, + "loss": 0.0002, + "step": 842 + }, + { + "epoch": 1.8733333333333333, + "grad_norm": 1.4935971951574831, + "learning_rate": 6.601106984173835e-06, + "loss": 0.0064, + "step": 843 + }, + { + "epoch": 
1.8755555555555556, + "grad_norm": 0.006444001249753074, + "learning_rate": 6.578545676584168e-06, + "loss": 0.0002, + "step": 844 + }, + { + "epoch": 1.8777777777777778, + "grad_norm": 0.0032277141792131326, + "learning_rate": 6.556004076469773e-06, + "loss": 0.0001, + "step": 845 + }, + { + "epoch": 1.88, + "grad_norm": 0.07509966634884807, + "learning_rate": 6.533482313669599e-06, + "loss": 0.0004, + "step": 846 + }, + { + "epoch": 1.8822222222222222, + "grad_norm": 1.897051902421445, + "learning_rate": 6.510980517908334e-06, + "loss": 0.0849, + "step": 847 + }, + { + "epoch": 1.8844444444444446, + "grad_norm": 0.004739944818470111, + "learning_rate": 6.488498818795646e-06, + "loss": 0.0001, + "step": 848 + }, + { + "epoch": 1.8866666666666667, + "grad_norm": 0.005947578590265762, + "learning_rate": 6.466037345825462e-06, + "loss": 0.0002, + "step": 849 + }, + { + "epoch": 1.8888888888888888, + "grad_norm": 0.22474389565792294, + "learning_rate": 6.443596228375193e-06, + "loss": 0.0026, + "step": 850 + }, + { + "epoch": 1.891111111111111, + "grad_norm": 0.3105348091157302, + "learning_rate": 6.421175595705013e-06, + "loss": 0.0029, + "step": 851 + }, + { + "epoch": 1.8933333333333333, + "grad_norm": 0.005957073807861304, + "learning_rate": 6.398775576957097e-06, + "loss": 0.0002, + "step": 852 + }, + { + "epoch": 1.8955555555555557, + "grad_norm": 1.043107737415353, + "learning_rate": 6.37639630115489e-06, + "loss": 0.0095, + "step": 853 + }, + { + "epoch": 1.8977777777777778, + "grad_norm": 0.016831627729310676, + "learning_rate": 6.354037897202352e-06, + "loss": 0.0003, + "step": 854 + }, + { + "epoch": 1.9, + "grad_norm": 0.029983411383676318, + "learning_rate": 6.331700493883228e-06, + "loss": 0.0004, + "step": 855 + }, + { + "epoch": 1.9022222222222223, + "grad_norm": 0.014696354816547366, + "learning_rate": 6.3093842198603014e-06, + "loss": 0.0003, + "step": 856 + }, + { + "epoch": 1.9044444444444446, + "grad_norm": 0.0036568593102944617, + "learning_rate": 6.287089203674641e-06, + "loss": 0.0001, + "step": 857 + }, + { + "epoch": 1.9066666666666667, + "grad_norm": 0.026464868959462685, + "learning_rate": 6.264815573744884e-06, + "loss": 0.0004, + "step": 858 + }, + { + "epoch": 1.9088888888888889, + "grad_norm": 0.18948896791214753, + "learning_rate": 6.242563458366475e-06, + "loss": 0.0041, + "step": 859 + }, + { + "epoch": 1.911111111111111, + "grad_norm": 0.5317789241656258, + "learning_rate": 6.220332985710936e-06, + "loss": 0.0084, + "step": 860 + }, + { + "epoch": 1.9133333333333333, + "grad_norm": 0.0571255305959852, + "learning_rate": 6.198124283825131e-06, + "loss": 0.0006, + "step": 861 + }, + { + "epoch": 1.9155555555555557, + "grad_norm": 0.020068457211139715, + "learning_rate": 6.17593748063052e-06, + "loss": 0.0003, + "step": 862 + }, + { + "epoch": 1.9177777777777778, + "grad_norm": 0.02055381745418504, + "learning_rate": 6.153772703922434e-06, + "loss": 0.0003, + "step": 863 + }, + { + "epoch": 1.92, + "grad_norm": 0.24738801831593074, + "learning_rate": 6.131630081369325e-06, + "loss": 0.0034, + "step": 864 + }, + { + "epoch": 1.9222222222222223, + "grad_norm": 0.10567833393898092, + "learning_rate": 6.1095097405120465e-06, + "loss": 0.002, + "step": 865 + }, + { + "epoch": 1.9244444444444444, + "grad_norm": 0.006515408802317468, + "learning_rate": 6.0874118087631e-06, + "loss": 0.0002, + "step": 866 + }, + { + "epoch": 1.9266666666666667, + "grad_norm": 0.003846481791587862, + "learning_rate": 6.065336413405918e-06, + "loss": 0.0001, + "step": 867 + }, + { + 
"epoch": 1.9288888888888889, + "grad_norm": 0.0071745140869183405, + "learning_rate": 6.043283681594123e-06, + "loss": 0.0002, + "step": 868 + }, + { + "epoch": 1.931111111111111, + "grad_norm": 0.016877110525274252, + "learning_rate": 6.021253740350793e-06, + "loss": 0.0004, + "step": 869 + }, + { + "epoch": 1.9333333333333333, + "grad_norm": 0.00793701964441935, + "learning_rate": 5.999246716567737e-06, + "loss": 0.0002, + "step": 870 + }, + { + "epoch": 1.9355555555555557, + "grad_norm": 0.022684910738979516, + "learning_rate": 5.977262737004756e-06, + "loss": 0.0004, + "step": 871 + }, + { + "epoch": 1.9377777777777778, + "grad_norm": 0.7520255420641138, + "learning_rate": 5.955301928288919e-06, + "loss": 0.0066, + "step": 872 + }, + { + "epoch": 1.94, + "grad_norm": 0.005472995431458194, + "learning_rate": 5.933364416913836e-06, + "loss": 0.0001, + "step": 873 + }, + { + "epoch": 1.942222222222222, + "grad_norm": 0.003759195032362361, + "learning_rate": 5.911450329238918e-06, + "loss": 0.0001, + "step": 874 + }, + { + "epoch": 1.9444444444444444, + "grad_norm": 0.006937835779786125, + "learning_rate": 5.889559791488658e-06, + "loss": 0.0002, + "step": 875 + }, + { + "epoch": 1.9466666666666668, + "grad_norm": 0.16689891899522757, + "learning_rate": 5.867692929751907e-06, + "loss": 0.0027, + "step": 876 + }, + { + "epoch": 1.948888888888889, + "grad_norm": 0.01324040414006725, + "learning_rate": 5.845849869981137e-06, + "loss": 0.0002, + "step": 877 + }, + { + "epoch": 1.951111111111111, + "grad_norm": 5.4810563199711, + "learning_rate": 5.824030737991722e-06, + "loss": 0.0403, + "step": 878 + }, + { + "epoch": 1.9533333333333334, + "grad_norm": 0.0059542662841294, + "learning_rate": 5.802235659461216e-06, + "loss": 0.0002, + "step": 879 + }, + { + "epoch": 1.9555555555555557, + "grad_norm": 0.019701737203907513, + "learning_rate": 5.780464759928623e-06, + "loss": 0.0003, + "step": 880 + }, + { + "epoch": 1.9577777777777778, + "grad_norm": 0.0034604910600392006, + "learning_rate": 5.758718164793675e-06, + "loss": 0.0001, + "step": 881 + }, + { + "epoch": 1.96, + "grad_norm": 0.006040256218369666, + "learning_rate": 5.736995999316122e-06, + "loss": 0.0001, + "step": 882 + }, + { + "epoch": 1.962222222222222, + "grad_norm": 1.7417811869591662, + "learning_rate": 5.715298388614987e-06, + "loss": 0.0253, + "step": 883 + }, + { + "epoch": 1.9644444444444444, + "grad_norm": 0.0038077178453970713, + "learning_rate": 5.693625457667862e-06, + "loss": 0.0001, + "step": 884 + }, + { + "epoch": 1.9666666666666668, + "grad_norm": 0.003717870329096441, + "learning_rate": 5.671977331310187e-06, + "loss": 0.0001, + "step": 885 + }, + { + "epoch": 1.968888888888889, + "grad_norm": 0.1837575219710433, + "learning_rate": 5.650354134234526e-06, + "loss": 0.0022, + "step": 886 + }, + { + "epoch": 1.971111111111111, + "grad_norm": 0.02470298231876396, + "learning_rate": 5.628755990989854e-06, + "loss": 0.0006, + "step": 887 + }, + { + "epoch": 1.9733333333333334, + "grad_norm": 0.025672490451858657, + "learning_rate": 5.607183025980831e-06, + "loss": 0.0003, + "step": 888 + }, + { + "epoch": 1.9755555555555555, + "grad_norm": 0.009088990368624211, + "learning_rate": 5.585635363467097e-06, + "loss": 0.0003, + "step": 889 + }, + { + "epoch": 1.9777777777777779, + "grad_norm": 0.008033751501780564, + "learning_rate": 5.564113127562543e-06, + "loss": 0.0002, + "step": 890 + }, + { + "epoch": 1.98, + "grad_norm": 2.2114154899295775, + "learning_rate": 5.542616442234618e-06, + "loss": 0.0671, + "step": 891 + }, + 
{ + "epoch": 1.982222222222222, + "grad_norm": 0.004169562218446099, + "learning_rate": 5.5211454313035865e-06, + "loss": 0.0001, + "step": 892 + }, + { + "epoch": 1.9844444444444445, + "grad_norm": 0.005541540674256128, + "learning_rate": 5.4997002184418325e-06, + "loss": 0.0002, + "step": 893 + }, + { + "epoch": 1.9866666666666668, + "grad_norm": 0.006534300633159017, + "learning_rate": 5.478280927173145e-06, + "loss": 0.0002, + "step": 894 + }, + { + "epoch": 1.988888888888889, + "grad_norm": 0.007472168802193255, + "learning_rate": 5.456887680872007e-06, + "loss": 0.0001, + "step": 895 + }, + { + "epoch": 1.991111111111111, + "grad_norm": 0.010645654958508707, + "learning_rate": 5.435520602762878e-06, + "loss": 0.0003, + "step": 896 + }, + { + "epoch": 1.9933333333333332, + "grad_norm": 3.0784256360079656, + "learning_rate": 5.4141798159195e-06, + "loss": 0.0687, + "step": 897 + }, + { + "epoch": 1.9955555555555555, + "grad_norm": 0.046706343079223556, + "learning_rate": 5.392865443264164e-06, + "loss": 0.0007, + "step": 898 + }, + { + "epoch": 1.9977777777777779, + "grad_norm": 0.0033155152581915356, + "learning_rate": 5.3715776075670286e-06, + "loss": 0.0001, + "step": 899 + }, + { + "epoch": 2.0, + "grad_norm": 0.006030150428065456, + "learning_rate": 5.350316431445397e-06, + "loss": 0.0001, + "step": 900 + }, + { + "epoch": 2.002222222222222, + "grad_norm": 0.003142431413964893, + "learning_rate": 5.329082037363007e-06, + "loss": 0.0001, + "step": 901 + }, + { + "epoch": 2.0044444444444443, + "grad_norm": 0.008142632539214184, + "learning_rate": 5.307874547629339e-06, + "loss": 0.0002, + "step": 902 + }, + { + "epoch": 2.006666666666667, + "grad_norm": 0.003148403153679196, + "learning_rate": 5.286694084398905e-06, + "loss": 0.0001, + "step": 903 + }, + { + "epoch": 2.008888888888889, + "grad_norm": 0.857473369987194, + "learning_rate": 5.2655407696705416e-06, + "loss": 0.015, + "step": 904 + }, + { + "epoch": 2.011111111111111, + "grad_norm": 0.00746507993579667, + "learning_rate": 5.244414725286717e-06, + "loss": 0.0002, + "step": 905 + }, + { + "epoch": 2.013333333333333, + "grad_norm": 0.02842818687179711, + "learning_rate": 5.223316072932817e-06, + "loss": 0.0006, + "step": 906 + }, + { + "epoch": 2.0155555555555558, + "grad_norm": 0.0031498195251105495, + "learning_rate": 5.202244934136449e-06, + "loss": 0.0001, + "step": 907 + }, + { + "epoch": 2.017777777777778, + "grad_norm": 0.19800775055032102, + "learning_rate": 5.1812014302667535e-06, + "loss": 0.0043, + "step": 908 + }, + { + "epoch": 2.02, + "grad_norm": 0.10335345303552698, + "learning_rate": 5.160185682533686e-06, + "loss": 0.0014, + "step": 909 + }, + { + "epoch": 2.022222222222222, + "grad_norm": 0.20409121345407502, + "learning_rate": 5.1391978119873275e-06, + "loss": 0.0045, + "step": 910 + }, + { + "epoch": 2.0244444444444443, + "grad_norm": 0.003325427833509806, + "learning_rate": 5.11823793951719e-06, + "loss": 0.0001, + "step": 911 + }, + { + "epoch": 2.026666666666667, + "grad_norm": 0.08885763424477246, + "learning_rate": 5.097306185851515e-06, + "loss": 0.0016, + "step": 912 + }, + { + "epoch": 2.028888888888889, + "grad_norm": 0.05186965909003598, + "learning_rate": 5.076402671556578e-06, + "loss": 0.0011, + "step": 913 + }, + { + "epoch": 2.031111111111111, + "grad_norm": 0.053189996848408035, + "learning_rate": 5.05552751703601e-06, + "loss": 0.0011, + "step": 914 + }, + { + "epoch": 2.033333333333333, + "grad_norm": 0.012419473898303434, + "learning_rate": 5.034680842530075e-06, + "loss": 0.0002, + 
"step": 915 + }, + { + "epoch": 2.0355555555555553, + "grad_norm": 0.25501752921171134, + "learning_rate": 5.0138627681149974e-06, + "loss": 0.0046, + "step": 916 + }, + { + "epoch": 2.037777777777778, + "grad_norm": 0.015816527745046393, + "learning_rate": 4.993073413702273e-06, + "loss": 0.0003, + "step": 917 + }, + { + "epoch": 2.04, + "grad_norm": 0.009825656146836809, + "learning_rate": 4.972312899037963e-06, + "loss": 0.0002, + "step": 918 + }, + { + "epoch": 2.042222222222222, + "grad_norm": 0.2998871408143298, + "learning_rate": 4.951581343702014e-06, + "loss": 0.0046, + "step": 919 + }, + { + "epoch": 2.0444444444444443, + "grad_norm": 0.008243559964961676, + "learning_rate": 4.930878867107572e-06, + "loss": 0.0002, + "step": 920 + }, + { + "epoch": 2.046666666666667, + "grad_norm": 0.04981753170873188, + "learning_rate": 4.9102055885002834e-06, + "loss": 0.0011, + "step": 921 + }, + { + "epoch": 2.048888888888889, + "grad_norm": 0.002803718988458032, + "learning_rate": 4.88956162695762e-06, + "loss": 0.0001, + "step": 922 + }, + { + "epoch": 2.051111111111111, + "grad_norm": 0.008191366079766756, + "learning_rate": 4.868947101388188e-06, + "loss": 0.0001, + "step": 923 + }, + { + "epoch": 2.0533333333333332, + "grad_norm": 0.0032732141533320484, + "learning_rate": 4.848362130531039e-06, + "loss": 0.0001, + "step": 924 + }, + { + "epoch": 2.0555555555555554, + "grad_norm": 0.004851029390065113, + "learning_rate": 4.827806832955e-06, + "loss": 0.0001, + "step": 925 + }, + { + "epoch": 2.057777777777778, + "grad_norm": 0.11595861379853764, + "learning_rate": 4.807281327057972e-06, + "loss": 0.0019, + "step": 926 + }, + { + "epoch": 2.06, + "grad_norm": 0.0031496951229834824, + "learning_rate": 4.786785731066258e-06, + "loss": 0.0001, + "step": 927 + }, + { + "epoch": 2.062222222222222, + "grad_norm": 0.06771468901899788, + "learning_rate": 4.766320163033882e-06, + "loss": 0.0006, + "step": 928 + }, + { + "epoch": 2.0644444444444443, + "grad_norm": 0.003164048174843136, + "learning_rate": 4.745884740841909e-06, + "loss": 0.0001, + "step": 929 + }, + { + "epoch": 2.066666666666667, + "grad_norm": 0.008822837748624192, + "learning_rate": 4.725479582197764e-06, + "loss": 0.0002, + "step": 930 + }, + { + "epoch": 2.068888888888889, + "grad_norm": 0.0034248483284673764, + "learning_rate": 4.705104804634549e-06, + "loss": 0.0001, + "step": 931 + }, + { + "epoch": 2.071111111111111, + "grad_norm": 0.09038563455747434, + "learning_rate": 4.684760525510388e-06, + "loss": 0.0019, + "step": 932 + }, + { + "epoch": 2.0733333333333333, + "grad_norm": 0.0023940206843144746, + "learning_rate": 4.664446862007718e-06, + "loss": 0.0001, + "step": 933 + }, + { + "epoch": 2.0755555555555554, + "grad_norm": 1.5993868376076514, + "learning_rate": 4.644163931132634e-06, + "loss": 0.0244, + "step": 934 + }, + { + "epoch": 2.077777777777778, + "grad_norm": 0.004975515617114705, + "learning_rate": 4.623911849714226e-06, + "loss": 0.0002, + "step": 935 + }, + { + "epoch": 2.08, + "grad_norm": 0.004440662060412932, + "learning_rate": 4.603690734403873e-06, + "loss": 0.0001, + "step": 936 + }, + { + "epoch": 2.082222222222222, + "grad_norm": 0.004746309226667228, + "learning_rate": 4.583500701674603e-06, + "loss": 0.0001, + "step": 937 + }, + { + "epoch": 2.0844444444444443, + "grad_norm": 0.007711963925031199, + "learning_rate": 4.5633418678204e-06, + "loss": 0.0002, + "step": 938 + }, + { + "epoch": 2.086666666666667, + "grad_norm": 0.0038492259162061213, + "learning_rate": 4.543214348955552e-06, + "loss": 
0.0001, + "step": 939 + }, + { + "epoch": 2.088888888888889, + "grad_norm": 0.009334497540330637, + "learning_rate": 4.523118261013969e-06, + "loss": 0.0002, + "step": 940 + }, + { + "epoch": 2.091111111111111, + "grad_norm": 0.015828859687095534, + "learning_rate": 4.50305371974852e-06, + "loss": 0.0003, + "step": 941 + }, + { + "epoch": 2.0933333333333333, + "grad_norm": 0.004168865244367242, + "learning_rate": 4.483020840730365e-06, + "loss": 0.0001, + "step": 942 + }, + { + "epoch": 2.0955555555555554, + "grad_norm": 0.008086766253850675, + "learning_rate": 4.463019739348296e-06, + "loss": 0.0002, + "step": 943 + }, + { + "epoch": 2.097777777777778, + "grad_norm": 0.060673466395916587, + "learning_rate": 4.443050530808061e-06, + "loss": 0.0011, + "step": 944 + }, + { + "epoch": 2.1, + "grad_norm": 0.004594114371419457, + "learning_rate": 4.423113330131708e-06, + "loss": 0.0001, + "step": 945 + }, + { + "epoch": 2.102222222222222, + "grad_norm": 1.9079725084281944, + "learning_rate": 4.403208252156921e-06, + "loss": 0.0534, + "step": 946 + }, + { + "epoch": 2.1044444444444443, + "grad_norm": 0.08834050517138371, + "learning_rate": 4.383335411536357e-06, + "loss": 0.0013, + "step": 947 + }, + { + "epoch": 2.1066666666666665, + "grad_norm": 2.0262515187628476, + "learning_rate": 4.363494922736988e-06, + "loss": 0.0485, + "step": 948 + }, + { + "epoch": 2.108888888888889, + "grad_norm": 0.005216514961835403, + "learning_rate": 4.343686900039438e-06, + "loss": 0.0001, + "step": 949 + }, + { + "epoch": 2.111111111111111, + "grad_norm": 0.0031397169761585852, + "learning_rate": 4.323911457537335e-06, + "loss": 0.0001, + "step": 950 + }, + { + "epoch": 2.1133333333333333, + "grad_norm": 0.019125343655220832, + "learning_rate": 4.3041687091366325e-06, + "loss": 0.0004, + "step": 951 + }, + { + "epoch": 2.1155555555555554, + "grad_norm": 0.009913854012762522, + "learning_rate": 4.284458768554984e-06, + "loss": 0.0002, + "step": 952 + }, + { + "epoch": 2.117777777777778, + "grad_norm": 0.002648410778185918, + "learning_rate": 4.264781749321058e-06, + "loss": 0.0001, + "step": 953 + }, + { + "epoch": 2.12, + "grad_norm": 0.46329612837924644, + "learning_rate": 4.245137764773899e-06, + "loss": 0.0034, + "step": 954 + }, + { + "epoch": 2.1222222222222222, + "grad_norm": 3.3558097264248707, + "learning_rate": 4.2255269280622754e-06, + "loss": 0.0429, + "step": 955 + }, + { + "epoch": 2.1244444444444444, + "grad_norm": 0.05119986985717654, + "learning_rate": 4.205949352144025e-06, + "loss": 0.0005, + "step": 956 + }, + { + "epoch": 2.1266666666666665, + "grad_norm": 0.004423253468968909, + "learning_rate": 4.186405149785403e-06, + "loss": 0.0001, + "step": 957 + }, + { + "epoch": 2.128888888888889, + "grad_norm": 0.010387416625117609, + "learning_rate": 4.166894433560435e-06, + "loss": 0.0002, + "step": 958 + }, + { + "epoch": 2.131111111111111, + "grad_norm": 0.007708651338171354, + "learning_rate": 4.1474173158502615e-06, + "loss": 0.0001, + "step": 959 + }, + { + "epoch": 2.1333333333333333, + "grad_norm": 0.004329811465815702, + "learning_rate": 4.1279739088425106e-06, + "loss": 0.0001, + "step": 960 + }, + { + "epoch": 2.1355555555555554, + "grad_norm": 0.0070742546481506454, + "learning_rate": 4.108564324530626e-06, + "loss": 0.0001, + "step": 961 + }, + { + "epoch": 2.137777777777778, + "grad_norm": 0.004694191756069144, + "learning_rate": 4.0891886747132356e-06, + "loss": 0.0001, + "step": 962 + }, + { + "epoch": 2.14, + "grad_norm": 0.1509873871178636, + "learning_rate": 
4.069847070993508e-06, + "loss": 0.0019, + "step": 963 + }, + { + "epoch": 2.1422222222222222, + "grad_norm": 0.00377191340525509, + "learning_rate": 4.050539624778506e-06, + "loss": 0.0001, + "step": 964 + }, + { + "epoch": 2.1444444444444444, + "grad_norm": 0.007784364714409816, + "learning_rate": 4.031266447278543e-06, + "loss": 0.0002, + "step": 965 + }, + { + "epoch": 2.1466666666666665, + "grad_norm": 0.2285721436442924, + "learning_rate": 4.012027649506555e-06, + "loss": 0.0023, + "step": 966 + }, + { + "epoch": 2.148888888888889, + "grad_norm": 0.010911955724838131, + "learning_rate": 3.992823342277437e-06, + "loss": 0.0002, + "step": 967 + }, + { + "epoch": 2.151111111111111, + "grad_norm": 0.012811372500334811, + "learning_rate": 3.973653636207437e-06, + "loss": 0.0002, + "step": 968 + }, + { + "epoch": 2.1533333333333333, + "grad_norm": 0.004889071112346101, + "learning_rate": 3.9545186417134865e-06, + "loss": 0.0001, + "step": 969 + }, + { + "epoch": 2.1555555555555554, + "grad_norm": 0.0098177042751365, + "learning_rate": 3.935418469012592e-06, + "loss": 0.0003, + "step": 970 + }, + { + "epoch": 2.1577777777777776, + "grad_norm": 0.02624010714064344, + "learning_rate": 3.916353228121176e-06, + "loss": 0.0005, + "step": 971 + }, + { + "epoch": 2.16, + "grad_norm": 0.0022815617796102024, + "learning_rate": 3.897323028854461e-06, + "loss": 0.0001, + "step": 972 + }, + { + "epoch": 2.1622222222222223, + "grad_norm": 0.008490046076174299, + "learning_rate": 3.878327980825829e-06, + "loss": 0.0002, + "step": 973 + }, + { + "epoch": 2.1644444444444444, + "grad_norm": 0.0061986348663882335, + "learning_rate": 3.859368193446193e-06, + "loss": 0.0002, + "step": 974 + }, + { + "epoch": 2.1666666666666665, + "grad_norm": 0.06158160210502114, + "learning_rate": 3.840443775923365e-06, + "loss": 0.0013, + "step": 975 + }, + { + "epoch": 2.168888888888889, + "grad_norm": 0.004413107666680964, + "learning_rate": 3.821554837261424e-06, + "loss": 0.0001, + "step": 976 + }, + { + "epoch": 2.171111111111111, + "grad_norm": 0.029902081602871354, + "learning_rate": 3.802701486260102e-06, + "loss": 0.0006, + "step": 977 + }, + { + "epoch": 2.1733333333333333, + "grad_norm": 0.004941386221685535, + "learning_rate": 3.783883831514139e-06, + "loss": 0.0001, + "step": 978 + }, + { + "epoch": 2.1755555555555555, + "grad_norm": 3.207411560563467, + "learning_rate": 3.7651019814126656e-06, + "loss": 0.0551, + "step": 979 + }, + { + "epoch": 2.1777777777777776, + "grad_norm": 6.631226103087978, + "learning_rate": 3.7463560441385814e-06, + "loss": 0.1596, + "step": 980 + }, + { + "epoch": 2.18, + "grad_norm": 1.5156629750586899, + "learning_rate": 3.727646127667929e-06, + "loss": 0.0149, + "step": 981 + }, + { + "epoch": 2.1822222222222223, + "grad_norm": 0.007854037215791183, + "learning_rate": 3.70897233976927e-06, + "loss": 0.0002, + "step": 982 + }, + { + "epoch": 2.1844444444444444, + "grad_norm": 0.025353241359213696, + "learning_rate": 3.6903347880030684e-06, + "loss": 0.0003, + "step": 983 + }, + { + "epoch": 2.1866666666666665, + "grad_norm": 3.0795220720092296, + "learning_rate": 3.6717335797210663e-06, + "loss": 0.0873, + "step": 984 + }, + { + "epoch": 2.188888888888889, + "grad_norm": 0.04924251333223918, + "learning_rate": 3.653168822065677e-06, + "loss": 0.0004, + "step": 985 + }, + { + "epoch": 2.1911111111111112, + "grad_norm": 0.008547852626440691, + "learning_rate": 3.6346406219693485e-06, + "loss": 0.0002, + "step": 986 + }, + { + "epoch": 2.1933333333333334, + "grad_norm": 
0.0026899390651576693, + "learning_rate": 3.6161490861539626e-06, + "loss": 0.0001, + "step": 987 + }, + { + "epoch": 2.1955555555555555, + "grad_norm": 0.0031064518336188316, + "learning_rate": 3.5976943211302206e-06, + "loss": 0.0001, + "step": 988 + }, + { + "epoch": 2.1977777777777776, + "grad_norm": 0.038661383726358044, + "learning_rate": 3.5792764331970187e-06, + "loss": 0.0007, + "step": 989 + }, + { + "epoch": 2.2, + "grad_norm": 0.07055129095159182, + "learning_rate": 3.560895528440844e-06, + "loss": 0.0013, + "step": 990 + }, + { + "epoch": 2.2022222222222223, + "grad_norm": 0.008536625154208732, + "learning_rate": 3.5425517127351614e-06, + "loss": 0.0002, + "step": 991 + }, + { + "epoch": 2.2044444444444444, + "grad_norm": 0.004598353141487085, + "learning_rate": 3.524245091739805e-06, + "loss": 0.0001, + "step": 992 + }, + { + "epoch": 2.2066666666666666, + "grad_norm": 0.0043804701873085125, + "learning_rate": 3.5059757709003685e-06, + "loss": 0.0001, + "step": 993 + }, + { + "epoch": 2.2088888888888887, + "grad_norm": 0.0048874989404565465, + "learning_rate": 3.487743855447593e-06, + "loss": 0.0001, + "step": 994 + }, + { + "epoch": 2.2111111111111112, + "grad_norm": 0.018150444697692728, + "learning_rate": 3.4695494503967773e-06, + "loss": 0.0004, + "step": 995 + }, + { + "epoch": 2.2133333333333334, + "grad_norm": 0.010992629667357695, + "learning_rate": 3.4513926605471504e-06, + "loss": 0.0003, + "step": 996 + }, + { + "epoch": 2.2155555555555555, + "grad_norm": 0.36209841138688464, + "learning_rate": 3.433273590481282e-06, + "loss": 0.0041, + "step": 997 + }, + { + "epoch": 2.2177777777777776, + "grad_norm": 0.0063774594454677255, + "learning_rate": 3.4151923445644785e-06, + "loss": 0.0002, + "step": 998 + }, + { + "epoch": 2.22, + "grad_norm": 0.00460202330129417, + "learning_rate": 3.3971490269441777e-06, + "loss": 0.0001, + "step": 999 + }, + { + "epoch": 2.2222222222222223, + "grad_norm": 0.0037052833023356927, + "learning_rate": 3.3791437415493556e-06, + "loss": 0.0001, + "step": 1000 + }, + { + "epoch": 2.2244444444444444, + "grad_norm": 1.428587345448699, + "learning_rate": 3.361176592089919e-06, + "loss": 0.0242, + "step": 1001 + }, + { + "epoch": 2.2266666666666666, + "grad_norm": 0.05325728706107447, + "learning_rate": 3.3432476820561134e-06, + "loss": 0.0007, + "step": 1002 + }, + { + "epoch": 2.2288888888888887, + "grad_norm": 0.034573286474176415, + "learning_rate": 3.3253571147179333e-06, + "loss": 0.0005, + "step": 1003 + }, + { + "epoch": 2.2311111111111113, + "grad_norm": 0.013854854763350999, + "learning_rate": 3.307504993124513e-06, + "loss": 0.0003, + "step": 1004 + }, + { + "epoch": 2.2333333333333334, + "grad_norm": 0.0048975771147271715, + "learning_rate": 3.2896914201035377e-06, + "loss": 0.0001, + "step": 1005 + }, + { + "epoch": 2.2355555555555555, + "grad_norm": 1.6914127638588388, + "learning_rate": 3.2719164982606675e-06, + "loss": 0.1959, + "step": 1006 + }, + { + "epoch": 2.2377777777777776, + "grad_norm": 0.1467345099562343, + "learning_rate": 3.254180329978921e-06, + "loss": 0.0028, + "step": 1007 + }, + { + "epoch": 2.24, + "grad_norm": 0.011296444747974371, + "learning_rate": 3.2364830174180984e-06, + "loss": 0.0003, + "step": 1008 + }, + { + "epoch": 2.2422222222222223, + "grad_norm": 0.006409358511812489, + "learning_rate": 3.2188246625141963e-06, + "loss": 0.0002, + "step": 1009 + }, + { + "epoch": 2.2444444444444445, + "grad_norm": 0.015956893004168776, + "learning_rate": 3.2012053669788136e-06, + "loss": 0.0004, + "step": 1010 + }, 
+ { + "epoch": 2.2466666666666666, + "grad_norm": 0.4829188340377067, + "learning_rate": 3.183625232298566e-06, + "loss": 0.0057, + "step": 1011 + }, + { + "epoch": 2.2488888888888887, + "grad_norm": 0.040673372539010794, + "learning_rate": 3.1660843597345137e-06, + "loss": 0.0006, + "step": 1012 + }, + { + "epoch": 2.2511111111111113, + "grad_norm": 0.008579993178229843, + "learning_rate": 3.1485828503215588e-06, + "loss": 0.0003, + "step": 1013 + }, + { + "epoch": 2.2533333333333334, + "grad_norm": 0.010387984145614839, + "learning_rate": 3.1311208048678742e-06, + "loss": 0.0002, + "step": 1014 + }, + { + "epoch": 2.2555555555555555, + "grad_norm": 0.4959673918536231, + "learning_rate": 3.113698323954326e-06, + "loss": 0.0059, + "step": 1015 + }, + { + "epoch": 2.2577777777777777, + "grad_norm": 1.708681294299263, + "learning_rate": 3.0963155079338834e-06, + "loss": 0.02, + "step": 1016 + }, + { + "epoch": 2.26, + "grad_norm": 5.358148769237903, + "learning_rate": 3.0789724569310532e-06, + "loss": 0.0973, + "step": 1017 + }, + { + "epoch": 2.2622222222222224, + "grad_norm": 0.007180082584462281, + "learning_rate": 3.061669270841291e-06, + "loss": 0.0002, + "step": 1018 + }, + { + "epoch": 2.2644444444444445, + "grad_norm": 0.36858174295972057, + "learning_rate": 3.044406049330437e-06, + "loss": 0.0066, + "step": 1019 + }, + { + "epoch": 2.2666666666666666, + "grad_norm": 0.3630721817490338, + "learning_rate": 3.0271828918341317e-06, + "loss": 0.0044, + "step": 1020 + }, + { + "epoch": 2.2688888888888887, + "grad_norm": 0.9849360168045747, + "learning_rate": 3.0099998975572553e-06, + "loss": 0.0083, + "step": 1021 + }, + { + "epoch": 2.2711111111111113, + "grad_norm": 0.0064416689636884, + "learning_rate": 2.9928571654733374e-06, + "loss": 0.0002, + "step": 1022 + }, + { + "epoch": 2.2733333333333334, + "grad_norm": 0.6446847155954345, + "learning_rate": 2.975754794324015e-06, + "loss": 0.0031, + "step": 1023 + }, + { + "epoch": 2.2755555555555556, + "grad_norm": 0.004810172332772929, + "learning_rate": 2.9586928826184323e-06, + "loss": 0.0001, + "step": 1024 + }, + { + "epoch": 2.2777777777777777, + "grad_norm": 1.6002845951875935, + "learning_rate": 2.941671528632695e-06, + "loss": 0.0132, + "step": 1025 + }, + { + "epoch": 2.2800000000000002, + "grad_norm": 0.0036595634357199395, + "learning_rate": 2.9246908304092945e-06, + "loss": 0.0001, + "step": 1026 + }, + { + "epoch": 2.2822222222222224, + "grad_norm": 0.003598792615123913, + "learning_rate": 2.9077508857565507e-06, + "loss": 0.0001, + "step": 1027 + }, + { + "epoch": 2.2844444444444445, + "grad_norm": 0.009024938869830142, + "learning_rate": 2.8908517922480385e-06, + "loss": 0.0002, + "step": 1028 + }, + { + "epoch": 2.2866666666666666, + "grad_norm": 0.007620452626952381, + "learning_rate": 2.8739936472220385e-06, + "loss": 0.0002, + "step": 1029 + }, + { + "epoch": 2.2888888888888888, + "grad_norm": 0.09309169256438858, + "learning_rate": 2.8571765477809645e-06, + "loss": 0.001, + "step": 1030 + }, + { + "epoch": 2.2911111111111113, + "grad_norm": 0.0056390318504238954, + "learning_rate": 2.8404005907908083e-06, + "loss": 0.0002, + "step": 1031 + }, + { + "epoch": 2.2933333333333334, + "grad_norm": 0.20850874873907546, + "learning_rate": 2.8236658728805844e-06, + "loss": 0.0028, + "step": 1032 + }, + { + "epoch": 2.2955555555555556, + "grad_norm": 0.008088819341255153, + "learning_rate": 2.8069724904417704e-06, + "loss": 0.0002, + "step": 1033 + }, + { + "epoch": 2.2977777777777777, + "grad_norm": 0.004886801265119917, + 
"learning_rate": 2.7903205396277546e-06, + "loss": 0.0001, + "step": 1034 + }, + { + "epoch": 2.3, + "grad_norm": 0.023136136694457902, + "learning_rate": 2.7737101163532763e-06, + "loss": 0.0004, + "step": 1035 + }, + { + "epoch": 2.3022222222222224, + "grad_norm": 0.011703631941899864, + "learning_rate": 2.757141316293884e-06, + "loss": 0.0003, + "step": 1036 + }, + { + "epoch": 2.3044444444444445, + "grad_norm": 0.004262021394227616, + "learning_rate": 2.740614234885368e-06, + "loss": 0.0001, + "step": 1037 + }, + { + "epoch": 2.3066666666666666, + "grad_norm": 0.00511212438268216, + "learning_rate": 2.724128967323234e-06, + "loss": 0.0001, + "step": 1038 + }, + { + "epoch": 2.3088888888888888, + "grad_norm": 0.00671755248239093, + "learning_rate": 2.7076856085621294e-06, + "loss": 0.0002, + "step": 1039 + }, + { + "epoch": 2.311111111111111, + "grad_norm": 0.016996065566194065, + "learning_rate": 2.691284253315309e-06, + "loss": 0.0002, + "step": 1040 + }, + { + "epoch": 2.3133333333333335, + "grad_norm": 0.010492888449662405, + "learning_rate": 2.674924996054099e-06, + "loss": 0.0002, + "step": 1041 + }, + { + "epoch": 2.3155555555555556, + "grad_norm": 0.005409391356538481, + "learning_rate": 2.6586079310073323e-06, + "loss": 0.0001, + "step": 1042 + }, + { + "epoch": 2.3177777777777777, + "grad_norm": 0.01327179205634621, + "learning_rate": 2.6423331521608173e-06, + "loss": 0.0003, + "step": 1043 + }, + { + "epoch": 2.32, + "grad_norm": 0.9898605527372344, + "learning_rate": 2.626100753256798e-06, + "loss": 0.0193, + "step": 1044 + }, + { + "epoch": 2.3222222222222224, + "grad_norm": 2.291456795960324, + "learning_rate": 2.6099108277934105e-06, + "loss": 0.0547, + "step": 1045 + }, + { + "epoch": 2.3244444444444445, + "grad_norm": 0.0033026240230800527, + "learning_rate": 2.5937634690241396e-06, + "loss": 0.0001, + "step": 1046 + }, + { + "epoch": 2.3266666666666667, + "grad_norm": 0.008853568678890181, + "learning_rate": 2.5776587699573007e-06, + "loss": 0.0002, + "step": 1047 + }, + { + "epoch": 2.328888888888889, + "grad_norm": 0.004873492910089222, + "learning_rate": 2.5615968233554766e-06, + "loss": 0.0001, + "step": 1048 + }, + { + "epoch": 2.3311111111111114, + "grad_norm": 0.007284193949325532, + "learning_rate": 2.545577721735004e-06, + "loss": 0.0002, + "step": 1049 + }, + { + "epoch": 2.3333333333333335, + "grad_norm": 0.004891756370100183, + "learning_rate": 2.529601557365432e-06, + "loss": 0.0001, + "step": 1050 + }, + { + "epoch": 2.3355555555555556, + "grad_norm": 0.015090022878465751, + "learning_rate": 2.5136684222689933e-06, + "loss": 0.0003, + "step": 1051 + }, + { + "epoch": 2.3377777777777777, + "grad_norm": 0.004215920776481473, + "learning_rate": 2.4977784082200728e-06, + "loss": 0.0001, + "step": 1052 + }, + { + "epoch": 2.34, + "grad_norm": 0.007341265910072392, + "learning_rate": 2.4819316067446787e-06, + "loss": 0.0002, + "step": 1053 + }, + { + "epoch": 2.3422222222222224, + "grad_norm": 0.013325584927197088, + "learning_rate": 2.4661281091199142e-06, + "loss": 0.0002, + "step": 1054 + }, + { + "epoch": 2.3444444444444446, + "grad_norm": 0.00701244759788022, + "learning_rate": 2.4503680063734615e-06, + "loss": 0.0002, + "step": 1055 + }, + { + "epoch": 2.3466666666666667, + "grad_norm": 1.2954319094797195, + "learning_rate": 2.4346513892830427e-06, + "loss": 0.035, + "step": 1056 + }, + { + "epoch": 2.348888888888889, + "grad_norm": 0.29887803427114434, + "learning_rate": 2.418978348375904e-06, + "loss": 0.0053, + "step": 1057 + }, + { + "epoch": 
2.351111111111111, + "grad_norm": 0.03979158239681823, + "learning_rate": 2.4033489739282943e-06, + "loss": 0.0007, + "step": 1058 + }, + { + "epoch": 2.3533333333333335, + "grad_norm": 3.0064451365139617, + "learning_rate": 2.3877633559649505e-06, + "loss": 0.0075, + "step": 1059 + }, + { + "epoch": 2.3555555555555556, + "grad_norm": 0.005708737728139413, + "learning_rate": 2.372221584258566e-06, + "loss": 0.0002, + "step": 1060 + }, + { + "epoch": 2.3577777777777778, + "grad_norm": 0.0034006219887576104, + "learning_rate": 2.356723748329286e-06, + "loss": 0.0001, + "step": 1061 + }, + { + "epoch": 2.36, + "grad_norm": 0.04064716717610486, + "learning_rate": 2.341269937444183e-06, + "loss": 0.0006, + "step": 1062 + }, + { + "epoch": 2.362222222222222, + "grad_norm": 0.010167763828473255, + "learning_rate": 2.3258602406167465e-06, + "loss": 0.0002, + "step": 1063 + }, + { + "epoch": 2.3644444444444446, + "grad_norm": 0.3081478204285823, + "learning_rate": 2.3104947466063785e-06, + "loss": 0.0054, + "step": 1064 + }, + { + "epoch": 2.3666666666666667, + "grad_norm": 0.34404355752926685, + "learning_rate": 2.295173543917867e-06, + "loss": 0.0054, + "step": 1065 + }, + { + "epoch": 2.368888888888889, + "grad_norm": 0.002736888185456215, + "learning_rate": 2.2798967208008806e-06, + "loss": 0.0001, + "step": 1066 + }, + { + "epoch": 2.371111111111111, + "grad_norm": 0.00838800724438987, + "learning_rate": 2.2646643652494693e-06, + "loss": 0.0002, + "step": 1067 + }, + { + "epoch": 2.3733333333333335, + "grad_norm": 0.2551739637615219, + "learning_rate": 2.249476565001548e-06, + "loss": 0.003, + "step": 1068 + }, + { + "epoch": 2.3755555555555556, + "grad_norm": 0.03913880618684705, + "learning_rate": 2.234333407538396e-06, + "loss": 0.0006, + "step": 1069 + }, + { + "epoch": 2.3777777777777778, + "grad_norm": 0.008667316714413415, + "learning_rate": 2.219234980084148e-06, + "loss": 0.0002, + "step": 1070 + }, + { + "epoch": 2.38, + "grad_norm": 0.01913537674512143, + "learning_rate": 2.2041813696052996e-06, + "loss": 0.0004, + "step": 1071 + }, + { + "epoch": 2.3822222222222225, + "grad_norm": 0.003632602548880107, + "learning_rate": 2.189172662810197e-06, + "loss": 0.0001, + "step": 1072 + }, + { + "epoch": 2.3844444444444446, + "grad_norm": 0.00260555566513924, + "learning_rate": 2.1742089461485504e-06, + "loss": 0.0001, + "step": 1073 + }, + { + "epoch": 2.3866666666666667, + "grad_norm": 0.004141409692179484, + "learning_rate": 2.1592903058109215e-06, + "loss": 0.0001, + "step": 1074 + }, + { + "epoch": 2.388888888888889, + "grad_norm": 0.0025084048144379967, + "learning_rate": 2.1444168277282352e-06, + "loss": 0.0001, + "step": 1075 + }, + { + "epoch": 2.391111111111111, + "grad_norm": 0.9994762269583267, + "learning_rate": 2.1295885975712805e-06, + "loss": 0.0207, + "step": 1076 + }, + { + "epoch": 2.3933333333333335, + "grad_norm": 0.004712602608037591, + "learning_rate": 2.1148057007502277e-06, + "loss": 0.0001, + "step": 1077 + }, + { + "epoch": 2.3955555555555557, + "grad_norm": 0.032881076618684384, + "learning_rate": 2.100068222414121e-06, + "loss": 0.0006, + "step": 1078 + }, + { + "epoch": 2.397777777777778, + "grad_norm": 0.7892595227182483, + "learning_rate": 2.0853762474503982e-06, + "loss": 0.0076, + "step": 1079 + }, + { + "epoch": 2.4, + "grad_norm": 0.03997402549213818, + "learning_rate": 2.0707298604843964e-06, + "loss": 0.0005, + "step": 1080 + }, + { + "epoch": 2.402222222222222, + "grad_norm": 0.008994166745022555, + "learning_rate": 2.0561291458788736e-06, + "loss": 
0.0002, + "step": 1081 + }, + { + "epoch": 2.4044444444444446, + "grad_norm": 0.0030961914197480253, + "learning_rate": 2.0415741877335095e-06, + "loss": 0.0001, + "step": 1082 + }, + { + "epoch": 2.4066666666666667, + "grad_norm": 1.0954550811144694, + "learning_rate": 2.027065069884432e-06, + "loss": 0.0155, + "step": 1083 + }, + { + "epoch": 2.408888888888889, + "grad_norm": 0.00385022146573814, + "learning_rate": 2.0126018759037292e-06, + "loss": 0.0001, + "step": 1084 + }, + { + "epoch": 2.411111111111111, + "grad_norm": 0.008432785464892805, + "learning_rate": 1.9981846890989665e-06, + "loss": 0.0002, + "step": 1085 + }, + { + "epoch": 2.413333333333333, + "grad_norm": 0.017130037251140335, + "learning_rate": 1.9838135925127134e-06, + "loss": 0.0003, + "step": 1086 + }, + { + "epoch": 2.4155555555555557, + "grad_norm": 0.00507208516067379, + "learning_rate": 1.9694886689220592e-06, + "loss": 0.0001, + "step": 1087 + }, + { + "epoch": 2.417777777777778, + "grad_norm": 0.005716067937564253, + "learning_rate": 1.955210000838138e-06, + "loss": 0.0002, + "step": 1088 + }, + { + "epoch": 2.42, + "grad_norm": 0.1350186217517163, + "learning_rate": 1.9409776705056514e-06, + "loss": 0.0024, + "step": 1089 + }, + { + "epoch": 2.422222222222222, + "grad_norm": 3.649434271443835, + "learning_rate": 1.9267917599024045e-06, + "loss": 0.1884, + "step": 1090 + }, + { + "epoch": 2.4244444444444446, + "grad_norm": 0.08705003094128638, + "learning_rate": 1.912652350738818e-06, + "loss": 0.0007, + "step": 1091 + }, + { + "epoch": 2.4266666666666667, + "grad_norm": 0.013650643606555064, + "learning_rate": 1.8985595244574707e-06, + "loss": 0.0003, + "step": 1092 + }, + { + "epoch": 2.428888888888889, + "grad_norm": 0.2555410592342028, + "learning_rate": 1.8845133622326174e-06, + "loss": 0.0043, + "step": 1093 + }, + { + "epoch": 2.431111111111111, + "grad_norm": 0.08808035807573998, + "learning_rate": 1.870513944969743e-06, + "loss": 0.002, + "step": 1094 + }, + { + "epoch": 2.4333333333333336, + "grad_norm": 0.024577916286246934, + "learning_rate": 1.8565613533050719e-06, + "loss": 0.0005, + "step": 1095 + }, + { + "epoch": 2.4355555555555557, + "grad_norm": 0.0024942064609750447, + "learning_rate": 1.8426556676051178e-06, + "loss": 0.0001, + "step": 1096 + }, + { + "epoch": 2.437777777777778, + "grad_norm": 2.141254351009635, + "learning_rate": 1.8287969679662165e-06, + "loss": 0.1046, + "step": 1097 + }, + { + "epoch": 2.44, + "grad_norm": 0.011730956715081198, + "learning_rate": 1.8149853342140644e-06, + "loss": 0.0002, + "step": 1098 + }, + { + "epoch": 2.442222222222222, + "grad_norm": 0.012260161865128473, + "learning_rate": 1.8012208459032665e-06, + "loss": 0.0002, + "step": 1099 + }, + { + "epoch": 2.4444444444444446, + "grad_norm": 0.0028547187152581097, + "learning_rate": 1.7875035823168641e-06, + "loss": 0.0001, + "step": 1100 + }, + { + "epoch": 2.4466666666666668, + "grad_norm": 0.003747126763893755, + "learning_rate": 1.773833622465888e-06, + "loss": 0.0001, + "step": 1101 + }, + { + "epoch": 2.448888888888889, + "grad_norm": 0.023036646035635683, + "learning_rate": 1.760211045088902e-06, + "loss": 0.0003, + "step": 1102 + }, + { + "epoch": 2.451111111111111, + "grad_norm": 0.01089247813864428, + "learning_rate": 1.7466359286515443e-06, + "loss": 0.0002, + "step": 1103 + }, + { + "epoch": 2.453333333333333, + "grad_norm": 0.018720723752491147, + "learning_rate": 1.7331083513460855e-06, + "loss": 0.0003, + "step": 1104 + }, + { + "epoch": 2.4555555555555557, + "grad_norm": 
0.0039037337447842277, + "learning_rate": 1.7196283910909673e-06, + "loss": 0.0001, + "step": 1105 + }, + { + "epoch": 2.457777777777778, + "grad_norm": 0.10029130105465163, + "learning_rate": 1.7061961255303594e-06, + "loss": 0.0018, + "step": 1106 + }, + { + "epoch": 2.46, + "grad_norm": 0.011968416570773485, + "learning_rate": 1.692811632033715e-06, + "loss": 0.0003, + "step": 1107 + }, + { + "epoch": 2.462222222222222, + "grad_norm": 0.01854557286317915, + "learning_rate": 1.6794749876953187e-06, + "loss": 0.0002, + "step": 1108 + }, + { + "epoch": 2.464444444444444, + "grad_norm": 0.05574965172713316, + "learning_rate": 1.6661862693338437e-06, + "loss": 0.0011, + "step": 1109 + }, + { + "epoch": 2.466666666666667, + "grad_norm": 0.22380239193365228, + "learning_rate": 1.652945553491916e-06, + "loss": 0.0021, + "step": 1110 + }, + { + "epoch": 2.468888888888889, + "grad_norm": 0.0048590856660869425, + "learning_rate": 1.6397529164356606e-06, + "loss": 0.0001, + "step": 1111 + }, + { + "epoch": 2.471111111111111, + "grad_norm": 6.4214852263880315, + "learning_rate": 1.626608434154281e-06, + "loss": 0.0313, + "step": 1112 + }, + { + "epoch": 2.473333333333333, + "grad_norm": 0.005919758840638271, + "learning_rate": 1.613512182359601e-06, + "loss": 0.0002, + "step": 1113 + }, + { + "epoch": 2.4755555555555557, + "grad_norm": 0.007559717066406532, + "learning_rate": 1.6004642364856438e-06, + "loss": 0.0002, + "step": 1114 + }, + { + "epoch": 2.477777777777778, + "grad_norm": 0.0056203152438801685, + "learning_rate": 1.587464671688187e-06, + "loss": 0.0002, + "step": 1115 + }, + { + "epoch": 2.48, + "grad_norm": 0.011469347697589198, + "learning_rate": 1.574513562844342e-06, + "loss": 0.0002, + "step": 1116 + }, + { + "epoch": 2.482222222222222, + "grad_norm": 0.013882779435095118, + "learning_rate": 1.5616109845521099e-06, + "loss": 0.0003, + "step": 1117 + }, + { + "epoch": 2.4844444444444447, + "grad_norm": 0.002865724305067781, + "learning_rate": 1.5487570111299566e-06, + "loss": 0.0001, + "step": 1118 + }, + { + "epoch": 2.486666666666667, + "grad_norm": 0.03487507290291792, + "learning_rate": 1.5359517166163884e-06, + "loss": 0.0006, + "step": 1119 + }, + { + "epoch": 2.488888888888889, + "grad_norm": 1.1872262778966778, + "learning_rate": 1.5231951747695207e-06, + "loss": 0.0202, + "step": 1120 + }, + { + "epoch": 2.491111111111111, + "grad_norm": 0.048571999120266225, + "learning_rate": 1.5104874590666563e-06, + "loss": 0.0009, + "step": 1121 + }, + { + "epoch": 2.493333333333333, + "grad_norm": 0.3746895191056621, + "learning_rate": 1.4978286427038602e-06, + "loss": 0.0053, + "step": 1122 + }, + { + "epoch": 2.4955555555555557, + "grad_norm": 0.005904768567153099, + "learning_rate": 1.485218798595538e-06, + "loss": 0.0002, + "step": 1123 + }, + { + "epoch": 2.497777777777778, + "grad_norm": 0.7550138714060836, + "learning_rate": 1.4726579993740153e-06, + "loss": 0.0083, + "step": 1124 + }, + { + "epoch": 2.5, + "grad_norm": 0.007007054781610603, + "learning_rate": 1.4601463173891273e-06, + "loss": 0.0001, + "step": 1125 + }, + { + "epoch": 2.502222222222222, + "grad_norm": 0.1071533921736725, + "learning_rate": 1.4476838247077874e-06, + "loss": 0.0024, + "step": 1126 + }, + { + "epoch": 2.5044444444444443, + "grad_norm": 0.004803729194633826, + "learning_rate": 1.4352705931135835e-06, + "loss": 0.0001, + "step": 1127 + }, + { + "epoch": 2.506666666666667, + "grad_norm": 0.00673788960668807, + "learning_rate": 1.4229066941063618e-06, + "loss": 0.0002, + "step": 1128 + }, + { + 
"epoch": 2.508888888888889, + "grad_norm": 0.0067472453381644815, + "learning_rate": 1.4105921989018112e-06, + "loss": 0.0002, + "step": 1129 + }, + { + "epoch": 2.511111111111111, + "grad_norm": 0.11919719490895432, + "learning_rate": 1.3983271784310616e-06, + "loss": 0.0018, + "step": 1130 + }, + { + "epoch": 2.513333333333333, + "grad_norm": 0.13405007425610568, + "learning_rate": 1.3861117033402639e-06, + "loss": 0.0025, + "step": 1131 + }, + { + "epoch": 2.5155555555555553, + "grad_norm": 0.09283533830085984, + "learning_rate": 1.373945843990192e-06, + "loss": 0.0015, + "step": 1132 + }, + { + "epoch": 2.517777777777778, + "grad_norm": 0.009818493697285989, + "learning_rate": 1.3618296704558364e-06, + "loss": 0.0002, + "step": 1133 + }, + { + "epoch": 2.52, + "grad_norm": 0.005595699930958301, + "learning_rate": 1.3497632525259963e-06, + "loss": 0.0002, + "step": 1134 + }, + { + "epoch": 2.522222222222222, + "grad_norm": 0.020671200320299628, + "learning_rate": 1.3377466597028788e-06, + "loss": 0.0004, + "step": 1135 + }, + { + "epoch": 2.5244444444444447, + "grad_norm": 0.004073195826503962, + "learning_rate": 1.325779961201703e-06, + "loss": 0.0001, + "step": 1136 + }, + { + "epoch": 2.5266666666666664, + "grad_norm": 0.6303713684548944, + "learning_rate": 1.313863225950297e-06, + "loss": 0.0096, + "step": 1137 + }, + { + "epoch": 2.528888888888889, + "grad_norm": 0.17205309274205605, + "learning_rate": 1.301996522588701e-06, + "loss": 0.0029, + "step": 1138 + }, + { + "epoch": 2.531111111111111, + "grad_norm": 0.0041911422648637795, + "learning_rate": 1.2901799194687737e-06, + "loss": 0.0001, + "step": 1139 + }, + { + "epoch": 2.533333333333333, + "grad_norm": 0.005900521605739531, + "learning_rate": 1.2784134846537988e-06, + "loss": 0.0002, + "step": 1140 + }, + { + "epoch": 2.535555555555556, + "grad_norm": 0.007191764750945701, + "learning_rate": 1.2666972859180894e-06, + "loss": 0.0002, + "step": 1141 + }, + { + "epoch": 2.537777777777778, + "grad_norm": 0.03717878933983772, + "learning_rate": 1.255031390746605e-06, + "loss": 0.0007, + "step": 1142 + }, + { + "epoch": 2.54, + "grad_norm": 0.006952676520782671, + "learning_rate": 1.2434158663345553e-06, + "loss": 0.0002, + "step": 1143 + }, + { + "epoch": 2.542222222222222, + "grad_norm": 0.0363938008477587, + "learning_rate": 1.2318507795870138e-06, + "loss": 0.0006, + "step": 1144 + }, + { + "epoch": 2.5444444444444443, + "grad_norm": 0.0034412970804913803, + "learning_rate": 1.220336197118539e-06, + "loss": 0.0001, + "step": 1145 + }, + { + "epoch": 2.546666666666667, + "grad_norm": 0.01035871662848605, + "learning_rate": 1.2088721852527807e-06, + "loss": 0.0002, + "step": 1146 + }, + { + "epoch": 2.548888888888889, + "grad_norm": 0.004768177858031779, + "learning_rate": 1.1974588100221074e-06, + "loss": 0.0002, + "step": 1147 + }, + { + "epoch": 2.551111111111111, + "grad_norm": 2.241205328646083, + "learning_rate": 1.1860961371672242e-06, + "loss": 0.0299, + "step": 1148 + }, + { + "epoch": 2.5533333333333332, + "grad_norm": 0.18116320605102984, + "learning_rate": 1.1747842321367886e-06, + "loss": 0.0031, + "step": 1149 + }, + { + "epoch": 2.5555555555555554, + "grad_norm": 0.12225470147847156, + "learning_rate": 1.1635231600870334e-06, + "loss": 0.0018, + "step": 1150 + }, + { + "epoch": 2.557777777777778, + "grad_norm": 0.003754550476031459, + "learning_rate": 1.1523129858814042e-06, + "loss": 0.0001, + "step": 1151 + }, + { + "epoch": 2.56, + "grad_norm": 0.015858050480712076, + "learning_rate": 1.14115377409017e-06, + 
"loss": 0.0004, + "step": 1152 + }, + { + "epoch": 2.562222222222222, + "grad_norm": 0.003755162986734331, + "learning_rate": 1.1300455889900587e-06, + "loss": 0.0001, + "step": 1153 + }, + { + "epoch": 2.5644444444444443, + "grad_norm": 0.0074796981641479035, + "learning_rate": 1.1189884945638874e-06, + "loss": 0.0002, + "step": 1154 + }, + { + "epoch": 2.5666666666666664, + "grad_norm": 0.0073107060692593285, + "learning_rate": 1.1079825545001887e-06, + "loss": 0.0002, + "step": 1155 + }, + { + "epoch": 2.568888888888889, + "grad_norm": 0.0039457346862741115, + "learning_rate": 1.097027832192854e-06, + "loss": 0.0001, + "step": 1156 + }, + { + "epoch": 2.571111111111111, + "grad_norm": 0.005760169379248278, + "learning_rate": 1.086124390740757e-06, + "loss": 0.0002, + "step": 1157 + }, + { + "epoch": 2.5733333333333333, + "grad_norm": 0.14344351679684078, + "learning_rate": 1.0752722929473936e-06, + "loss": 0.0028, + "step": 1158 + }, + { + "epoch": 2.575555555555556, + "grad_norm": 0.005912586675499084, + "learning_rate": 1.0644716013205303e-06, + "loss": 0.0002, + "step": 1159 + }, + { + "epoch": 2.5777777777777775, + "grad_norm": 0.02237988013452415, + "learning_rate": 1.0537223780718265e-06, + "loss": 0.0004, + "step": 1160 + }, + { + "epoch": 2.58, + "grad_norm": 0.09721374578014466, + "learning_rate": 1.0430246851164904e-06, + "loss": 0.002, + "step": 1161 + }, + { + "epoch": 2.582222222222222, + "grad_norm": 0.01883400651954982, + "learning_rate": 1.032378584072915e-06, + "loss": 0.0004, + "step": 1162 + }, + { + "epoch": 2.5844444444444443, + "grad_norm": 0.014807066013998105, + "learning_rate": 1.021784136262326e-06, + "loss": 0.0004, + "step": 1163 + }, + { + "epoch": 2.586666666666667, + "grad_norm": 0.0035812861961145898, + "learning_rate": 1.0112414027084262e-06, + "loss": 0.0001, + "step": 1164 + }, + { + "epoch": 2.588888888888889, + "grad_norm": 1.9188345449763051, + "learning_rate": 1.0007504441370508e-06, + "loss": 0.2146, + "step": 1165 + }, + { + "epoch": 2.591111111111111, + "grad_norm": 0.014302333694207527, + "learning_rate": 9.903113209758098e-07, + "loss": 0.0003, + "step": 1166 + }, + { + "epoch": 2.5933333333333333, + "grad_norm": 0.004643731643679011, + "learning_rate": 9.799240933537379e-07, + "loss": 0.0001, + "step": 1167 + }, + { + "epoch": 2.5955555555555554, + "grad_norm": 0.00554964920745309, + "learning_rate": 9.69588821100963e-07, + "loss": 0.0001, + "step": 1168 + }, + { + "epoch": 2.597777777777778, + "grad_norm": 0.008979439414718172, + "learning_rate": 9.59305563748345e-07, + "loss": 0.0002, + "step": 1169 + }, + { + "epoch": 2.6, + "grad_norm": 0.04012136022356773, + "learning_rate": 9.490743805271396e-07, + "loss": 0.0009, + "step": 1170 + }, + { + "epoch": 2.602222222222222, + "grad_norm": 0.0037534518092945824, + "learning_rate": 9.388953303686587e-07, + "loss": 0.0001, + "step": 1171 + }, + { + "epoch": 2.6044444444444443, + "grad_norm": 0.01720709149399208, + "learning_rate": 9.28768471903928e-07, + "loss": 0.0004, + "step": 1172 + }, + { + "epoch": 2.6066666666666665, + "grad_norm": 0.009241920892025104, + "learning_rate": 9.186938634633536e-07, + "loss": 0.0002, + "step": 1173 + }, + { + "epoch": 2.608888888888889, + "grad_norm": 0.004384340328917472, + "learning_rate": 9.086715630763787e-07, + "loss": 0.0001, + "step": 1174 + }, + { + "epoch": 2.611111111111111, + "grad_norm": 0.03613028762556633, + "learning_rate": 8.987016284711569e-07, + "loss": 0.0009, + "step": 1175 + }, + { + "epoch": 2.6133333333333333, + "grad_norm": 
0.04595669495012611, + "learning_rate": 8.887841170742128e-07, + "loss": 0.0008, + "step": 1176 + }, + { + "epoch": 2.6155555555555554, + "grad_norm": 0.01289348447278326, + "learning_rate": 8.789190860101226e-07, + "loss": 0.0002, + "step": 1177 + }, + { + "epoch": 2.6177777777777775, + "grad_norm": 0.008817691413725045, + "learning_rate": 8.691065921011687e-07, + "loss": 0.0002, + "step": 1178 + }, + { + "epoch": 2.62, + "grad_norm": 0.43308246768403375, + "learning_rate": 8.593466918670257e-07, + "loss": 0.0055, + "step": 1179 + }, + { + "epoch": 2.6222222222222222, + "grad_norm": 2.1143425680999166, + "learning_rate": 8.49639441524428e-07, + "loss": 0.0994, + "step": 1180 + }, + { + "epoch": 2.6244444444444444, + "grad_norm": 0.35606727417715717, + "learning_rate": 8.399848969868507e-07, + "loss": 0.0044, + "step": 1181 + }, + { + "epoch": 2.626666666666667, + "grad_norm": 0.003873520107070059, + "learning_rate": 8.303831138641805e-07, + "loss": 0.0001, + "step": 1182 + }, + { + "epoch": 2.628888888888889, + "grad_norm": 0.033508240487768313, + "learning_rate": 8.208341474624071e-07, + "loss": 0.0006, + "step": 1183 + }, + { + "epoch": 2.631111111111111, + "grad_norm": 0.005981439874368762, + "learning_rate": 8.113380527832904e-07, + "loss": 0.0001, + "step": 1184 + }, + { + "epoch": 2.6333333333333333, + "grad_norm": 0.006273052186171175, + "learning_rate": 8.018948845240538e-07, + "loss": 0.0002, + "step": 1185 + }, + { + "epoch": 2.6355555555555554, + "grad_norm": 0.003240223481901292, + "learning_rate": 7.925046970770689e-07, + "loss": 0.0001, + "step": 1186 + }, + { + "epoch": 2.637777777777778, + "grad_norm": 0.017429461732411514, + "learning_rate": 7.83167544529534e-07, + "loss": 0.0005, + "step": 1187 + }, + { + "epoch": 2.64, + "grad_norm": 0.01442398035452965, + "learning_rate": 7.738834806631712e-07, + "loss": 0.0002, + "step": 1188 + }, + { + "epoch": 2.6422222222222222, + "grad_norm": 0.00454093353593792, + "learning_rate": 7.646525589539122e-07, + "loss": 0.0001, + "step": 1189 + }, + { + "epoch": 2.6444444444444444, + "grad_norm": 0.005767121462450202, + "learning_rate": 7.554748325715921e-07, + "loss": 0.0002, + "step": 1190 + }, + { + "epoch": 2.6466666666666665, + "grad_norm": 2.3981139343497584, + "learning_rate": 7.463503543796413e-07, + "loss": 0.1073, + "step": 1191 + }, + { + "epoch": 2.648888888888889, + "grad_norm": 2.635779388521255, + "learning_rate": 7.372791769347843e-07, + "loss": 0.0606, + "step": 1192 + }, + { + "epoch": 2.651111111111111, + "grad_norm": 0.005619490559119173, + "learning_rate": 7.282613524867321e-07, + "loss": 0.0002, + "step": 1193 + }, + { + "epoch": 2.6533333333333333, + "grad_norm": 0.003628786172391859, + "learning_rate": 7.192969329778888e-07, + "loss": 0.0001, + "step": 1194 + }, + { + "epoch": 2.6555555555555554, + "grad_norm": 0.007861707756792482, + "learning_rate": 7.103859700430416e-07, + "loss": 0.0002, + "step": 1195 + }, + { + "epoch": 2.6577777777777776, + "grad_norm": 0.0563076698830864, + "learning_rate": 7.015285150090744e-07, + "loss": 0.0004, + "step": 1196 + }, + { + "epoch": 2.66, + "grad_norm": 0.2616158415649392, + "learning_rate": 6.927246188946635e-07, + "loss": 0.0048, + "step": 1197 + }, + { + "epoch": 2.6622222222222223, + "grad_norm": 0.0034970272356174103, + "learning_rate": 6.839743324099901e-07, + "loss": 0.0001, + "step": 1198 + }, + { + "epoch": 2.6644444444444444, + "grad_norm": 0.009196507414575821, + "learning_rate": 6.752777059564431e-07, + "loss": 0.0002, + "step": 1199 + }, + { + "epoch": 
2.6666666666666665, + "grad_norm": 0.13733638276835183, + "learning_rate": 6.666347896263326e-07, + "loss": 0.0019, + "step": 1200 + }, + { + "epoch": 2.6688888888888886, + "grad_norm": 0.01963936871866741, + "learning_rate": 6.58045633202602e-07, + "loss": 0.0004, + "step": 1201 + }, + { + "epoch": 2.671111111111111, + "grad_norm": 0.21254340028685287, + "learning_rate": 6.495102861585356e-07, + "loss": 0.0022, + "step": 1202 + }, + { + "epoch": 2.6733333333333333, + "grad_norm": 0.21230152686288337, + "learning_rate": 6.41028797657478e-07, + "loss": 0.0038, + "step": 1203 + }, + { + "epoch": 2.6755555555555555, + "grad_norm": 0.05233606949242994, + "learning_rate": 6.32601216552553e-07, + "loss": 0.0007, + "step": 1204 + }, + { + "epoch": 2.677777777777778, + "grad_norm": 0.07908960159367866, + "learning_rate": 6.242275913863772e-07, + "loss": 0.0013, + "step": 1205 + }, + { + "epoch": 2.68, + "grad_norm": 0.00762262973404685, + "learning_rate": 6.159079703907823e-07, + "loss": 0.0002, + "step": 1206 + }, + { + "epoch": 2.6822222222222223, + "grad_norm": 0.4738971579571063, + "learning_rate": 6.076424014865378e-07, + "loss": 0.0042, + "step": 1207 + }, + { + "epoch": 2.6844444444444444, + "grad_norm": 0.367575717336441, + "learning_rate": 5.994309322830749e-07, + "loss": 0.0023, + "step": 1208 + }, + { + "epoch": 2.6866666666666665, + "grad_norm": 2.6891050284075777, + "learning_rate": 5.912736100782135e-07, + "loss": 0.1045, + "step": 1209 + }, + { + "epoch": 2.688888888888889, + "grad_norm": 0.15036486789442854, + "learning_rate": 5.831704818578842e-07, + "loss": 0.0015, + "step": 1210 + }, + { + "epoch": 2.6911111111111112, + "grad_norm": 0.02059600123651437, + "learning_rate": 5.751215942958699e-07, + "loss": 0.0004, + "step": 1211 + }, + { + "epoch": 2.6933333333333334, + "grad_norm": 0.003223667891492533, + "learning_rate": 5.671269937535196e-07, + "loss": 0.0001, + "step": 1212 + }, + { + "epoch": 2.6955555555555555, + "grad_norm": 0.011795836037816232, + "learning_rate": 5.591867262794969e-07, + "loss": 0.0002, + "step": 1213 + }, + { + "epoch": 2.6977777777777776, + "grad_norm": 0.04831110448874025, + "learning_rate": 5.513008376095064e-07, + "loss": 0.0007, + "step": 1214 + }, + { + "epoch": 2.7, + "grad_norm": 0.4274712640257391, + "learning_rate": 5.434693731660324e-07, + "loss": 0.0042, + "step": 1215 + }, + { + "epoch": 2.7022222222222223, + "grad_norm": 0.0044383141730774435, + "learning_rate": 5.356923780580759e-07, + "loss": 0.0001, + "step": 1216 + }, + { + "epoch": 2.7044444444444444, + "grad_norm": 0.04566690904679661, + "learning_rate": 5.279698970809011e-07, + "loss": 0.0004, + "step": 1217 + }, + { + "epoch": 2.7066666666666666, + "grad_norm": 0.036104723209363594, + "learning_rate": 5.203019747157645e-07, + "loss": 0.0006, + "step": 1218 + }, + { + "epoch": 2.7088888888888887, + "grad_norm": 0.7253347753864402, + "learning_rate": 5.12688655129675e-07, + "loss": 0.0028, + "step": 1219 + }, + { + "epoch": 2.7111111111111112, + "grad_norm": 0.031179639964128432, + "learning_rate": 5.051299821751254e-07, + "loss": 0.0005, + "step": 1220 + }, + { + "epoch": 2.7133333333333334, + "grad_norm": 0.004301416270371997, + "learning_rate": 4.976259993898503e-07, + "loss": 0.0001, + "step": 1221 + }, + { + "epoch": 2.7155555555555555, + "grad_norm": 0.006554965453751134, + "learning_rate": 4.901767499965637e-07, + "loss": 0.0002, + "step": 1222 + }, + { + "epoch": 2.7177777777777776, + "grad_norm": 0.12349433205595654, + "learning_rate": 4.827822769027235e-07, + "loss": 0.0017, 
+ "step": 1223 + }, + { + "epoch": 2.7199999999999998, + "grad_norm": 0.04081422447202619, + "learning_rate": 4.7544262270027396e-07, + "loss": 0.0005, + "step": 1224 + }, + { + "epoch": 2.7222222222222223, + "grad_norm": 0.9817055743442086, + "learning_rate": 4.6815782966540546e-07, + "loss": 0.0106, + "step": 1225 + }, + { + "epoch": 2.7244444444444444, + "grad_norm": 0.01379721685401414, + "learning_rate": 4.6092793975831e-07, + "loss": 0.0003, + "step": 1226 + }, + { + "epoch": 2.7266666666666666, + "grad_norm": 0.007589759166324705, + "learning_rate": 4.537529946229369e-07, + "loss": 0.0002, + "step": 1227 + }, + { + "epoch": 2.728888888888889, + "grad_norm": 5.469198341698043, + "learning_rate": 4.4663303558675764e-07, + "loss": 0.0868, + "step": 1228 + }, + { + "epoch": 2.7311111111111113, + "grad_norm": 0.00509266112370004, + "learning_rate": 4.3956810366052705e-07, + "loss": 0.0001, + "step": 1229 + }, + { + "epoch": 2.7333333333333334, + "grad_norm": 0.003806452397286481, + "learning_rate": 4.325582395380412e-07, + "loss": 0.0001, + "step": 1230 + }, + { + "epoch": 2.7355555555555555, + "grad_norm": 0.0047501656845345035, + "learning_rate": 4.2560348359590995e-07, + "loss": 0.0001, + "step": 1231 + }, + { + "epoch": 2.7377777777777776, + "grad_norm": 0.1218184385138035, + "learning_rate": 4.187038758933204e-07, + "loss": 0.0022, + "step": 1232 + }, + { + "epoch": 2.74, + "grad_norm": 0.005029103908626548, + "learning_rate": 4.118594561718081e-07, + "loss": 0.0001, + "step": 1233 + }, + { + "epoch": 2.7422222222222223, + "grad_norm": 0.004284850377844312, + "learning_rate": 4.0507026385502747e-07, + "loss": 0.0001, + "step": 1234 + }, + { + "epoch": 2.7444444444444445, + "grad_norm": 0.35678839182955296, + "learning_rate": 3.9833633804852277e-07, + "loss": 0.0052, + "step": 1235 + }, + { + "epoch": 2.7466666666666666, + "grad_norm": 0.011549488918900322, + "learning_rate": 3.916577175395098e-07, + "loss": 0.0003, + "step": 1236 + }, + { + "epoch": 2.7488888888888887, + "grad_norm": 0.05071331853661549, + "learning_rate": 3.8503444079664334e-07, + "loss": 0.0007, + "step": 1237 + }, + { + "epoch": 2.7511111111111113, + "grad_norm": 0.008597480670433017, + "learning_rate": 3.784665459697989e-07, + "loss": 0.0002, + "step": 1238 + }, + { + "epoch": 2.7533333333333334, + "grad_norm": 0.02311507683943809, + "learning_rate": 3.7195407088985834e-07, + "loss": 0.0004, + "step": 1239 + }, + { + "epoch": 2.7555555555555555, + "grad_norm": 0.3878882593654115, + "learning_rate": 3.6549705306848313e-07, + "loss": 0.0048, + "step": 1240 + }, + { + "epoch": 2.7577777777777777, + "grad_norm": 0.03162216140531413, + "learning_rate": 3.5909552969790376e-07, + "loss": 0.0007, + "step": 1241 + }, + { + "epoch": 2.76, + "grad_norm": 0.01012754943398893, + "learning_rate": 3.5274953765070505e-07, + "loss": 0.0002, + "step": 1242 + }, + { + "epoch": 2.7622222222222224, + "grad_norm": 0.015681353741538905, + "learning_rate": 3.4645911347961357e-07, + "loss": 0.0003, + "step": 1243 + }, + { + "epoch": 2.7644444444444445, + "grad_norm": 0.006565266290510121, + "learning_rate": 3.4022429341728503e-07, + "loss": 0.0002, + "step": 1244 + }, + { + "epoch": 2.7666666666666666, + "grad_norm": 0.03201213742072042, + "learning_rate": 3.340451133760958e-07, + "loss": 0.0006, + "step": 1245 + }, + { + "epoch": 2.7688888888888887, + "grad_norm": 0.00536329376757758, + "learning_rate": 3.279216089479431e-07, + "loss": 0.0002, + "step": 1246 + }, + { + "epoch": 2.771111111111111, + "grad_norm": 0.009525072869966967, + 
"learning_rate": 3.218538154040285e-07, + "loss": 0.0003, + "step": 1247 + }, + { + "epoch": 2.7733333333333334, + "grad_norm": 0.00721917435905328, + "learning_rate": 3.158417676946635e-07, + "loss": 0.0002, + "step": 1248 + }, + { + "epoch": 2.7755555555555556, + "grad_norm": 0.010026802570801489, + "learning_rate": 3.0988550044906305e-07, + "loss": 0.0002, + "step": 1249 + }, + { + "epoch": 2.7777777777777777, + "grad_norm": 0.0446275392456314, + "learning_rate": 3.039850479751505e-07, + "loss": 0.0008, + "step": 1250 + }, + { + "epoch": 2.7800000000000002, + "grad_norm": 0.005183736033521112, + "learning_rate": 2.9814044425935605e-07, + "loss": 0.0002, + "step": 1251 + }, + { + "epoch": 2.7822222222222224, + "grad_norm": 0.310152362383429, + "learning_rate": 2.923517229664241e-07, + "loss": 0.0038, + "step": 1252 + }, + { + "epoch": 2.7844444444444445, + "grad_norm": 0.018561588932111687, + "learning_rate": 2.8661891743921644e-07, + "loss": 0.0004, + "step": 1253 + }, + { + "epoch": 2.7866666666666666, + "grad_norm": 0.015550194732309095, + "learning_rate": 2.809420606985236e-07, + "loss": 0.0004, + "step": 1254 + }, + { + "epoch": 2.7888888888888888, + "grad_norm": 0.006990339103153065, + "learning_rate": 2.753211854428728e-07, + "loss": 0.0002, + "step": 1255 + }, + { + "epoch": 2.7911111111111113, + "grad_norm": 0.025160062259127834, + "learning_rate": 2.6975632404833584e-07, + "loss": 0.0005, + "step": 1256 + }, + { + "epoch": 2.7933333333333334, + "grad_norm": 0.043027891231747314, + "learning_rate": 2.6424750856835155e-07, + "loss": 0.0005, + "step": 1257 + }, + { + "epoch": 2.7955555555555556, + "grad_norm": 0.12325242493690418, + "learning_rate": 2.5879477073353254e-07, + "loss": 0.0015, + "step": 1258 + }, + { + "epoch": 2.7977777777777777, + "grad_norm": 0.044844037879654554, + "learning_rate": 2.5339814195148636e-07, + "loss": 0.0008, + "step": 1259 + }, + { + "epoch": 2.8, + "grad_norm": 0.059257782123314194, + "learning_rate": 2.480576533066348e-07, + "loss": 0.0008, + "step": 1260 + }, + { + "epoch": 2.8022222222222224, + "grad_norm": 0.024962914871947572, + "learning_rate": 2.427733355600337e-07, + "loss": 0.0003, + "step": 1261 + }, + { + "epoch": 2.8044444444444445, + "grad_norm": 0.007362782245617757, + "learning_rate": 2.375452191491967e-07, + "loss": 0.0002, + "step": 1262 + }, + { + "epoch": 2.8066666666666666, + "grad_norm": 0.007567040774454748, + "learning_rate": 2.3237333418791863e-07, + "loss": 0.0002, + "step": 1263 + }, + { + "epoch": 2.8088888888888888, + "grad_norm": 0.019111459661082648, + "learning_rate": 2.2725771046610335e-07, + "loss": 0.0004, + "step": 1264 + }, + { + "epoch": 2.811111111111111, + "grad_norm": 1.4074407601021746, + "learning_rate": 2.2219837744959284e-07, + "loss": 0.0269, + "step": 1265 + }, + { + "epoch": 2.8133333333333335, + "grad_norm": 0.017221687878442086, + "learning_rate": 2.1719536427999289e-07, + "loss": 0.0003, + "step": 1266 + }, + { + "epoch": 2.8155555555555556, + "grad_norm": 0.0808181348687061, + "learning_rate": 2.1224869977451102e-07, + "loss": 0.0012, + "step": 1267 + }, + { + "epoch": 2.8177777777777777, + "grad_norm": 0.012404515981648245, + "learning_rate": 2.0735841242578992e-07, + "loss": 0.0003, + "step": 1268 + }, + { + "epoch": 2.82, + "grad_norm": 1.2712888020390898, + "learning_rate": 2.0252453040173646e-07, + "loss": 0.0171, + "step": 1269 + }, + { + "epoch": 2.822222222222222, + "grad_norm": 0.005552929244558772, + "learning_rate": 1.9774708154536971e-07, + "loss": 0.0001, + "step": 1270 + }, + { + 
"epoch": 2.8244444444444445, + "grad_norm": 0.0032132962061250715, + "learning_rate": 1.9302609337465195e-07, + "loss": 0.0001, + "step": 1271 + }, + { + "epoch": 2.8266666666666667, + "grad_norm": 2.702054784330389, + "learning_rate": 1.8836159308233571e-07, + "loss": 0.1092, + "step": 1272 + }, + { + "epoch": 2.828888888888889, + "grad_norm": 0.10404110555219867, + "learning_rate": 1.8375360753580485e-07, + "loss": 0.0019, + "step": 1273 + }, + { + "epoch": 2.8311111111111114, + "grad_norm": 0.010378726661429279, + "learning_rate": 1.7920216327691696e-07, + "loss": 0.0003, + "step": 1274 + }, + { + "epoch": 2.8333333333333335, + "grad_norm": 0.04932173728306056, + "learning_rate": 1.7470728652185688e-07, + "loss": 0.0005, + "step": 1275 + }, + { + "epoch": 2.8355555555555556, + "grad_norm": 0.11817313303484879, + "learning_rate": 1.7026900316098217e-07, + "loss": 0.0021, + "step": 1276 + }, + { + "epoch": 2.8377777777777777, + "grad_norm": 0.00712917525218908, + "learning_rate": 1.6588733875867237e-07, + "loss": 0.0002, + "step": 1277 + }, + { + "epoch": 2.84, + "grad_norm": 1.7169851274423176, + "learning_rate": 1.615623185531845e-07, + "loss": 0.1554, + "step": 1278 + }, + { + "epoch": 2.8422222222222224, + "grad_norm": 0.007226361017833988, + "learning_rate": 1.572939674565055e-07, + "loss": 0.0002, + "step": 1279 + }, + { + "epoch": 2.8444444444444446, + "grad_norm": 0.011419083745535635, + "learning_rate": 1.5308231005421115e-07, + "loss": 0.0003, + "step": 1280 + }, + { + "epoch": 2.8466666666666667, + "grad_norm": 0.03384637402914988, + "learning_rate": 1.4892737060532404e-07, + "loss": 0.0004, + "step": 1281 + }, + { + "epoch": 2.848888888888889, + "grad_norm": 0.01035924503651678, + "learning_rate": 1.4482917304217136e-07, + "loss": 0.0002, + "step": 1282 + }, + { + "epoch": 2.851111111111111, + "grad_norm": 0.1962846261569105, + "learning_rate": 1.407877409702496e-07, + "loss": 0.003, + "step": 1283 + }, + { + "epoch": 2.8533333333333335, + "grad_norm": 0.0075898320252030405, + "learning_rate": 1.3680309766808675e-07, + "loss": 0.0002, + "step": 1284 + }, + { + "epoch": 2.8555555555555556, + "grad_norm": 0.005951507350888723, + "learning_rate": 1.3287526608711132e-07, + "loss": 0.0002, + "step": 1285 + }, + { + "epoch": 2.8577777777777778, + "grad_norm": 0.01951294017508138, + "learning_rate": 1.2900426885151473e-07, + "loss": 0.0003, + "step": 1286 + }, + { + "epoch": 2.86, + "grad_norm": 0.1832568874905622, + "learning_rate": 1.2519012825812804e-07, + "loss": 0.0025, + "step": 1287 + }, + { + "epoch": 2.862222222222222, + "grad_norm": 0.689246799017246, + "learning_rate": 1.2143286627628424e-07, + "loss": 0.0134, + "step": 1288 + }, + { + "epoch": 2.8644444444444446, + "grad_norm": 0.005177296618630876, + "learning_rate": 1.1773250454770512e-07, + "loss": 0.0001, + "step": 1289 + }, + { + "epoch": 2.8666666666666667, + "grad_norm": 0.003107918089040882, + "learning_rate": 1.1408906438636236e-07, + "loss": 0.0001, + "step": 1290 + }, + { + "epoch": 2.868888888888889, + "grad_norm": 0.07946242865140096, + "learning_rate": 1.1050256677836213e-07, + "loss": 0.0011, + "step": 1291 + }, + { + "epoch": 2.871111111111111, + "grad_norm": 0.37247407448603137, + "learning_rate": 1.0697303238182522e-07, + "loss": 0.0048, + "step": 1292 + }, + { + "epoch": 2.873333333333333, + "grad_norm": 0.038608440801768805, + "learning_rate": 1.0350048152676484e-07, + "loss": 0.0006, + "step": 1293 + }, + { + "epoch": 2.8755555555555556, + "grad_norm": 0.0028124564827239577, + "learning_rate": 
1.0008493421497123e-07, + "loss": 0.0001, + "step": 1294 + }, + { + "epoch": 2.8777777777777778, + "grad_norm": 0.00664426228879201, + "learning_rate": 9.672641011989503e-08, + "loss": 0.0001, + "step": 1295 + }, + { + "epoch": 2.88, + "grad_norm": 3.8056118126556675, + "learning_rate": 9.342492858653519e-08, + "loss": 0.0421, + "step": 1296 + }, + { + "epoch": 2.8822222222222225, + "grad_norm": 0.0045748121289391555, + "learning_rate": 9.018050863132566e-08, + "loss": 0.0001, + "step": 1297 + }, + { + "epoch": 2.8844444444444446, + "grad_norm": 0.01398265736950471, + "learning_rate": 8.699316894203225e-08, + "loss": 0.0003, + "step": 1298 + }, + { + "epoch": 2.8866666666666667, + "grad_norm": 0.11450110076180904, + "learning_rate": 8.386292787763483e-08, + "loss": 0.001, + "step": 1299 + }, + { + "epoch": 2.888888888888889, + "grad_norm": 0.018447803569358514, + "learning_rate": 8.078980346822863e-08, + "loss": 0.0003, + "step": 1300 + }, + { + "epoch": 2.891111111111111, + "grad_norm": 0.0034948701819001284, + "learning_rate": 7.777381341492085e-08, + "loss": 0.0001, + "step": 1301 + }, + { + "epoch": 2.8933333333333335, + "grad_norm": 0.03502009921123794, + "learning_rate": 7.481497508972313e-08, + "loss": 0.0005, + "step": 1302 + }, + { + "epoch": 2.8955555555555557, + "grad_norm": 3.9958813368894157, + "learning_rate": 7.191330553545595e-08, + "loss": 0.1326, + "step": 1303 + }, + { + "epoch": 2.897777777777778, + "grad_norm": 0.01948834490644077, + "learning_rate": 6.906882146565097e-08, + "loss": 0.0004, + "step": 1304 + }, + { + "epoch": 2.9, + "grad_norm": 0.01078686952740169, + "learning_rate": 6.628153926445113e-08, + "loss": 0.0002, + "step": 1305 + }, + { + "epoch": 2.902222222222222, + "grad_norm": 0.16558280828413696, + "learning_rate": 6.355147498651959e-08, + "loss": 0.0028, + "step": 1306 + }, + { + "epoch": 2.9044444444444446, + "grad_norm": 0.03163260519864881, + "learning_rate": 6.087864435694535e-08, + "loss": 0.0006, + "step": 1307 + }, + { + "epoch": 2.9066666666666667, + "grad_norm": 0.6379648849259939, + "learning_rate": 5.8263062771153344e-08, + "loss": 0.009, + "step": 1308 + }, + { + "epoch": 2.908888888888889, + "grad_norm": 0.00849046711144789, + "learning_rate": 5.5704745294815624e-08, + "loss": 0.0002, + "step": 1309 + }, + { + "epoch": 2.911111111111111, + "grad_norm": 1.2370504701354363, + "learning_rate": 5.3203706663765845e-08, + "loss": 0.0167, + "step": 1310 + }, + { + "epoch": 2.913333333333333, + "grad_norm": 0.017198918435653646, + "learning_rate": 5.0759961283911584e-08, + "loss": 0.0003, + "step": 1311 + }, + { + "epoch": 2.9155555555555557, + "grad_norm": 0.0035803439069190244, + "learning_rate": 4.8373523231153297e-08, + "loss": 0.0001, + "step": 1312 + }, + { + "epoch": 2.917777777777778, + "grad_norm": 0.056190672353575416, + "learning_rate": 4.604440625130324e-08, + "loss": 0.0009, + "step": 1313 + }, + { + "epoch": 2.92, + "grad_norm": 0.5583924896198119, + "learning_rate": 4.377262376000557e-08, + "loss": 0.0102, + "step": 1314 + }, + { + "epoch": 2.9222222222222225, + "grad_norm": 1.5602277135164384, + "learning_rate": 4.155818884266194e-08, + "loss": 0.0266, + "step": 1315 + }, + { + "epoch": 2.924444444444444, + "grad_norm": 0.005070809577874555, + "learning_rate": 3.940111425435045e-08, + "loss": 0.0001, + "step": 1316 + }, + { + "epoch": 2.9266666666666667, + "grad_norm": 0.004298130851400702, + "learning_rate": 3.730141241975682e-08, + "loss": 0.0001, + "step": 1317 + }, + { + "epoch": 2.928888888888889, + "grad_norm": 
0.0081825334849266, + "learning_rate": 3.525909543310002e-08, + "loss": 0.0002, + "step": 1318 + }, + { + "epoch": 2.931111111111111, + "grad_norm": 0.004978457446793133, + "learning_rate": 3.327417505806785e-08, + "loss": 0.0002, + "step": 1319 + }, + { + "epoch": 2.9333333333333336, + "grad_norm": 0.004442095878631214, + "learning_rate": 3.134666272774034e-08, + "loss": 0.0001, + "step": 1320 + }, + { + "epoch": 2.9355555555555557, + "grad_norm": 0.10935697132636515, + "learning_rate": 2.9476569544532042e-08, + "loss": 0.0021, + "step": 1321 + }, + { + "epoch": 2.937777777777778, + "grad_norm": 0.6917385814754962, + "learning_rate": 2.7663906280124276e-08, + "loss": 0.0113, + "step": 1322 + }, + { + "epoch": 2.94, + "grad_norm": 0.00614412031277472, + "learning_rate": 2.5908683375404088e-08, + "loss": 0.0002, + "step": 1323 + }, + { + "epoch": 2.942222222222222, + "grad_norm": 0.002767569729845455, + "learning_rate": 2.4210910940402066e-08, + "loss": 0.0001, + "step": 1324 + }, + { + "epoch": 2.9444444444444446, + "grad_norm": 0.006264338098344262, + "learning_rate": 2.257059875423795e-08, + "loss": 0.0002, + "step": 1325 + }, + { + "epoch": 2.9466666666666668, + "grad_norm": 0.13468660908170602, + "learning_rate": 2.0987756265060664e-08, + "loss": 0.0023, + "step": 1326 + }, + { + "epoch": 2.948888888888889, + "grad_norm": 0.03256323170905649, + "learning_rate": 1.946239258999616e-08, + "loss": 0.0007, + "step": 1327 + }, + { + "epoch": 2.951111111111111, + "grad_norm": 0.0596375571365393, + "learning_rate": 1.7994516515094097e-08, + "loss": 0.0008, + "step": 1328 + }, + { + "epoch": 2.953333333333333, + "grad_norm": 0.006974247597938422, + "learning_rate": 1.6584136495277904e-08, + "loss": 0.0002, + "step": 1329 + }, + { + "epoch": 2.9555555555555557, + "grad_norm": 0.21166344390909245, + "learning_rate": 1.523126065429259e-08, + "loss": 0.0032, + "step": 1330 + }, + { + "epoch": 2.957777777777778, + "grad_norm": 0.006487442449703825, + "learning_rate": 1.3935896784663671e-08, + "loss": 0.0002, + "step": 1331 + }, + { + "epoch": 2.96, + "grad_norm": 0.20431861043146837, + "learning_rate": 1.2698052347649426e-08, + "loss": 0.0034, + "step": 1332 + }, + { + "epoch": 2.962222222222222, + "grad_norm": 0.004965213877008438, + "learning_rate": 1.1517734473195375e-08, + "loss": 0.0001, + "step": 1333 + }, + { + "epoch": 2.964444444444444, + "grad_norm": 0.005969233748611253, + "learning_rate": 1.0394949959898759e-08, + "loss": 0.0002, + "step": 1334 + }, + { + "epoch": 2.966666666666667, + "grad_norm": 0.018107299377156945, + "learning_rate": 9.32970527496524e-09, + "loss": 0.0004, + "step": 1335 + }, + { + "epoch": 2.968888888888889, + "grad_norm": 0.06417713582158226, + "learning_rate": 8.322006554171147e-09, + "loss": 0.0008, + "step": 1336 + }, + { + "epoch": 2.971111111111111, + "grad_norm": 0.007247861237568498, + "learning_rate": 7.371859601832398e-09, + "loss": 0.0002, + "step": 1337 + }, + { + "epoch": 2.9733333333333336, + "grad_norm": 0.8552291468932627, + "learning_rate": 6.479269890766748e-09, + "loss": 0.0101, + "step": 1338 + }, + { + "epoch": 2.9755555555555553, + "grad_norm": 0.3122328271044666, + "learning_rate": 5.644242562264923e-09, + "loss": 0.0031, + "step": 1339 + }, + { + "epoch": 2.977777777777778, + "grad_norm": 0.024503320282025485, + "learning_rate": 4.866782426058425e-09, + "loss": 0.0005, + "step": 1340 + }, + { + "epoch": 2.98, + "grad_norm": 0.004575423886556389, + "learning_rate": 4.146893960295106e-09, + "loss": 0.0002, + "step": 1341 + }, + { + "epoch": 
2.982222222222222, + "grad_norm": 0.01082982437935974, + "learning_rate": 3.4845813115114147e-09, + "loss": 0.0003, + "step": 1342 + }, + { + "epoch": 2.9844444444444447, + "grad_norm": 0.04894680669640281, + "learning_rate": 2.879848294609078e-09, + "loss": 0.0008, + "step": 1343 + }, + { + "epoch": 2.986666666666667, + "grad_norm": 0.4259216094163621, + "learning_rate": 2.332698392830679e-09, + "loss": 0.0072, + "step": 1344 + }, + { + "epoch": 2.988888888888889, + "grad_norm": 0.102181368002208, + "learning_rate": 1.843134757745224e-09, + "loss": 0.002, + "step": 1345 + }, + { + "epoch": 2.991111111111111, + "grad_norm": 0.00881634487689039, + "learning_rate": 1.4111602092226062e-09, + "loss": 0.0002, + "step": 1346 + }, + { + "epoch": 2.993333333333333, + "grad_norm": 0.1189951023202183, + "learning_rate": 1.0367772354258342e-09, + "loss": 0.0022, + "step": 1347 + }, + { + "epoch": 2.9955555555555557, + "grad_norm": 0.014686539330107563, + "learning_rate": 7.199879927877185e-10, + "loss": 0.0003, + "step": 1348 + }, + { + "epoch": 2.997777777777778, + "grad_norm": 0.08632351906277422, + "learning_rate": 4.6079430600531883e-10, + "loss": 0.0019, + "step": 1349 + }, + { + "epoch": 3.0, + "grad_norm": 0.040295182187885616, + "learning_rate": 2.5919766802773306e-10, + "loss": 0.0007, + "step": 1350 + }, { "epoch": 3.0, - "step": 552, - "total_flos": 4395674998272.0, - "train_loss": 0.49586228307948593, - "train_runtime": 3133.0691, - "train_samples_per_second": 2.813, - "train_steps_per_second": 0.176 + "step": 1350, + "total_flos": 12682783526400.0, + "train_loss": 0.19971357741427104, + "train_runtime": 58331.8733, + "train_samples_per_second": 0.37, + "train_steps_per_second": 0.023 } ], "logging_steps": 1.0, - "max_steps": 552, + "max_steps": 1350, "num_input_tokens_seen": 0, "num_train_epochs": 3, "save_steps": 50000, - "total_flos": 4395674998272.0, + "total_flos": 12682783526400.0, "train_batch_size": 8, "trial_name": null, "trial_params": null
}