{
  "best_metric": 0.3966299593448639,
  "best_model_checkpoint": "mikhail-panzo/ceb_b128_le4_s8000/checkpoint-2000",
  "epoch": 594.059405940594,
  "eval_steps": 500,
  "global_step": 7500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 3.9603960396039604,
      "grad_norm": 2.95651912689209,
      "learning_rate": 2.5e-06,
      "loss": 0.7707,
      "step": 50
    },
    {
      "epoch": 7.920792079207921,
      "grad_norm": 1.1273679733276367,
      "learning_rate": 5e-06,
      "loss": 0.6934,
      "step": 100
    },
    {
      "epoch": 11.881188118811881,
      "grad_norm": 2.1830034255981445,
      "learning_rate": 7.5e-06,
      "loss": 0.5983,
      "step": 150
    },
    {
      "epoch": 15.841584158415841,
      "grad_norm": 1.0733364820480347,
      "learning_rate": 1e-05,
      "loss": 0.5094,
      "step": 200
    },
    {
      "epoch": 19.801980198019802,
      "grad_norm": 1.2824387550354004,
      "learning_rate": 1.25e-05,
      "loss": 0.4913,
      "step": 250
    },
    {
      "epoch": 23.762376237623762,
      "grad_norm": 0.9164418578147888,
      "learning_rate": 1.5e-05,
      "loss": 0.4764,
      "step": 300
    },
    {
      "epoch": 27.722772277227723,
      "grad_norm": 0.8938279747962952,
      "learning_rate": 1.75e-05,
      "loss": 0.4648,
      "step": 350
    },
    {
      "epoch": 31.683168316831683,
      "grad_norm": 0.7855221033096313,
      "learning_rate": 2e-05,
      "loss": 0.459,
      "step": 400
    },
    {
      "epoch": 35.64356435643565,
      "grad_norm": 0.9247761964797974,
      "learning_rate": 2.25e-05,
      "loss": 0.4519,
      "step": 450
    },
    {
      "epoch": 39.603960396039604,
      "grad_norm": 1.1086606979370117,
      "learning_rate": 2.5e-05,
      "loss": 0.4432,
      "step": 500
    },
    {
      "epoch": 39.603960396039604,
      "eval_loss": 0.4058004915714264,
      "eval_runtime": 7.6054,
      "eval_samples_per_second": 23.667,
      "eval_steps_per_second": 3.024,
      "step": 500
    },
    {
      "epoch": 43.56435643564357,
      "grad_norm": 1.3994853496551514,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 0.4438,
      "step": 550
    },
    {
      "epoch": 47.524752475247524,
      "grad_norm": 1.1104933023452759,
      "learning_rate": 3e-05,
      "loss": 0.4367,
      "step": 600
    },
    {
      "epoch": 51.48514851485149,
      "grad_norm": 0.7456745505332947,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 0.4315,
      "step": 650
    },
    {
      "epoch": 55.445544554455445,
      "grad_norm": 1.0349308252334595,
      "learning_rate": 3.5e-05,
      "loss": 0.4294,
      "step": 700
    },
    {
      "epoch": 59.40594059405941,
      "grad_norm": 0.5290626287460327,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.4234,
      "step": 750
    },
    {
      "epoch": 63.366336633663366,
      "grad_norm": 1.054995059967041,
      "learning_rate": 4e-05,
      "loss": 0.4231,
      "step": 800
    },
    {
      "epoch": 67.32673267326733,
      "grad_norm": 1.1774096488952637,
      "learning_rate": 4.25e-05,
      "loss": 0.4198,
      "step": 850
    },
    {
      "epoch": 71.2871287128713,
      "grad_norm": 0.7513113617897034,
      "learning_rate": 4.5e-05,
      "loss": 0.4182,
      "step": 900
    },
    {
      "epoch": 75.24752475247524,
      "grad_norm": 0.8465940952301025,
      "learning_rate": 4.75e-05,
      "loss": 0.4179,
      "step": 950
    },
    {
      "epoch": 79.20792079207921,
      "grad_norm": 1.601589322090149,
      "learning_rate": 5e-05,
      "loss": 0.4129,
      "step": 1000
    },
    {
      "epoch": 79.20792079207921,
      "eval_loss": 0.3995668292045593,
      "eval_runtime": 8.0229,
      "eval_samples_per_second": 22.436,
      "eval_steps_per_second": 2.867,
      "step": 1000
    },
    {
      "epoch": 83.16831683168317,
      "grad_norm": 2.049264669418335,
      "learning_rate": 5.25e-05,
      "loss": 0.4129,
      "step": 1050
    },
    {
      "epoch": 87.12871287128714,
      "grad_norm": 1.0590511560440063,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.4118,
      "step": 1100
    },
    {
      "epoch": 91.08910891089108,
      "grad_norm": 1.015751600265503,
      "learning_rate": 5.7499999999999995e-05,
      "loss": 0.4074,
      "step": 1150
    },
    {
      "epoch": 95.04950495049505,
      "grad_norm": 1.295837640762329,
      "learning_rate": 6e-05,
      "loss": 0.4056,
      "step": 1200
    },
    {
      "epoch": 99.00990099009901,
      "grad_norm": 1.49947988986969,
      "learning_rate": 6.25e-05,
      "loss": 0.4052,
      "step": 1250
    },
    {
      "epoch": 102.97029702970298,
      "grad_norm": 1.3930383920669556,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.4072,
      "step": 1300
    },
    {
      "epoch": 106.93069306930693,
      "grad_norm": 1.1130273342132568,
      "learning_rate": 6.750000000000001e-05,
      "loss": 0.4015,
      "step": 1350
    },
    {
      "epoch": 110.89108910891089,
      "grad_norm": 2.1075382232666016,
      "learning_rate": 7e-05,
      "loss": 0.3994,
      "step": 1400
    },
    {
      "epoch": 114.85148514851485,
      "grad_norm": 1.8470244407653809,
      "learning_rate": 7.25e-05,
      "loss": 0.397,
      "step": 1450
    },
    {
      "epoch": 118.81188118811882,
      "grad_norm": 2.0715270042419434,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.3992,
      "step": 1500
    },
    {
      "epoch": 118.81188118811882,
      "eval_loss": 0.39855822920799255,
      "eval_runtime": 7.8631,
      "eval_samples_per_second": 22.892,
      "eval_steps_per_second": 2.925,
      "step": 1500
    },
    {
      "epoch": 122.77227722772277,
      "grad_norm": 0.9546501636505127,
      "learning_rate": 7.75e-05,
      "loss": 0.3934,
      "step": 1550
    },
    {
      "epoch": 126.73267326732673,
      "grad_norm": 1.1335992813110352,
      "learning_rate": 8e-05,
      "loss": 0.3927,
      "step": 1600
    },
    {
      "epoch": 130.69306930693068,
      "grad_norm": 1.905771017074585,
      "learning_rate": 8.25e-05,
      "loss": 0.3895,
      "step": 1650
    },
    {
      "epoch": 134.65346534653466,
      "grad_norm": 1.129404067993164,
      "learning_rate": 8.5e-05,
      "loss": 0.3897,
      "step": 1700
    },
    {
      "epoch": 138.6138613861386,
      "grad_norm": 0.9640969038009644,
      "learning_rate": 8.75e-05,
      "loss": 0.3894,
      "step": 1750
    },
    {
      "epoch": 142.5742574257426,
      "grad_norm": 0.9840293526649475,
      "learning_rate": 9e-05,
      "loss": 0.3862,
      "step": 1800
    },
    {
      "epoch": 146.53465346534654,
      "grad_norm": 2.770336627960205,
      "learning_rate": 9.250000000000001e-05,
      "loss": 0.3861,
      "step": 1850
    },
    {
      "epoch": 150.4950495049505,
      "grad_norm": 1.7189514636993408,
      "learning_rate": 9.5e-05,
      "loss": 0.3914,
      "step": 1900
    },
    {
      "epoch": 154.45544554455446,
      "grad_norm": 2.454707622528076,
      "learning_rate": 9.75e-05,
      "loss": 0.3848,
      "step": 1950
    },
    {
      "epoch": 158.41584158415841,
      "grad_norm": 2.1555542945861816,
      "learning_rate": 0.0001,
      "loss": 0.3814,
      "step": 2000
    },
    {
      "epoch": 158.41584158415841,
      "eval_loss": 0.3966299593448639,
      "eval_runtime": 7.7159,
      "eval_samples_per_second": 23.329,
      "eval_steps_per_second": 2.981,
      "step": 2000
    },
    {
      "epoch": 162.37623762376236,
      "grad_norm": 1.1228975057601929,
      "learning_rate": 9.916666666666667e-05,
      "loss": 0.3817,
      "step": 2050
    },
    {
      "epoch": 166.33663366336634,
      "grad_norm": 1.629294991493225,
      "learning_rate": 9.833333333333333e-05,
      "loss": 0.3813,
      "step": 2100
    },
    {
      "epoch": 170.2970297029703,
      "grad_norm": 0.9008976221084595,
      "learning_rate": 9.75e-05,
      "loss": 0.377,
      "step": 2150
    },
    {
      "epoch": 174.25742574257427,
      "grad_norm": 2.174643039703369,
      "learning_rate": 9.666666666666667e-05,
      "loss": 0.3737,
      "step": 2200
    },
    {
      "epoch": 178.21782178217822,
      "grad_norm": 1.617219090461731,
      "learning_rate": 9.583333333333334e-05,
      "loss": 0.3736,
      "step": 2250
    },
    {
      "epoch": 182.17821782178217,
      "grad_norm": 1.292871117591858,
      "learning_rate": 9.5e-05,
      "loss": 0.3786,
      "step": 2300
    },
    {
      "epoch": 186.13861386138615,
      "grad_norm": 1.5530415773391724,
      "learning_rate": 9.416666666666667e-05,
      "loss": 0.3726,
      "step": 2350
    },
    {
      "epoch": 190.0990099009901,
      "grad_norm": 0.9837754964828491,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.3713,
      "step": 2400
    },
    {
      "epoch": 194.05940594059405,
      "grad_norm": 0.8343392610549927,
      "learning_rate": 9.250000000000001e-05,
      "loss": 0.3727,
      "step": 2450
    },
    {
      "epoch": 198.01980198019803,
      "grad_norm": 2.9330906867980957,
      "learning_rate": 9.166666666666667e-05,
      "loss": 0.3688,
      "step": 2500
    },
    {
      "epoch": 198.01980198019803,
      "eval_loss": 0.40214845538139343,
      "eval_runtime": 7.9273,
      "eval_samples_per_second": 22.706,
      "eval_steps_per_second": 2.901,
      "step": 2500
    },
    {
      "epoch": 201.98019801980197,
      "grad_norm": 2.427091598510742,
      "learning_rate": 9.083333333333334e-05,
      "loss": 0.3699,
      "step": 2550
    },
    {
      "epoch": 205.94059405940595,
      "grad_norm": 0.680686891078949,
      "learning_rate": 9e-05,
      "loss": 0.3659,
      "step": 2600
    },
    {
      "epoch": 209.9009900990099,
      "grad_norm": 2.0860631465911865,
      "learning_rate": 8.916666666666667e-05,
      "loss": 0.3631,
      "step": 2650
    },
    {
      "epoch": 213.86138613861385,
      "grad_norm": 0.7303352355957031,
      "learning_rate": 8.833333333333333e-05,
      "loss": 0.3656,
      "step": 2700
    },
    {
      "epoch": 217.82178217821783,
      "grad_norm": 1.2716140747070312,
      "learning_rate": 8.75e-05,
      "loss": 0.3627,
      "step": 2750
    },
    {
      "epoch": 221.78217821782178,
      "grad_norm": 1.5935007333755493,
      "learning_rate": 8.666666666666667e-05,
      "loss": 0.3607,
      "step": 2800
    },
    {
      "epoch": 225.74257425742573,
      "grad_norm": 1.156733512878418,
      "learning_rate": 8.583333333333334e-05,
      "loss": 0.3594,
      "step": 2850
    },
    {
      "epoch": 229.7029702970297,
      "grad_norm": 1.1708168983459473,
      "learning_rate": 8.5e-05,
      "loss": 0.3596,
      "step": 2900
    },
    {
      "epoch": 233.66336633663366,
      "grad_norm": 1.4882978200912476,
      "learning_rate": 8.416666666666668e-05,
      "loss": 0.3613,
      "step": 2950
    },
    {
      "epoch": 237.62376237623764,
      "grad_norm": 1.0416059494018555,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.3574,
      "step": 3000
    },
    {
      "epoch": 237.62376237623764,
      "eval_loss": 0.39999938011169434,
      "eval_runtime": 7.4331,
      "eval_samples_per_second": 24.216,
      "eval_steps_per_second": 3.094,
      "step": 3000
    },
    {
      "epoch": 241.58415841584159,
      "grad_norm": 0.9737104773521423,
      "learning_rate": 8.25e-05,
      "loss": 0.3555,
      "step": 3050
    },
    {
      "epoch": 245.54455445544554,
      "grad_norm": 2.4858767986297607,
      "learning_rate": 8.166666666666667e-05,
      "loss": 0.3585,
      "step": 3100
    },
    {
      "epoch": 249.5049504950495,
      "grad_norm": 0.7558372616767883,
      "learning_rate": 8.083333333333334e-05,
      "loss": 0.3562,
      "step": 3150
    },
    {
      "epoch": 253.46534653465346,
      "grad_norm": 0.5738223195075989,
      "learning_rate": 8e-05,
      "loss": 0.3515,
      "step": 3200
    },
    {
      "epoch": 257.4257425742574,
      "grad_norm": 0.8245187401771545,
      "learning_rate": 7.916666666666666e-05,
      "loss": 0.3557,
      "step": 3250
    },
    {
      "epoch": 261.38613861386136,
      "grad_norm": 0.8837783336639404,
      "learning_rate": 7.833333333333333e-05,
      "loss": 0.3556,
      "step": 3300
    },
    {
      "epoch": 265.34653465346537,
      "grad_norm": 1.0626522302627563,
      "learning_rate": 7.75e-05,
      "loss": 0.3531,
      "step": 3350
    },
    {
      "epoch": 269.3069306930693,
      "grad_norm": 0.9158534407615662,
      "learning_rate": 7.666666666666667e-05,
      "loss": 0.3504,
      "step": 3400
    },
    {
      "epoch": 273.26732673267327,
      "grad_norm": 1.1604119539260864,
      "learning_rate": 7.583333333333334e-05,
      "loss": 0.3502,
      "step": 3450
    },
    {
      "epoch": 277.2277227722772,
      "grad_norm": 0.5547149181365967,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.3482,
      "step": 3500
    },
    {
      "epoch": 277.2277227722772,
      "eval_loss": 0.39900416135787964,
      "eval_runtime": 7.7665,
      "eval_samples_per_second": 23.177,
      "eval_steps_per_second": 2.961,
      "step": 3500
    },
    {
      "epoch": 281.18811881188117,
      "grad_norm": 1.213918924331665,
      "learning_rate": 7.416666666666668e-05,
      "loss": 0.3465,
      "step": 3550
    },
    {
      "epoch": 285.1485148514852,
      "grad_norm": 1.0824240446090698,
      "learning_rate": 7.333333333333333e-05,
      "loss": 0.3467,
      "step": 3600
    },
    {
      "epoch": 289.1089108910891,
      "grad_norm": 0.8079837560653687,
      "learning_rate": 7.25e-05,
      "loss": 0.3457,
      "step": 3650
    },
    {
      "epoch": 293.0693069306931,
      "grad_norm": 1.1617168188095093,
      "learning_rate": 7.166666666666667e-05,
      "loss": 0.3457,
      "step": 3700
    },
    {
      "epoch": 297.029702970297,
      "grad_norm": 1.0282728672027588,
      "learning_rate": 7.083333333333334e-05,
      "loss": 0.3491,
      "step": 3750
    },
    {
      "epoch": 300.990099009901,
      "grad_norm": 0.8772862553596497,
      "learning_rate": 7e-05,
      "loss": 0.3494,
      "step": 3800
    },
    {
      "epoch": 304.9504950495049,
      "grad_norm": 1.0405845642089844,
      "learning_rate": 6.916666666666666e-05,
      "loss": 0.3469,
      "step": 3850
    },
    {
      "epoch": 308.91089108910893,
      "grad_norm": 0.7217592000961304,
      "learning_rate": 6.833333333333333e-05,
      "loss": 0.3449,
      "step": 3900
    },
    {
      "epoch": 312.8712871287129,
      "grad_norm": 0.7694669365882874,
      "learning_rate": 6.750000000000001e-05,
      "loss": 0.341,
      "step": 3950
    },
    {
      "epoch": 316.83168316831683,
      "grad_norm": 0.6203835010528564,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.3421,
      "step": 4000
    },
    {
      "epoch": 316.83168316831683,
      "eval_loss": 0.4013901650905609,
      "eval_runtime": 8.6905,
      "eval_samples_per_second": 20.712,
      "eval_steps_per_second": 2.647,
      "step": 4000
    },
    {
      "epoch": 320.7920792079208,
      "grad_norm": 0.7539447546005249,
      "learning_rate": 6.583333333333334e-05,
      "loss": 0.3445,
      "step": 4050
    },
    {
      "epoch": 324.7524752475247,
      "grad_norm": 0.9108504056930542,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.341,
      "step": 4100
    },
    {
      "epoch": 328.71287128712873,
      "grad_norm": 0.5291554927825928,
      "learning_rate": 6.416666666666668e-05,
      "loss": 0.3412,
      "step": 4150
    },
    {
      "epoch": 332.6732673267327,
      "grad_norm": 1.2332825660705566,
      "learning_rate": 6.333333333333333e-05,
      "loss": 0.3408,
      "step": 4200
    },
    {
      "epoch": 336.63366336633663,
      "grad_norm": 0.5890567302703857,
      "learning_rate": 6.25e-05,
      "loss": 0.339,
      "step": 4250
    },
    {
      "epoch": 340.5940594059406,
      "grad_norm": 1.05361807346344,
      "learning_rate": 6.168333333333333e-05,
      "loss": 0.3388,
      "step": 4300
    },
    {
      "epoch": 344.55445544554453,
      "grad_norm": 1.6529746055603027,
      "learning_rate": 6.085000000000001e-05,
      "loss": 0.3423,
      "step": 4350
    },
    {
      "epoch": 348.51485148514854,
      "grad_norm": 0.6428557634353638,
      "learning_rate": 6.0016666666666664e-05,
      "loss": 0.3406,
      "step": 4400
    },
    {
      "epoch": 352.4752475247525,
      "grad_norm": 0.832099437713623,
      "learning_rate": 5.918333333333333e-05,
      "loss": 0.3399,
      "step": 4450
    },
    {
      "epoch": 356.43564356435644,
      "grad_norm": 1.4958369731903076,
      "learning_rate": 5.835e-05,
      "loss": 0.3414,
      "step": 4500
    },
    {
      "epoch": 356.43564356435644,
      "eval_loss": 0.4059315621852875,
      "eval_runtime": 7.3212,
      "eval_samples_per_second": 24.586,
      "eval_steps_per_second": 3.142,
      "step": 4500
    },
    {
      "epoch": 360.3960396039604,
      "grad_norm": 0.7380837798118591,
      "learning_rate": 5.751666666666667e-05,
      "loss": 0.3363,
      "step": 4550
    },
    {
      "epoch": 364.35643564356434,
      "grad_norm": 1.1721248626708984,
      "learning_rate": 5.668333333333333e-05,
      "loss": 0.3385,
      "step": 4600
    },
    {
      "epoch": 368.3168316831683,
      "grad_norm": 0.6880260109901428,
      "learning_rate": 5.585e-05,
      "loss": 0.3355,
      "step": 4650
    },
    {
      "epoch": 372.2772277227723,
      "grad_norm": 0.614639401435852,
      "learning_rate": 5.501666666666667e-05,
      "loss": 0.3378,
      "step": 4700
    },
    {
      "epoch": 376.23762376237624,
      "grad_norm": 0.6579806208610535,
      "learning_rate": 5.4183333333333334e-05,
      "loss": 0.3342,
      "step": 4750
    },
    {
      "epoch": 380.1980198019802,
      "grad_norm": 0.7675396203994751,
      "learning_rate": 5.335e-05,
      "loss": 0.3333,
      "step": 4800
    },
    {
      "epoch": 384.15841584158414,
      "grad_norm": 0.6529810428619385,
      "learning_rate": 5.251666666666667e-05,
      "loss": 0.3348,
      "step": 4850
    },
    {
      "epoch": 388.1188118811881,
      "grad_norm": 0.6606137752532959,
      "learning_rate": 5.168333333333334e-05,
      "loss": 0.3351,
      "step": 4900
    },
    {
      "epoch": 392.0792079207921,
      "grad_norm": 0.5794258713722229,
      "learning_rate": 5.0849999999999996e-05,
      "loss": 0.3319,
      "step": 4950
    },
    {
      "epoch": 396.03960396039605,
      "grad_norm": 0.8365175127983093,
      "learning_rate": 5.0016666666666665e-05,
      "loss": 0.3328,
      "step": 5000
    },
    {
      "epoch": 396.03960396039605,
      "eval_loss": 0.40650251507759094,
      "eval_runtime": 7.4758,
      "eval_samples_per_second": 24.078,
      "eval_steps_per_second": 3.077,
      "step": 5000
    },
    {
      "epoch": 400.0,
      "grad_norm": 0.7176100611686707,
      "learning_rate": 4.9183333333333334e-05,
      "loss": 0.3346,
      "step": 5050
    },
    {
      "epoch": 403.96039603960395,
      "grad_norm": 0.9245086312294006,
      "learning_rate": 4.835e-05,
      "loss": 0.3319,
      "step": 5100
    },
    {
      "epoch": 407.9207920792079,
      "grad_norm": 0.7508360743522644,
      "learning_rate": 4.751666666666667e-05,
      "loss": 0.3344,
      "step": 5150
    },
    {
      "epoch": 411.8811881188119,
      "grad_norm": 0.8295847177505493,
      "learning_rate": 4.6683333333333334e-05,
      "loss": 0.3311,
      "step": 5200
    },
    {
      "epoch": 415.84158415841586,
      "grad_norm": 0.9669463634490967,
      "learning_rate": 4.585e-05,
      "loss": 0.3307,
      "step": 5250
    },
    {
      "epoch": 419.8019801980198,
      "grad_norm": 0.6602340340614319,
      "learning_rate": 4.5016666666666665e-05,
      "loss": 0.3302,
      "step": 5300
    },
    {
      "epoch": 423.76237623762376,
      "grad_norm": 0.7802284955978394,
      "learning_rate": 4.4183333333333334e-05,
      "loss": 0.332,
      "step": 5350
    },
    {
      "epoch": 427.7227722772277,
      "grad_norm": 0.6156259179115295,
      "learning_rate": 4.335e-05,
      "loss": 0.3305,
      "step": 5400
    },
    {
      "epoch": 431.6831683168317,
      "grad_norm": 0.636411726474762,
      "learning_rate": 4.251666666666667e-05,
      "loss": 0.3309,
      "step": 5450
    },
    {
      "epoch": 435.64356435643566,
      "grad_norm": 0.7911040186882019,
      "learning_rate": 4.1683333333333335e-05,
      "loss": 0.3277,
      "step": 5500
    },
    {
      "epoch": 435.64356435643566,
      "eval_loss": 0.40623539686203003,
      "eval_runtime": 7.0665,
      "eval_samples_per_second": 25.472,
      "eval_steps_per_second": 3.255,
      "step": 5500
    },
    {
      "epoch": 439.6039603960396,
      "grad_norm": 0.4689953625202179,
      "learning_rate": 4.085e-05,
      "loss": 0.3281,
      "step": 5550
    },
    {
      "epoch": 443.56435643564356,
      "grad_norm": 0.57826828956604,
      "learning_rate": 4.0016666666666666e-05,
      "loss": 0.3316,
      "step": 5600
    },
    {
      "epoch": 447.5247524752475,
      "grad_norm": 0.5948452949523926,
      "learning_rate": 3.9183333333333335e-05,
      "loss": 0.3287,
      "step": 5650
    },
    {
      "epoch": 451.48514851485146,
      "grad_norm": 0.6122576594352722,
      "learning_rate": 3.8350000000000004e-05,
      "loss": 0.3267,
      "step": 5700
    },
    {
      "epoch": 455.44554455445547,
      "grad_norm": 0.48124071955680847,
      "learning_rate": 3.7516666666666666e-05,
      "loss": 0.3279,
      "step": 5750
    },
    {
      "epoch": 459.4059405940594,
      "grad_norm": 0.5144623517990112,
      "learning_rate": 3.6683333333333335e-05,
      "loss": 0.3257,
      "step": 5800
    },
    {
      "epoch": 463.36633663366337,
      "grad_norm": 1.026330828666687,
      "learning_rate": 3.585e-05,
      "loss": 0.3272,
      "step": 5850
    },
    {
      "epoch": 467.3267326732673,
      "grad_norm": 0.6319882869720459,
      "learning_rate": 3.501666666666667e-05,
      "loss": 0.3247,
      "step": 5900
    },
    {
      "epoch": 471.28712871287127,
      "grad_norm": 0.7657246589660645,
      "learning_rate": 3.4183333333333335e-05,
      "loss": 0.3257,
      "step": 5950
    },
    {
      "epoch": 475.2475247524753,
      "grad_norm": 0.67906254529953,
      "learning_rate": 3.3350000000000004e-05,
      "loss": 0.3248,
      "step": 6000
    },
    {
      "epoch": 475.2475247524753,
      "eval_loss": 0.40691491961479187,
      "eval_runtime": 8.6221,
      "eval_samples_per_second": 20.876,
      "eval_steps_per_second": 2.668,
      "step": 6000
    },
    {
      "epoch": 479.2079207920792,
      "grad_norm": 0.5330384969711304,
      "learning_rate": 3.2516666666666666e-05,
      "loss": 0.3289,
      "step": 6050
    },
    {
      "epoch": 483.16831683168317,
      "grad_norm": 0.7305939793586731,
      "learning_rate": 3.1683333333333335e-05,
      "loss": 0.3267,
      "step": 6100
    },
    {
      "epoch": 487.1287128712871,
      "grad_norm": 1.1130585670471191,
      "learning_rate": 3.0850000000000004e-05,
      "loss": 0.3247,
      "step": 6150
    },
    {
      "epoch": 491.08910891089107,
      "grad_norm": 0.5422760844230652,
      "learning_rate": 3.001666666666667e-05,
      "loss": 0.3252,
      "step": 6200
    },
    {
      "epoch": 495.0495049504951,
      "grad_norm": 0.5144603252410889,
      "learning_rate": 2.9183333333333336e-05,
      "loss": 0.3276,
      "step": 6250
    },
    {
      "epoch": 499.009900990099,
      "grad_norm": 0.5953711867332458,
      "learning_rate": 2.8349999999999998e-05,
      "loss": 0.3259,
      "step": 6300
    },
    {
      "epoch": 502.970297029703,
      "grad_norm": 0.48666295409202576,
      "learning_rate": 2.7516666666666667e-05,
      "loss": 0.3233,
      "step": 6350
    },
    {
      "epoch": 506.9306930693069,
      "grad_norm": 0.44972455501556396,
      "learning_rate": 2.6683333333333333e-05,
      "loss": 0.3223,
      "step": 6400
    },
    {
      "epoch": 510.8910891089109,
      "grad_norm": 0.5804650783538818,
      "learning_rate": 2.585e-05,
      "loss": 0.3246,
      "step": 6450
    },
    {
      "epoch": 514.8514851485148,
      "grad_norm": 0.41495367884635925,
      "learning_rate": 2.5016666666666667e-05,
      "loss": 0.3245,
      "step": 6500
    },
    {
      "epoch": 514.8514851485148,
      "eval_loss": 0.4071478545665741,
      "eval_runtime": 8.0659,
      "eval_samples_per_second": 22.316,
      "eval_steps_per_second": 2.852,
      "step": 6500
    },
    {
      "epoch": 518.8118811881188,
      "grad_norm": 0.3836776912212372,
      "learning_rate": 2.4183333333333336e-05,
      "loss": 0.3236,
      "step": 6550
    },
    {
      "epoch": 522.7722772277227,
      "grad_norm": 0.45108506083488464,
      "learning_rate": 2.3350000000000002e-05,
      "loss": 0.3225,
      "step": 6600
    },
    {
      "epoch": 526.7326732673267,
      "grad_norm": 0.36493930220603943,
      "learning_rate": 2.2516666666666667e-05,
      "loss": 0.324,
      "step": 6650
    },
    {
      "epoch": 530.6930693069307,
      "grad_norm": 0.4647804796695709,
      "learning_rate": 2.1683333333333333e-05,
      "loss": 0.3236,
      "step": 6700
    },
    {
      "epoch": 534.6534653465346,
      "grad_norm": 0.3885102868080139,
      "learning_rate": 2.085e-05,
      "loss": 0.3207,
      "step": 6750
    },
    {
      "epoch": 538.6138613861386,
      "grad_norm": 0.6492519378662109,
      "learning_rate": 2.0016666666666668e-05,
      "loss": 0.3226,
      "step": 6800
    },
    {
      "epoch": 542.5742574257425,
      "grad_norm": 0.3782261610031128,
      "learning_rate": 1.9183333333333333e-05,
      "loss": 0.3211,
      "step": 6850
    },
    {
      "epoch": 546.5346534653465,
      "grad_norm": 0.5947457551956177,
      "learning_rate": 1.8350000000000002e-05,
      "loss": 0.3228,
      "step": 6900
    },
    {
      "epoch": 550.4950495049505,
      "grad_norm": 0.3643256425857544,
      "learning_rate": 1.7516666666666668e-05,
      "loss": 0.3206,
      "step": 6950
    },
    {
      "epoch": 554.4554455445544,
      "grad_norm": 0.4753130078315735,
      "learning_rate": 1.6683333333333333e-05,
      "loss": 0.3219,
      "step": 7000
    },
    {
      "epoch": 554.4554455445544,
      "eval_loss": 0.4096488654613495,
      "eval_runtime": 9.31,
      "eval_samples_per_second": 19.334,
      "eval_steps_per_second": 2.47,
      "step": 7000
    },
    {
      "epoch": 558.4158415841584,
      "grad_norm": 0.43716803193092346,
      "learning_rate": 1.5850000000000002e-05,
      "loss": 0.3221,
      "step": 7050
    },
    {
      "epoch": 562.3762376237623,
      "grad_norm": 0.5367900729179382,
      "learning_rate": 1.5016666666666668e-05,
      "loss": 0.321,
      "step": 7100
    },
    {
      "epoch": 566.3366336633663,
      "grad_norm": 0.4188914895057678,
      "learning_rate": 1.4183333333333335e-05,
      "loss": 0.3202,
      "step": 7150
    },
    {
      "epoch": 570.2970297029703,
      "grad_norm": 0.4808737635612488,
      "learning_rate": 1.3350000000000001e-05,
      "loss": 0.3213,
      "step": 7200
    },
    {
      "epoch": 574.2574257425742,
      "grad_norm": 0.3463107943534851,
      "learning_rate": 1.2516666666666668e-05,
      "loss": 0.3214,
      "step": 7250
    },
    {
      "epoch": 578.2178217821782,
      "grad_norm": 0.4874543249607086,
      "learning_rate": 1.1683333333333334e-05,
      "loss": 0.3188,
      "step": 7300
    },
    {
      "epoch": 582.1782178217821,
      "grad_norm": 0.44611576199531555,
      "learning_rate": 1.0850000000000001e-05,
      "loss": 0.3207,
      "step": 7350
    },
    {
      "epoch": 586.1386138613861,
      "grad_norm": 0.3931247293949127,
      "learning_rate": 1.0016666666666667e-05,
      "loss": 0.3207,
      "step": 7400
    },
    {
      "epoch": 590.0990099009902,
      "grad_norm": 0.4531846046447754,
      "learning_rate": 9.183333333333334e-06,
      "loss": 0.3218,
      "step": 7450
    },
    {
      "epoch": 594.059405940594,
      "grad_norm": 0.43562614917755127,
      "learning_rate": 8.350000000000001e-06,
      "loss": 0.3227,
      "step": 7500
    },
    {
      "epoch": 594.059405940594,
      "eval_loss": 0.41123372316360474,
      "eval_runtime": 7.7124,
      "eval_samples_per_second": 23.339,
      "eval_steps_per_second": 2.982,
      "step": 7500
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 667,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.620635203788116e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}