{
  "best_metric": 0.4122372269630432,
  "best_model_checkpoint": "mikhail-panzo/zlm-fil-ceb_b64_le5_s8000/checkpoint-5000",
  "epoch": 260.8695652173913,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 2.1739130434782608,
      "grad_norm": 0.7357823252677917,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.4055,
      "step": 50
    },
    {
      "epoch": 4.3478260869565215,
      "grad_norm": 0.7944665551185608,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.3988,
      "step": 100
    },
    {
      "epoch": 6.521739130434782,
      "grad_norm": 0.7226569652557373,
      "learning_rate": 7.5e-07,
      "loss": 0.4072,
      "step": 150
    },
    {
      "epoch": 8.695652173913043,
      "grad_norm": 0.7845238447189331,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.4028,
      "step": 200
    },
    {
      "epoch": 10.869565217391305,
      "grad_norm": 0.9769566655158997,
      "learning_rate": 1.25e-06,
      "loss": 0.4066,
      "step": 250
    },
    {
      "epoch": 13.043478260869565,
      "grad_norm": 0.8634074330329895,
      "learning_rate": 1.5e-06,
      "loss": 0.405,
      "step": 300
    },
    {
      "epoch": 15.217391304347826,
      "grad_norm": 0.9029327034950256,
      "learning_rate": 1.75e-06,
      "loss": 0.4074,
      "step": 350
    },
    {
      "epoch": 17.391304347826086,
      "grad_norm": 0.7624074220657349,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.406,
      "step": 400
    },
    {
      "epoch": 19.565217391304348,
      "grad_norm": 1.005196213722229,
      "learning_rate": 2.25e-06,
      "loss": 0.4052,
      "step": 450
    },
    {
      "epoch": 21.73913043478261,
      "grad_norm": 1.0276380777359009,
      "learning_rate": 2.5e-06,
      "loss": 0.4019,
      "step": 500
    },
    {
      "epoch": 21.73913043478261,
      "eval_loss": 0.4143177568912506,
      "eval_runtime": 6.4074,
      "eval_samples_per_second": 24.815,
      "eval_steps_per_second": 3.121,
      "step": 500
    },
    {
      "epoch": 23.91304347826087,
      "grad_norm": 0.9067140817642212,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.4021,
      "step": 550
    },
    {
      "epoch": 26.08695652173913,
      "grad_norm": 0.8799042701721191,
      "learning_rate": 3e-06,
      "loss": 0.4071,
      "step": 600
    },
    {
      "epoch": 28.26086956521739,
      "grad_norm": 1.0451533794403076,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.4146,
      "step": 650
    },
    {
      "epoch": 30.434782608695652,
      "grad_norm": 0.7998800277709961,
      "learning_rate": 3.5e-06,
      "loss": 0.4023,
      "step": 700
    },
    {
      "epoch": 32.608695652173914,
      "grad_norm": 1.0301792621612549,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.4045,
      "step": 750
    },
    {
      "epoch": 34.78260869565217,
      "grad_norm": 0.6388185620307922,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.4045,
      "step": 800
    },
    {
      "epoch": 36.95652173913044,
      "grad_norm": 0.9676840901374817,
      "learning_rate": 4.25e-06,
      "loss": 0.4116,
      "step": 850
    },
    {
      "epoch": 39.130434782608695,
      "grad_norm": 0.8355514407157898,
      "learning_rate": 4.5e-06,
      "loss": 0.4013,
      "step": 900
    },
    {
      "epoch": 41.30434782608695,
      "grad_norm": 0.7534067630767822,
      "learning_rate": 4.75e-06,
      "loss": 0.4019,
      "step": 950
    },
    {
      "epoch": 43.47826086956522,
      "grad_norm": 0.9366117119789124,
      "learning_rate": 5e-06,
      "loss": 0.4064,
      "step": 1000
    },
    {
      "epoch": 43.47826086956522,
      "eval_loss": 0.41316837072372437,
      "eval_runtime": 6.5616,
      "eval_samples_per_second": 24.232,
      "eval_steps_per_second": 3.048,
      "step": 1000
    },
    {
      "epoch": 45.65217391304348,
      "grad_norm": 0.7025179266929626,
      "learning_rate": 5.2500000000000006e-06,
      "loss": 0.4061,
      "step": 1050
    },
    {
      "epoch": 47.82608695652174,
      "grad_norm": 0.9240646958351135,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.4061,
      "step": 1100
    },
    {
      "epoch": 50.0,
      "grad_norm": 0.9685391187667847,
      "learning_rate": 5.75e-06,
      "loss": 0.4063,
      "step": 1150
    },
    {
      "epoch": 52.17391304347826,
      "grad_norm": 0.8591094017028809,
      "learning_rate": 6e-06,
      "loss": 0.4013,
      "step": 1200
    },
    {
      "epoch": 54.34782608695652,
      "grad_norm": 0.9348228573799133,
      "learning_rate": 6.25e-06,
      "loss": 0.402,
      "step": 1250
    },
    {
      "epoch": 56.52173913043478,
      "grad_norm": 0.7908409237861633,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.4046,
      "step": 1300
    },
    {
      "epoch": 58.69565217391305,
      "grad_norm": 1.109017014503479,
      "learning_rate": 6.750000000000001e-06,
      "loss": 0.3995,
      "step": 1350
    },
    {
      "epoch": 60.869565217391305,
      "grad_norm": 0.8529478311538696,
      "learning_rate": 7e-06,
      "loss": 0.4018,
      "step": 1400
    },
    {
      "epoch": 63.04347826086956,
      "grad_norm": 0.9947476387023926,
      "learning_rate": 7.25e-06,
      "loss": 0.401,
      "step": 1450
    },
    {
      "epoch": 65.21739130434783,
      "grad_norm": 1.2476813793182373,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4034,
      "step": 1500
    },
    {
      "epoch": 65.21739130434783,
      "eval_loss": 0.41223829984664917,
      "eval_runtime": 6.5846,
      "eval_samples_per_second": 24.147,
      "eval_steps_per_second": 3.037,
      "step": 1500
    },
    {
      "epoch": 67.3913043478261,
      "grad_norm": 0.8658013343811035,
      "learning_rate": 7.75e-06,
      "loss": 0.4007,
      "step": 1550
    },
    {
      "epoch": 69.56521739130434,
      "grad_norm": 0.7932788133621216,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.4054,
      "step": 1600
    },
    {
      "epoch": 71.73913043478261,
      "grad_norm": 2.33347225189209,
      "learning_rate": 8.25e-06,
      "loss": 0.4075,
      "step": 1650
    },
    {
      "epoch": 73.91304347826087,
      "grad_norm": 1.0095164775848389,
      "learning_rate": 8.5e-06,
      "loss": 0.3982,
      "step": 1700
    },
    {
      "epoch": 76.08695652173913,
      "grad_norm": 0.8895902633666992,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.4018,
      "step": 1750
    },
    {
      "epoch": 78.26086956521739,
      "grad_norm": 1.2252330780029297,
      "learning_rate": 9e-06,
      "loss": 0.4024,
      "step": 1800
    },
    {
      "epoch": 80.43478260869566,
      "grad_norm": 0.7731898427009583,
      "learning_rate": 9.250000000000001e-06,
      "loss": 0.3994,
      "step": 1850
    },
    {
      "epoch": 82.6086956521739,
      "grad_norm": 1.047607660293579,
      "learning_rate": 9.5e-06,
      "loss": 0.4013,
      "step": 1900
    },
    {
      "epoch": 84.78260869565217,
      "grad_norm": 0.9284490346908569,
      "learning_rate": 9.75e-06,
      "loss": 0.3986,
      "step": 1950
    },
    {
      "epoch": 86.95652173913044,
      "grad_norm": 4.725491046905518,
      "learning_rate": 1e-05,
      "loss": 0.3985,
      "step": 2000
    },
    {
      "epoch": 86.95652173913044,
      "eval_loss": 0.4147759974002838,
      "eval_runtime": 6.5475,
      "eval_samples_per_second": 24.284,
      "eval_steps_per_second": 3.055,
      "step": 2000
    },
    {
      "epoch": 89.1304347826087,
      "grad_norm": 1.1678683757781982,
      "learning_rate": 9.916666666666668e-06,
      "loss": 0.4066,
      "step": 2050
    },
    {
      "epoch": 91.30434782608695,
      "grad_norm": 1.988890528678894,
      "learning_rate": 9.835000000000002e-06,
      "loss": 0.3997,
      "step": 2100
    },
    {
      "epoch": 93.47826086956522,
      "grad_norm": 1.0278427600860596,
      "learning_rate": 9.751666666666667e-06,
      "loss": 0.3948,
      "step": 2150
    },
    {
      "epoch": 95.65217391304348,
      "grad_norm": 0.9606735110282898,
      "learning_rate": 9.67e-06,
      "loss": 0.42,
      "step": 2200
    },
    {
      "epoch": 97.82608695652173,
      "grad_norm": 1.0861903429031372,
      "learning_rate": 9.586666666666667e-06,
      "loss": 0.398,
      "step": 2250
    },
    {
      "epoch": 100.0,
      "grad_norm": 1.4872643947601318,
      "learning_rate": 9.503333333333334e-06,
      "loss": 0.4059,
      "step": 2300
    },
    {
      "epoch": 102.17391304347827,
      "grad_norm": 1.0460981130599976,
      "learning_rate": 9.42e-06,
      "loss": 0.4014,
      "step": 2350
    },
    {
      "epoch": 104.34782608695652,
      "grad_norm": 1.906476378440857,
      "learning_rate": 9.336666666666666e-06,
      "loss": 0.4015,
      "step": 2400
    },
    {
      "epoch": 106.52173913043478,
      "grad_norm": 1.3965051174163818,
      "learning_rate": 9.253333333333333e-06,
      "loss": 0.4063,
      "step": 2450
    },
    {
      "epoch": 108.69565217391305,
      "grad_norm": 1.7410107851028442,
      "learning_rate": 9.17e-06,
      "loss": 0.396,
      "step": 2500
    },
    {
      "epoch": 108.69565217391305,
      "eval_loss": 0.41233959794044495,
      "eval_runtime": 6.6031,
      "eval_samples_per_second": 24.08,
      "eval_steps_per_second": 3.029,
      "step": 2500
    },
    {
      "epoch": 110.8695652173913,
      "grad_norm": 1.4562053680419922,
      "learning_rate": 9.088333333333334e-06,
      "loss": 0.4042,
      "step": 2550
    },
    {
      "epoch": 113.04347826086956,
      "grad_norm": 0.8741047978401184,
      "learning_rate": 9.005000000000001e-06,
      "loss": 0.3985,
      "step": 2600
    },
    {
      "epoch": 115.21739130434783,
      "grad_norm": 1.1817028522491455,
      "learning_rate": 8.921666666666668e-06,
      "loss": 0.4036,
      "step": 2650
    },
    {
      "epoch": 117.3913043478261,
      "grad_norm": 0.8515534996986389,
      "learning_rate": 8.838333333333335e-06,
      "loss": 0.3986,
      "step": 2700
    },
    {
      "epoch": 119.56521739130434,
      "grad_norm": 0.9545428156852722,
      "learning_rate": 8.755e-06,
      "loss": 0.3946,
      "step": 2750
    },
    {
      "epoch": 121.73913043478261,
      "grad_norm": 0.6653321385383606,
      "learning_rate": 8.671666666666667e-06,
      "loss": 0.3957,
      "step": 2800
    },
    {
      "epoch": 123.91304347826087,
      "grad_norm": 0.8100114464759827,
      "learning_rate": 8.588333333333334e-06,
      "loss": 0.3953,
      "step": 2850
    },
    {
      "epoch": 126.08695652173913,
      "grad_norm": 1.4174237251281738,
      "learning_rate": 8.505e-06,
      "loss": 0.3933,
      "step": 2900
    },
    {
      "epoch": 128.2608695652174,
      "grad_norm": 1.6101123094558716,
      "learning_rate": 8.421666666666668e-06,
      "loss": 0.4027,
      "step": 2950
    },
    {
      "epoch": 130.43478260869566,
      "grad_norm": 0.7453967332839966,
      "learning_rate": 8.338333333333335e-06,
      "loss": 0.3958,
      "step": 3000
    },
    {
      "epoch": 130.43478260869566,
      "eval_loss": 0.4138609766960144,
      "eval_runtime": 6.6093,
      "eval_samples_per_second": 24.057,
      "eval_steps_per_second": 3.026,
      "step": 3000
    },
    {
      "epoch": 132.6086956521739,
      "grad_norm": 1.061684012413025,
      "learning_rate": 8.255000000000001e-06,
      "loss": 0.3966,
      "step": 3050
    },
    {
      "epoch": 134.7826086956522,
      "grad_norm": 1.0820105075836182,
      "learning_rate": 8.171666666666668e-06,
      "loss": 0.3937,
      "step": 3100
    },
    {
      "epoch": 136.95652173913044,
      "grad_norm": 0.8147062063217163,
      "learning_rate": 8.088333333333334e-06,
      "loss": 0.3961,
      "step": 3150
    },
    {
      "epoch": 139.1304347826087,
      "grad_norm": 0.7450351715087891,
      "learning_rate": 8.005e-06,
      "loss": 0.3923,
      "step": 3200
    },
    {
      "epoch": 141.30434782608697,
      "grad_norm": 1.05226731300354,
      "learning_rate": 7.921666666666667e-06,
      "loss": 0.3951,
      "step": 3250
    },
    {
      "epoch": 143.47826086956522,
      "grad_norm": 1.5119163990020752,
      "learning_rate": 7.838333333333334e-06,
      "loss": 0.3957,
      "step": 3300
    },
    {
      "epoch": 145.65217391304347,
      "grad_norm": 1.0422383546829224,
      "learning_rate": 7.755000000000001e-06,
      "loss": 0.3942,
      "step": 3350
    },
    {
      "epoch": 147.82608695652175,
      "grad_norm": 1.2120935916900635,
      "learning_rate": 7.671666666666668e-06,
      "loss": 0.3946,
      "step": 3400
    },
    {
      "epoch": 150.0,
      "grad_norm": 1.3127976655960083,
      "learning_rate": 7.588333333333334e-06,
      "loss": 0.3933,
      "step": 3450
    },
    {
      "epoch": 152.17391304347825,
      "grad_norm": 0.7704339027404785,
      "learning_rate": 7.505e-06,
      "loss": 0.3928,
      "step": 3500
    },
    {
      "epoch": 152.17391304347825,
      "eval_loss": 0.4132215976715088,
      "eval_runtime": 6.6162,
      "eval_samples_per_second": 24.032,
      "eval_steps_per_second": 3.023,
      "step": 3500
    },
    {
      "epoch": 154.34782608695653,
      "grad_norm": 1.4827122688293457,
      "learning_rate": 7.421666666666667e-06,
      "loss": 0.3908,
      "step": 3550
    },
    {
      "epoch": 156.52173913043478,
      "grad_norm": 1.0464049577713013,
      "learning_rate": 7.338333333333334e-06,
      "loss": 0.3993,
      "step": 3600
    },
    {
      "epoch": 158.69565217391303,
      "grad_norm": 0.8279913663864136,
      "learning_rate": 7.255000000000001e-06,
      "loss": 0.3967,
      "step": 3650
    },
    {
      "epoch": 160.8695652173913,
      "grad_norm": 0.9175034165382385,
      "learning_rate": 7.171666666666667e-06,
      "loss": 0.3911,
      "step": 3700
    },
    {
      "epoch": 163.04347826086956,
      "grad_norm": 1.0080233812332153,
      "learning_rate": 7.088333333333334e-06,
      "loss": 0.3898,
      "step": 3750
    },
    {
      "epoch": 165.2173913043478,
      "grad_norm": 0.9944823980331421,
      "learning_rate": 7.005000000000001e-06,
      "loss": 0.3901,
      "step": 3800
    },
    {
      "epoch": 167.3913043478261,
      "grad_norm": 1.8222392797470093,
      "learning_rate": 6.921666666666668e-06,
      "loss": 0.3901,
      "step": 3850
    },
    {
      "epoch": 169.56521739130434,
      "grad_norm": 0.9506675004959106,
      "learning_rate": 6.838333333333334e-06,
      "loss": 0.3945,
      "step": 3900
    },
    {
      "epoch": 171.7391304347826,
      "grad_norm": 4.512270450592041,
      "learning_rate": 6.7550000000000005e-06,
      "loss": 0.3913,
      "step": 3950
    },
    {
      "epoch": 173.91304347826087,
      "grad_norm": 0.74826580286026,
      "learning_rate": 6.6716666666666674e-06,
      "loss": 0.3914,
      "step": 4000
    },
    {
      "epoch": 173.91304347826087,
      "eval_loss": 0.4138501286506653,
      "eval_runtime": 6.5399,
      "eval_samples_per_second": 24.312,
      "eval_steps_per_second": 3.058,
      "step": 4000
    },
    {
      "epoch": 176.08695652173913,
      "grad_norm": 1.4560046195983887,
      "learning_rate": 6.588333333333334e-06,
      "loss": 0.3903,
      "step": 4050
    },
    {
      "epoch": 178.2608695652174,
      "grad_norm": 1.183271050453186,
      "learning_rate": 6.505e-06,
      "loss": 0.3901,
      "step": 4100
    },
    {
      "epoch": 180.43478260869566,
      "grad_norm": 0.6697027683258057,
      "learning_rate": 6.421666666666667e-06,
      "loss": 0.399,
      "step": 4150
    },
    {
      "epoch": 182.6086956521739,
      "grad_norm": 0.96327143907547,
      "learning_rate": 6.338333333333334e-06,
      "loss": 0.3895,
      "step": 4200
    },
    {
      "epoch": 184.7826086956522,
      "grad_norm": 0.9833754301071167,
      "learning_rate": 6.255e-06,
      "loss": 0.3903,
      "step": 4250
    },
    {
      "epoch": 186.95652173913044,
      "grad_norm": 0.857953667640686,
      "learning_rate": 6.171666666666667e-06,
      "loss": 0.39,
      "step": 4300
    },
    {
      "epoch": 189.1304347826087,
      "grad_norm": 0.7506987452507019,
      "learning_rate": 6.088333333333334e-06,
      "loss": 0.394,
      "step": 4350
    },
    {
      "epoch": 191.30434782608697,
      "grad_norm": 0.8283782601356506,
      "learning_rate": 6.005000000000001e-06,
      "loss": 0.3869,
      "step": 4400
    },
    {
      "epoch": 193.47826086956522,
      "grad_norm": 1.0252714157104492,
      "learning_rate": 5.921666666666667e-06,
      "loss": 0.3967,
      "step": 4450
    },
    {
      "epoch": 195.65217391304347,
      "grad_norm": 0.9400416612625122,
      "learning_rate": 5.838333333333334e-06,
      "loss": 0.3916,
      "step": 4500
    },
    {
      "epoch": 195.65217391304347,
      "eval_loss": 0.41232919692993164,
      "eval_runtime": 6.3912,
      "eval_samples_per_second": 24.878,
      "eval_steps_per_second": 3.129,
      "step": 4500
    },
    {
      "epoch": 197.82608695652175,
      "grad_norm": 0.9093385934829712,
      "learning_rate": 5.755000000000001e-06,
      "loss": 0.3894,
      "step": 4550
    },
    {
      "epoch": 200.0,
      "grad_norm": 1.078837513923645,
      "learning_rate": 5.671666666666668e-06,
      "loss": 0.3899,
      "step": 4600
    },
    {
      "epoch": 202.17391304347825,
      "grad_norm": 0.7748771905899048,
      "learning_rate": 5.588333333333334e-06,
      "loss": 0.3943,
      "step": 4650
    },
    {
      "epoch": 204.34782608695653,
      "grad_norm": 1.1522585153579712,
      "learning_rate": 5.505000000000001e-06,
      "loss": 0.389,
      "step": 4700
    },
    {
      "epoch": 206.52173913043478,
      "grad_norm": 1.6818900108337402,
      "learning_rate": 5.4216666666666676e-06,
      "loss": 0.3961,
      "step": 4750
    },
    {
      "epoch": 208.69565217391303,
      "grad_norm": 0.9630489349365234,
      "learning_rate": 5.3383333333333345e-06,
      "loss": 0.3926,
      "step": 4800
    },
    {
      "epoch": 210.8695652173913,
      "grad_norm": 0.915107250213623,
      "learning_rate": 5.2550000000000005e-06,
      "loss": 0.3843,
      "step": 4850
    },
    {
      "epoch": 213.04347826086956,
      "grad_norm": 0.7301034331321716,
      "learning_rate": 5.171666666666667e-06,
      "loss": 0.3929,
      "step": 4900
    },
    {
      "epoch": 215.2173913043478,
      "grad_norm": 0.8523257970809937,
      "learning_rate": 5.088333333333334e-06,
      "loss": 0.3873,
      "step": 4950
    },
    {
      "epoch": 217.3913043478261,
      "grad_norm": 0.7538474798202515,
      "learning_rate": 5.0049999999999995e-06,
      "loss": 0.3919,
      "step": 5000
    },
    {
      "epoch": 217.3913043478261,
      "eval_loss": 0.4122372269630432,
      "eval_runtime": 6.5201,
      "eval_samples_per_second": 24.386,
      "eval_steps_per_second": 3.067,
      "step": 5000
    },
    {
      "epoch": 219.56521739130434,
      "grad_norm": 1.006597638130188,
      "learning_rate": 4.921666666666666e-06,
      "loss": 0.3844,
      "step": 5050
    },
    {
      "epoch": 221.7391304347826,
      "grad_norm": 0.6486532092094421,
      "learning_rate": 4.838333333333334e-06,
      "loss": 0.3853,
      "step": 5100
    },
    {
      "epoch": 223.91304347826087,
      "grad_norm": 0.7955175638198853,
      "learning_rate": 4.755e-06,
      "loss": 0.3853,
      "step": 5150
    },
    {
      "epoch": 226.08695652173913,
      "grad_norm": 0.8011429309844971,
      "learning_rate": 4.671666666666667e-06,
      "loss": 0.3888,
      "step": 5200
    },
    {
      "epoch": 228.2608695652174,
      "grad_norm": 0.7421734929084778,
      "learning_rate": 4.588333333333333e-06,
      "loss": 0.3945,
      "step": 5250
    },
    {
      "epoch": 230.43478260869566,
      "grad_norm": 1.5567547082901,
      "learning_rate": 4.505e-06,
      "loss": 0.3916,
      "step": 5300
    },
    {
      "epoch": 232.6086956521739,
      "grad_norm": 0.8348387479782104,
      "learning_rate": 4.421666666666667e-06,
      "loss": 0.3887,
      "step": 5350
    },
    {
      "epoch": 234.7826086956522,
      "grad_norm": 0.965840756893158,
      "learning_rate": 4.338333333333334e-06,
      "loss": 0.3907,
      "step": 5400
    },
    {
      "epoch": 236.95652173913044,
      "grad_norm": 1.0057460069656372,
      "learning_rate": 4.255e-06,
      "loss": 0.4004,
      "step": 5450
    },
    {
      "epoch": 239.1304347826087,
      "grad_norm": 1.2100751399993896,
      "learning_rate": 4.171666666666667e-06,
      "loss": 0.3844,
      "step": 5500
    },
    {
      "epoch": 239.1304347826087,
      "eval_loss": 0.41257867217063904,
      "eval_runtime": 6.47,
      "eval_samples_per_second": 24.575,
      "eval_steps_per_second": 3.091,
      "step": 5500
    },
    {
      "epoch": 241.30434782608697,
      "grad_norm": 0.7117742896080017,
      "learning_rate": 4.088333333333334e-06,
      "loss": 0.3845,
      "step": 5550
    },
    {
      "epoch": 243.47826086956522,
      "grad_norm": 0.6881281733512878,
      "learning_rate": 4.005000000000001e-06,
      "loss": 0.3858,
      "step": 5600
    },
    {
      "epoch": 245.65217391304347,
      "grad_norm": 1.3413081169128418,
      "learning_rate": 3.921666666666667e-06,
      "loss": 0.3842,
      "step": 5650
    },
    {
      "epoch": 247.82608695652175,
      "grad_norm": 0.7762352228164673,
      "learning_rate": 3.8383333333333336e-06,
      "loss": 0.3839,
      "step": 5700
    },
    {
      "epoch": 250.0,
      "grad_norm": 1.2861838340759277,
      "learning_rate": 3.7550000000000005e-06,
      "loss": 0.3836,
      "step": 5750
    },
    {
      "epoch": 252.17391304347825,
      "grad_norm": 1.12875497341156,
      "learning_rate": 3.6716666666666665e-06,
      "loss": 0.3828,
      "step": 5800
    },
    {
      "epoch": 254.34782608695653,
      "grad_norm": 0.9641019105911255,
      "learning_rate": 3.588333333333334e-06,
      "loss": 0.3815,
      "step": 5850
    },
    {
      "epoch": 256.5217391304348,
      "grad_norm": 0.7063067555427551,
      "learning_rate": 3.505e-06,
      "loss": 0.3838,
      "step": 5900
    },
    {
      "epoch": 258.69565217391306,
      "grad_norm": 1.0992182493209839,
      "learning_rate": 3.4216666666666672e-06,
      "loss": 0.3824,
      "step": 5950
    },
    {
      "epoch": 260.8695652173913,
      "grad_norm": 0.6153731942176819,
      "learning_rate": 3.3383333333333333e-06,
      "loss": 0.3908,
      "step": 6000
    },
    {
      "epoch": 260.8695652173913,
      "eval_loss": 0.4136714041233063,
      "eval_runtime": 6.4416,
      "eval_samples_per_second": 24.683,
      "eval_steps_per_second": 3.105,
      "step": 6000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 348,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.880192835774762e+16,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}