|
{
  "best_metric": 0.3926040828227997,
  "best_model_checkpoint": "mikhail-panzo/zlm-fil-ceb_b64_le5_s8000/checkpoint-6500",
  "epoch": 277.2277227722772,
  "eval_steps": 500,
  "global_step": 7000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.9801980198019802,
      "grad_norm": 1.2683665752410889,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.4932,
      "step": 50
    },
    {
      "epoch": 3.9603960396039604,
      "grad_norm": 1.1266463994979858,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.4898,
      "step": 100
    },
    {
      "epoch": 5.9405940594059405,
      "grad_norm": 1.1800968647003174,
      "learning_rate": 7.5e-07,
      "loss": 0.4839,
      "step": 150
    },
    {
      "epoch": 7.920792079207921,
      "grad_norm": 0.8235568404197693,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.4785,
      "step": 200
    },
    {
      "epoch": 9.900990099009901,
      "grad_norm": 1.313211441040039,
      "learning_rate": 1.25e-06,
      "loss": 0.4767,
      "step": 250
    },
    {
      "epoch": 11.881188118811881,
      "grad_norm": 0.7831560373306274,
      "learning_rate": 1.5e-06,
      "loss": 0.4681,
      "step": 300
    },
    {
      "epoch": 13.861386138613861,
      "grad_norm": 0.7987237572669983,
      "learning_rate": 1.75e-06,
      "loss": 0.4658,
      "step": 350
    },
    {
      "epoch": 15.841584158415841,
      "grad_norm": 0.7143813371658325,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.4627,
      "step": 400
    },
    {
      "epoch": 17.821782178217823,
      "grad_norm": 0.8037531971931458,
      "learning_rate": 2.25e-06,
      "loss": 0.461,
      "step": 450
    },
    {
      "epoch": 19.801980198019802,
      "grad_norm": 0.79031902551651,
      "learning_rate": 2.5e-06,
      "loss": 0.4592,
      "step": 500
    },
    {
      "epoch": 19.801980198019802,
      "eval_loss": 0.42528408765792847,
      "eval_runtime": 7.8982,
      "eval_samples_per_second": 22.79,
      "eval_steps_per_second": 2.912,
      "step": 500
    },
    {
      "epoch": 21.782178217821784,
      "grad_norm": 0.7966288328170776,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.4551,
      "step": 550
    },
    {
      "epoch": 23.762376237623762,
      "grad_norm": 0.8126183152198792,
      "learning_rate": 3e-06,
      "loss": 0.4542,
      "step": 600
    },
    {
      "epoch": 25.742574257425744,
      "grad_norm": 0.6602805852890015,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.4535,
      "step": 650
    },
    {
      "epoch": 27.722772277227723,
      "grad_norm": 0.7177000641822815,
      "learning_rate": 3.5e-06,
      "loss": 0.4484,
      "step": 700
    },
    {
      "epoch": 29.702970297029704,
      "grad_norm": 0.7912996411323547,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.4486,
      "step": 750
    },
    {
      "epoch": 31.683168316831683,
      "grad_norm": 0.6251747608184814,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.4463,
      "step": 800
    },
    {
      "epoch": 33.663366336633665,
      "grad_norm": 1.003780722618103,
      "learning_rate": 4.25e-06,
      "loss": 0.4474,
      "step": 850
    },
    {
      "epoch": 35.64356435643565,
      "grad_norm": 0.742171585559845,
      "learning_rate": 4.5e-06,
      "loss": 0.4434,
      "step": 900
    },
    {
      "epoch": 37.62376237623762,
      "grad_norm": 0.6034647822380066,
      "learning_rate": 4.75e-06,
      "loss": 0.4425,
      "step": 950
    },
    {
      "epoch": 39.603960396039604,
      "grad_norm": 0.6990635395050049,
      "learning_rate": 5e-06,
      "loss": 0.4381,
      "step": 1000
    },
    {
      "epoch": 39.603960396039604,
      "eval_loss": 0.41002073884010315,
      "eval_runtime": 6.7782,
      "eval_samples_per_second": 26.556,
      "eval_steps_per_second": 3.393,
      "step": 1000
    },
    {
      "epoch": 41.584158415841586,
      "grad_norm": 0.9420008659362793,
      "learning_rate": 5.2500000000000006e-06,
      "loss": 0.4421,
      "step": 1050
    },
    {
      "epoch": 43.56435643564357,
      "grad_norm": 0.7143107652664185,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.4389,
      "step": 1100
    },
    {
      "epoch": 45.54455445544554,
      "grad_norm": 0.7934616804122925,
      "learning_rate": 5.75e-06,
      "loss": 0.4369,
      "step": 1150
    },
    {
      "epoch": 47.524752475247524,
      "grad_norm": 0.9682563543319702,
      "learning_rate": 6e-06,
      "loss": 0.4355,
      "step": 1200
    },
    {
      "epoch": 49.504950495049506,
      "grad_norm": 0.7880070209503174,
      "learning_rate": 6.25e-06,
      "loss": 0.4359,
      "step": 1250
    },
    {
      "epoch": 51.48514851485149,
      "grad_norm": 0.8443828821182251,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.4324,
      "step": 1300
    },
    {
      "epoch": 53.46534653465346,
      "grad_norm": 0.724743127822876,
      "learning_rate": 6.750000000000001e-06,
      "loss": 0.4348,
      "step": 1350
    },
    {
      "epoch": 55.445544554455445,
      "grad_norm": 0.7557791471481323,
      "learning_rate": 7e-06,
      "loss": 0.433,
      "step": 1400
    },
    {
      "epoch": 57.42574257425743,
      "grad_norm": 0.7936097383499146,
      "learning_rate": 7.25e-06,
      "loss": 0.4273,
      "step": 1450
    },
    {
      "epoch": 59.40594059405941,
      "grad_norm": 0.7041624784469604,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4281,
      "step": 1500
    },
    {
      "epoch": 59.40594059405941,
      "eval_loss": 0.40218469500541687,
      "eval_runtime": 7.4309,
      "eval_samples_per_second": 24.223,
      "eval_steps_per_second": 3.095,
      "step": 1500
    },
    {
      "epoch": 61.386138613861384,
      "grad_norm": 0.9194286465644836,
      "learning_rate": 7.75e-06,
      "loss": 0.4275,
      "step": 1550
    },
    {
      "epoch": 63.366336633663366,
      "grad_norm": 0.7239477634429932,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.4245,
      "step": 1600
    },
    {
      "epoch": 65.34653465346534,
      "grad_norm": 0.7235706448554993,
      "learning_rate": 8.25e-06,
      "loss": 0.4248,
      "step": 1650
    },
    {
      "epoch": 67.32673267326733,
      "grad_norm": 0.7281640768051147,
      "learning_rate": 8.5e-06,
      "loss": 0.425,
      "step": 1700
    },
    {
      "epoch": 69.3069306930693,
      "grad_norm": 1.011294960975647,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.4238,
      "step": 1750
    },
    {
      "epoch": 71.2871287128713,
      "grad_norm": 1.1249107122421265,
      "learning_rate": 9e-06,
      "loss": 0.4203,
      "step": 1800
    },
    {
      "epoch": 73.26732673267327,
      "grad_norm": 0.7369679808616638,
      "learning_rate": 9.250000000000001e-06,
      "loss": 0.422,
      "step": 1850
    },
    {
      "epoch": 75.24752475247524,
      "grad_norm": 0.704367995262146,
      "learning_rate": 9.5e-06,
      "loss": 0.4201,
      "step": 1900
    },
    {
      "epoch": 77.22772277227723,
      "grad_norm": 0.8307437896728516,
      "learning_rate": 9.75e-06,
      "loss": 0.4203,
      "step": 1950
    },
    {
      "epoch": 79.20792079207921,
      "grad_norm": 0.7827133536338806,
      "learning_rate": 1e-05,
      "loss": 0.4195,
      "step": 2000
    },
    {
      "epoch": 79.20792079207921,
      "eval_loss": 0.39878609776496887,
      "eval_runtime": 8.1579,
      "eval_samples_per_second": 22.064,
      "eval_steps_per_second": 2.819,
      "step": 2000
    },
    {
      "epoch": 81.18811881188118,
      "grad_norm": 0.8909313082695007,
      "learning_rate": 9.916666666666668e-06,
      "loss": 0.4203,
      "step": 2050
    },
    {
      "epoch": 83.16831683168317,
      "grad_norm": 0.7147483825683594,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.418,
      "step": 2100
    },
    {
      "epoch": 85.14851485148515,
      "grad_norm": 0.6844545006752014,
      "learning_rate": 9.75e-06,
      "loss": 0.4167,
      "step": 2150
    },
    {
      "epoch": 87.12871287128714,
      "grad_norm": 0.88512122631073,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.4108,
      "step": 2200
    },
    {
      "epoch": 89.10891089108911,
      "grad_norm": 0.7097252607345581,
      "learning_rate": 9.583333333333335e-06,
      "loss": 0.4135,
      "step": 2250
    },
    {
      "epoch": 91.08910891089108,
      "grad_norm": 0.8400272727012634,
      "learning_rate": 9.5e-06,
      "loss": 0.4147,
      "step": 2300
    },
    {
      "epoch": 93.06930693069307,
      "grad_norm": 0.7614939212799072,
      "learning_rate": 9.416666666666667e-06,
      "loss": 0.4119,
      "step": 2350
    },
    {
      "epoch": 95.04950495049505,
      "grad_norm": 0.8099732995033264,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.4108,
      "step": 2400
    },
    {
      "epoch": 97.02970297029702,
      "grad_norm": 1.0697815418243408,
      "learning_rate": 9.250000000000001e-06,
      "loss": 0.4095,
      "step": 2450
    },
    {
      "epoch": 99.00990099009901,
      "grad_norm": 0.7235235571861267,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.4134,
      "step": 2500
    },
    {
      "epoch": 99.00990099009901,
      "eval_loss": 0.39545077085494995,
      "eval_runtime": 7.724,
      "eval_samples_per_second": 23.304,
      "eval_steps_per_second": 2.978,
      "step": 2500
    },
    {
      "epoch": 100.99009900990099,
      "grad_norm": 0.6166990399360657,
      "learning_rate": 9.083333333333333e-06,
      "loss": 0.4097,
      "step": 2550
    },
    {
      "epoch": 102.97029702970298,
      "grad_norm": 0.6944295763969421,
      "learning_rate": 9e-06,
      "loss": 0.4107,
      "step": 2600
    },
    {
      "epoch": 104.95049504950495,
      "grad_norm": 0.8469858765602112,
      "learning_rate": 8.916666666666667e-06,
      "loss": 0.4056,
      "step": 2650
    },
    {
      "epoch": 106.93069306930693,
      "grad_norm": 0.6261239647865295,
      "learning_rate": 8.833333333333334e-06,
      "loss": 0.409,
      "step": 2700
    },
    {
      "epoch": 108.91089108910892,
      "grad_norm": 0.6304221749305725,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.4081,
      "step": 2750
    },
    {
      "epoch": 110.89108910891089,
      "grad_norm": 0.6513387560844421,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.4071,
      "step": 2800
    },
    {
      "epoch": 112.87128712871286,
      "grad_norm": 0.9311261177062988,
      "learning_rate": 8.583333333333333e-06,
      "loss": 0.4091,
      "step": 2850
    },
    {
      "epoch": 114.85148514851485,
      "grad_norm": 0.7296612858772278,
      "learning_rate": 8.5e-06,
      "loss": 0.4055,
      "step": 2900
    },
    {
      "epoch": 116.83168316831683,
      "grad_norm": 0.8149676322937012,
      "learning_rate": 8.416666666666667e-06,
      "loss": 0.4052,
      "step": 2950
    },
    {
      "epoch": 118.81188118811882,
      "grad_norm": 0.8266366124153137,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.4049,
      "step": 3000
    },
    {
      "epoch": 118.81188118811882,
      "eval_loss": 0.39345207810401917,
      "eval_runtime": 7.2151,
      "eval_samples_per_second": 24.948,
      "eval_steps_per_second": 3.188,
      "step": 3000
    },
    {
      "epoch": 120.79207920792079,
      "grad_norm": 0.6681864261627197,
      "learning_rate": 8.25e-06,
      "loss": 0.4022,
      "step": 3050
    },
    {
      "epoch": 122.77227722772277,
      "grad_norm": 0.5798916220664978,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.4026,
      "step": 3100
    },
    {
      "epoch": 124.75247524752476,
      "grad_norm": 0.8516507744789124,
      "learning_rate": 8.083333333333334e-06,
      "loss": 0.4064,
      "step": 3150
    },
    {
      "epoch": 126.73267326732673,
      "grad_norm": 0.6938304901123047,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.4057,
      "step": 3200
    },
    {
      "epoch": 128.7128712871287,
      "grad_norm": 0.6889196038246155,
      "learning_rate": 7.916666666666667e-06,
      "loss": 0.4022,
      "step": 3250
    },
    {
      "epoch": 130.69306930693068,
      "grad_norm": 0.735680103302002,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.4019,
      "step": 3300
    },
    {
      "epoch": 132.67326732673268,
      "grad_norm": 1.0390517711639404,
      "learning_rate": 7.75e-06,
      "loss": 0.4014,
      "step": 3350
    },
    {
      "epoch": 134.65346534653466,
      "grad_norm": 0.807412326335907,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.3989,
      "step": 3400
    },
    {
      "epoch": 136.63366336633663,
      "grad_norm": 0.6928243637084961,
      "learning_rate": 7.583333333333333e-06,
      "loss": 0.4011,
      "step": 3450
    },
    {
      "epoch": 138.6138613861386,
      "grad_norm": 0.6894844770431519,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4016,
      "step": 3500
    },
    {
      "epoch": 138.6138613861386,
      "eval_loss": 0.3931116759777069,
      "eval_runtime": 7.7493,
      "eval_samples_per_second": 23.228,
      "eval_steps_per_second": 2.968,
      "step": 3500
    },
    {
      "epoch": 140.59405940594058,
      "grad_norm": 0.9431400895118713,
      "learning_rate": 7.416666666666668e-06,
      "loss": 0.4028,
      "step": 3550
    },
    {
      "epoch": 142.5742574257426,
      "grad_norm": 0.6703277230262756,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.3992,
      "step": 3600
    },
    {
      "epoch": 144.55445544554456,
      "grad_norm": 0.8334361910820007,
      "learning_rate": 7.25e-06,
      "loss": 0.3979,
      "step": 3650
    },
    {
      "epoch": 146.53465346534654,
      "grad_norm": 0.8266313672065735,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.3964,
      "step": 3700
    },
    {
      "epoch": 148.5148514851485,
      "grad_norm": 0.7688102126121521,
      "learning_rate": 7.083333333333335e-06,
      "loss": 0.3968,
      "step": 3750
    },
    {
      "epoch": 150.4950495049505,
      "grad_norm": 0.7516873478889465,
      "learning_rate": 7e-06,
      "loss": 0.3971,
      "step": 3800
    },
    {
      "epoch": 152.47524752475246,
      "grad_norm": 0.8034970760345459,
      "learning_rate": 6.916666666666667e-06,
      "loss": 0.3926,
      "step": 3850
    },
    {
      "epoch": 154.45544554455446,
      "grad_norm": 0.681676983833313,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.3975,
      "step": 3900
    },
    {
      "epoch": 156.43564356435644,
      "grad_norm": 0.6939652562141418,
      "learning_rate": 6.750000000000001e-06,
      "loss": 0.4005,
      "step": 3950
    },
    {
      "epoch": 158.41584158415841,
      "grad_norm": 0.6093599200248718,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.3937,
      "step": 4000
    },
    {
      "epoch": 158.41584158415841,
      "eval_loss": 0.39361944794654846,
      "eval_runtime": 7.0786,
      "eval_samples_per_second": 25.429,
      "eval_steps_per_second": 3.249,
      "step": 4000
    },
    {
      "epoch": 160.3960396039604,
      "grad_norm": 0.6037696003913879,
      "learning_rate": 6.5833333333333335e-06,
      "loss": 0.3953,
      "step": 4050
    },
    {
      "epoch": 162.37623762376236,
      "grad_norm": 0.6949267387390137,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.398,
      "step": 4100
    },
    {
      "epoch": 164.35643564356437,
      "grad_norm": 0.7271093726158142,
      "learning_rate": 6.418333333333334e-06,
      "loss": 0.3945,
      "step": 4150
    },
    {
      "epoch": 166.33663366336634,
      "grad_norm": 0.6959238052368164,
      "learning_rate": 6.335e-06,
      "loss": 0.3957,
      "step": 4200
    },
    {
      "epoch": 168.31683168316832,
      "grad_norm": 0.6987124681472778,
      "learning_rate": 6.251666666666667e-06,
      "loss": 0.3973,
      "step": 4250
    },
    {
      "epoch": 170.2970297029703,
      "grad_norm": 0.6888285875320435,
      "learning_rate": 6.168333333333334e-06,
      "loss": 0.3971,
      "step": 4300
    },
    {
      "epoch": 172.27722772277227,
      "grad_norm": 0.6536576151847839,
      "learning_rate": 6.085000000000001e-06,
      "loss": 0.3939,
      "step": 4350
    },
    {
      "epoch": 174.25742574257427,
      "grad_norm": 0.6235485672950745,
      "learning_rate": 6.001666666666667e-06,
      "loss": 0.3949,
      "step": 4400
    },
    {
      "epoch": 176.23762376237624,
      "grad_norm": 0.7131723761558533,
      "learning_rate": 5.918333333333334e-06,
      "loss": 0.3944,
      "step": 4450
    },
    {
      "epoch": 178.21782178217822,
      "grad_norm": 0.5429028272628784,
      "learning_rate": 5.835000000000001e-06,
      "loss": 0.3953,
      "step": 4500
    },
    {
      "epoch": 178.21782178217822,
      "eval_loss": 0.3935137093067169,
      "eval_runtime": 6.8281,
      "eval_samples_per_second": 26.362,
      "eval_steps_per_second": 3.368,
      "step": 4500
    },
    {
      "epoch": 180.1980198019802,
      "grad_norm": 0.6293529272079468,
      "learning_rate": 5.751666666666668e-06,
      "loss": 0.3923,
      "step": 4550
    },
    {
      "epoch": 182.17821782178217,
      "grad_norm": 0.7379683256149292,
      "learning_rate": 5.668333333333334e-06,
      "loss": 0.393,
      "step": 4600
    },
    {
      "epoch": 184.15841584158414,
      "grad_norm": 0.6412554979324341,
      "learning_rate": 5.585000000000001e-06,
      "loss": 0.3915,
      "step": 4650
    },
    {
      "epoch": 186.13861386138615,
      "grad_norm": 0.7281537652015686,
      "learning_rate": 5.501666666666668e-06,
      "loss": 0.3928,
      "step": 4700
    },
    {
      "epoch": 188.11881188118812,
      "grad_norm": 0.640044093132019,
      "learning_rate": 5.418333333333333e-06,
      "loss": 0.3935,
      "step": 4750
    },
    {
      "epoch": 190.0990099009901,
      "grad_norm": 0.6560197472572327,
      "learning_rate": 5.335000000000001e-06,
      "loss": 0.3922,
      "step": 4800
    },
    {
      "epoch": 192.07920792079207,
      "grad_norm": 0.5895935893058777,
      "learning_rate": 5.2516666666666675e-06,
      "loss": 0.3939,
      "step": 4850
    },
    {
      "epoch": 194.05940594059405,
      "grad_norm": 0.7578818202018738,
      "learning_rate": 5.168333333333334e-06,
      "loss": 0.3939,
      "step": 4900
    },
    {
      "epoch": 196.03960396039605,
      "grad_norm": 0.6305426955223083,
      "learning_rate": 5.085e-06,
      "loss": 0.3917,
      "step": 4950
    },
    {
      "epoch": 198.01980198019803,
      "grad_norm": 0.8592019081115723,
      "learning_rate": 5.0016666666666665e-06,
      "loss": 0.3933,
      "step": 5000
    },
    {
      "epoch": 198.01980198019803,
      "eval_loss": 0.39355847239494324,
      "eval_runtime": 7.4596,
      "eval_samples_per_second": 24.13,
      "eval_steps_per_second": 3.083,
      "step": 5000
    },
    {
      "epoch": 200.0,
      "grad_norm": 0.9197565317153931,
      "learning_rate": 4.918333333333334e-06,
      "loss": 0.3947,
      "step": 5050
    },
    {
      "epoch": 201.98019801980197,
      "grad_norm": 0.675254225730896,
      "learning_rate": 4.835e-06,
      "loss": 0.3921,
      "step": 5100
    },
    {
      "epoch": 203.96039603960395,
      "grad_norm": 0.7896837592124939,
      "learning_rate": 4.751666666666667e-06,
      "loss": 0.3937,
      "step": 5150
    },
    {
      "epoch": 205.94059405940595,
      "grad_norm": 0.6978718042373657,
      "learning_rate": 4.668333333333333e-06,
      "loss": 0.3912,
      "step": 5200
    },
    {
      "epoch": 207.92079207920793,
      "grad_norm": 0.6968542337417603,
      "learning_rate": 4.585e-06,
      "loss": 0.3902,
      "step": 5250
    },
    {
      "epoch": 209.9009900990099,
      "grad_norm": 0.6489793062210083,
      "learning_rate": 4.501666666666667e-06,
      "loss": 0.3911,
      "step": 5300
    },
    {
      "epoch": 211.88118811881188,
      "grad_norm": 0.6600732803344727,
      "learning_rate": 4.418333333333334e-06,
      "loss": 0.3912,
      "step": 5350
    },
    {
      "epoch": 213.86138613861385,
      "grad_norm": 0.6637029051780701,
      "learning_rate": 4.335e-06,
      "loss": 0.3903,
      "step": 5400
    },
    {
      "epoch": 215.84158415841586,
      "grad_norm": 0.9407865405082703,
      "learning_rate": 4.251666666666667e-06,
      "loss": 0.3913,
      "step": 5450
    },
    {
      "epoch": 217.82178217821783,
      "grad_norm": 0.674074649810791,
      "learning_rate": 4.168333333333334e-06,
      "loss": 0.3932,
      "step": 5500
    },
    {
      "epoch": 217.82178217821783,
      "eval_loss": 0.3936135172843933,
      "eval_runtime": 7.411,
      "eval_samples_per_second": 24.288,
      "eval_steps_per_second": 3.103,
      "step": 5500
    },
    {
      "epoch": 219.8019801980198,
      "grad_norm": 0.5686890482902527,
      "learning_rate": 4.085e-06,
      "loss": 0.391,
      "step": 5550
    },
    {
      "epoch": 221.78217821782178,
      "grad_norm": 0.5418526530265808,
      "learning_rate": 4.001666666666667e-06,
      "loss": 0.3863,
      "step": 5600
    },
    {
      "epoch": 223.76237623762376,
      "grad_norm": 0.7057704925537109,
      "learning_rate": 3.918333333333334e-06,
      "loss": 0.394,
      "step": 5650
    },
    {
      "epoch": 225.74257425742573,
      "grad_norm": 0.6317301392555237,
      "learning_rate": 3.8350000000000006e-06,
      "loss": 0.3906,
      "step": 5700
    },
    {
      "epoch": 227.72277227722773,
      "grad_norm": 0.6327551007270813,
      "learning_rate": 3.7516666666666666e-06,
      "loss": 0.3902,
      "step": 5750
    },
    {
      "epoch": 229.7029702970297,
      "grad_norm": 0.6199086904525757,
      "learning_rate": 3.668333333333334e-06,
      "loss": 0.3912,
      "step": 5800
    },
    {
      "epoch": 231.68316831683168,
      "grad_norm": 0.6341702938079834,
      "learning_rate": 3.585e-06,
      "loss": 0.391,
      "step": 5850
    },
    {
      "epoch": 233.66336633663366,
      "grad_norm": 0.7226659655570984,
      "learning_rate": 3.5016666666666673e-06,
      "loss": 0.3908,
      "step": 5900
    },
    {
      "epoch": 235.64356435643563,
      "grad_norm": 0.6628900170326233,
      "learning_rate": 3.4183333333333334e-06,
      "loss": 0.3899,
      "step": 5950
    },
    {
      "epoch": 237.62376237623764,
      "grad_norm": 0.6972154378890991,
      "learning_rate": 3.3350000000000003e-06,
      "loss": 0.3884,
      "step": 6000
    },
    {
      "epoch": 237.62376237623764,
      "eval_loss": 0.3934413194656372,
      "eval_runtime": 7.3097,
      "eval_samples_per_second": 24.625,
      "eval_steps_per_second": 3.146,
      "step": 6000
    },
    {
      "epoch": 239.6039603960396,
      "grad_norm": 0.7429386377334595,
      "learning_rate": 3.2516666666666667e-06,
      "loss": 0.3902,
      "step": 6050
    },
    {
      "epoch": 241.58415841584159,
      "grad_norm": 0.6832717061042786,
      "learning_rate": 3.1683333333333336e-06,
      "loss": 0.3896,
      "step": 6100
    },
    {
      "epoch": 243.56435643564356,
      "grad_norm": 0.7591320872306824,
      "learning_rate": 3.085e-06,
      "loss": 0.3905,
      "step": 6150
    },
    {
      "epoch": 245.54455445544554,
      "grad_norm": 0.5904133319854736,
      "learning_rate": 3.001666666666667e-06,
      "loss": 0.3876,
      "step": 6200
    },
    {
      "epoch": 247.52475247524754,
      "grad_norm": 0.6338399052619934,
      "learning_rate": 2.9183333333333335e-06,
      "loss": 0.3891,
      "step": 6250
    },
    {
      "epoch": 249.5049504950495,
      "grad_norm": 0.6359020471572876,
      "learning_rate": 2.835e-06,
      "loss": 0.3905,
      "step": 6300
    },
    {
      "epoch": 251.4851485148515,
      "grad_norm": 0.6320521831512451,
      "learning_rate": 2.751666666666667e-06,
      "loss": 0.3885,
      "step": 6350
    },
    {
      "epoch": 253.46534653465346,
      "grad_norm": 0.6006565690040588,
      "learning_rate": 2.6683333333333333e-06,
      "loss": 0.3861,
      "step": 6400
    },
    {
      "epoch": 255.44554455445544,
      "grad_norm": 0.5949265360832214,
      "learning_rate": 2.5866666666666667e-06,
      "loss": 0.3898,
      "step": 6450
    },
    {
      "epoch": 257.4257425742574,
      "grad_norm": 0.6407713294029236,
      "learning_rate": 2.5033333333333336e-06,
      "loss": 0.3889,
      "step": 6500
    },
    {
      "epoch": 257.4257425742574,
      "eval_loss": 0.3926040828227997,
      "eval_runtime": 7.8449,
      "eval_samples_per_second": 22.945,
      "eval_steps_per_second": 2.932,
      "step": 6500
    },
    {
      "epoch": 259.4059405940594,
      "grad_norm": 0.6776269674301147,
      "learning_rate": 2.42e-06,
      "loss": 0.3863,
      "step": 6550
    },
    {
      "epoch": 261.38613861386136,
      "grad_norm": 0.7183690667152405,
      "learning_rate": 2.3366666666666666e-06,
      "loss": 0.388,
      "step": 6600
    },
    {
      "epoch": 263.36633663366337,
      "grad_norm": 0.683351993560791,
      "learning_rate": 2.2533333333333335e-06,
      "loss": 0.3861,
      "step": 6650
    },
    {
      "epoch": 265.34653465346537,
      "grad_norm": 0.5622109770774841,
      "learning_rate": 2.17e-06,
      "loss": 0.3872,
      "step": 6700
    },
    {
      "epoch": 267.3267326732673,
      "grad_norm": 0.6307435035705566,
      "learning_rate": 2.086666666666667e-06,
      "loss": 0.3851,
      "step": 6750
    },
    {
      "epoch": 269.3069306930693,
      "grad_norm": 0.5858733654022217,
      "learning_rate": 2.0033333333333334e-06,
      "loss": 0.3847,
      "step": 6800
    },
    {
      "epoch": 271.28712871287127,
      "grad_norm": 0.7978197336196899,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 0.3876,
      "step": 6850
    },
    {
      "epoch": 273.26732673267327,
      "grad_norm": 0.6843935251235962,
      "learning_rate": 1.836666666666667e-06,
      "loss": 0.3871,
      "step": 6900
    },
    {
      "epoch": 275.2475247524753,
      "grad_norm": 0.8366850018501282,
      "learning_rate": 1.7533333333333336e-06,
      "loss": 0.3909,
      "step": 6950
    },
    {
      "epoch": 277.2277227722772,
      "grad_norm": 0.584172248840332,
      "learning_rate": 1.6700000000000003e-06,
      "loss": 0.3855,
      "step": 7000
    },
    {
      "epoch": 277.2277227722772,
      "eval_loss": 0.39400026202201843,
      "eval_runtime": 6.8593,
      "eval_samples_per_second": 26.242,
      "eval_steps_per_second": 3.353,
      "step": 7000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 320,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7.563870212869475e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}