{
  "best_metric": 0.4078540802001953,
  "best_model_checkpoint": "mikhail-panzo/fil_b128_le4_s8000/checkpoint-1000",
  "epoch": 530.3867403314918,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.419889502762431,
      "grad_norm": 2.1200406551361084,
      "learning_rate": 2.5e-06,
      "loss": 0.77,
      "step": 50
    },
    {
      "epoch": 8.839779005524862,
      "grad_norm": 0.8919622302055359,
      "learning_rate": 5e-06,
      "loss": 0.695,
      "step": 100
    },
    {
      "epoch": 13.259668508287293,
      "grad_norm": 0.9864558577537537,
      "learning_rate": 7.5e-06,
      "loss": 0.6278,
      "step": 150
    },
    {
      "epoch": 17.679558011049725,
      "grad_norm": 0.9195986986160278,
      "learning_rate": 1e-05,
      "loss": 0.5431,
      "step": 200
    },
    {
      "epoch": 22.099447513812155,
      "grad_norm": 1.0175281763076782,
      "learning_rate": 1.25e-05,
      "loss": 0.5155,
      "step": 250
    },
    {
      "epoch": 26.519337016574585,
      "grad_norm": 1.0303566455841064,
      "learning_rate": 1.5e-05,
      "loss": 0.502,
      "step": 300
    },
    {
      "epoch": 30.939226519337016,
      "grad_norm": 1.005897879600525,
      "learning_rate": 1.75e-05,
      "loss": 0.4889,
      "step": 350
    },
    {
      "epoch": 35.35911602209945,
      "grad_norm": 1.0381360054016113,
      "learning_rate": 2e-05,
      "loss": 0.477,
      "step": 400
    },
    {
      "epoch": 39.77900552486188,
      "grad_norm": 1.2148911952972412,
      "learning_rate": 2.25e-05,
      "loss": 0.4711,
      "step": 450
    },
    {
      "epoch": 44.19889502762431,
      "grad_norm": 1.5404785871505737,
      "learning_rate": 2.5e-05,
      "loss": 0.463,
      "step": 500
    },
    {
      "epoch": 44.19889502762431,
      "eval_loss": 0.42523157596588135,
      "eval_runtime": 15.6706,
      "eval_samples_per_second": 10.274,
      "eval_steps_per_second": 1.34,
      "step": 500
    },
    {
      "epoch": 48.61878453038674,
      "grad_norm": 1.1097612380981445,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 0.4601,
      "step": 550
    },
    {
      "epoch": 53.03867403314917,
      "grad_norm": 1.2796109914779663,
      "learning_rate": 3e-05,
      "loss": 0.454,
      "step": 600
    },
    {
      "epoch": 57.4585635359116,
      "grad_norm": 1.0656301975250244,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 0.4498,
      "step": 650
    },
    {
      "epoch": 61.87845303867403,
      "grad_norm": 1.3971511125564575,
      "learning_rate": 3.5e-05,
      "loss": 0.4439,
      "step": 700
    },
    {
      "epoch": 66.29834254143647,
      "grad_norm": 1.9344899654388428,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.4418,
      "step": 750
    },
    {
      "epoch": 70.7182320441989,
      "grad_norm": 1.080334186553955,
      "learning_rate": 4e-05,
      "loss": 0.4379,
      "step": 800
    },
    {
      "epoch": 75.13812154696133,
      "grad_norm": 1.9891581535339355,
      "learning_rate": 4.25e-05,
      "loss": 0.4334,
      "step": 850
    },
    {
      "epoch": 79.55801104972376,
      "grad_norm": 1.3854402303695679,
      "learning_rate": 4.5e-05,
      "loss": 0.4331,
      "step": 900
    },
    {
      "epoch": 83.97790055248619,
      "grad_norm": 1.5818183422088623,
      "learning_rate": 4.75e-05,
      "loss": 0.428,
      "step": 950
    },
    {
      "epoch": 88.39779005524862,
      "grad_norm": 1.8474106788635254,
      "learning_rate": 5e-05,
      "loss": 0.4238,
      "step": 1000
    },
    {
      "epoch": 88.39779005524862,
      "eval_loss": 0.4078540802001953,
      "eval_runtime": 14.8436,
      "eval_samples_per_second": 10.846,
      "eval_steps_per_second": 1.415,
      "step": 1000
    },
    {
      "epoch": 92.81767955801105,
      "grad_norm": 3.1851751804351807,
      "learning_rate": 5.25e-05,
      "loss": 0.4234,
      "step": 1050
    },
    {
      "epoch": 97.23756906077348,
      "grad_norm": 1.3754140138626099,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.4209,
      "step": 1100
    },
    {
      "epoch": 101.65745856353591,
      "grad_norm": 1.6143475770950317,
      "learning_rate": 5.7499999999999995e-05,
      "loss": 0.4228,
      "step": 1150
    },
    {
      "epoch": 106.07734806629834,
      "grad_norm": 1.9961720705032349,
      "learning_rate": 6e-05,
      "loss": 0.4162,
      "step": 1200
    },
    {
      "epoch": 110.49723756906077,
      "grad_norm": 1.9125347137451172,
      "learning_rate": 6.25e-05,
      "loss": 0.4178,
      "step": 1250
    },
    {
      "epoch": 114.9171270718232,
      "grad_norm": 3.1913959980010986,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.4143,
      "step": 1300
    },
    {
      "epoch": 119.33701657458563,
      "grad_norm": 2.1010069847106934,
      "learning_rate": 6.750000000000001e-05,
      "loss": 0.4118,
      "step": 1350
    },
    {
      "epoch": 123.75690607734806,
      "grad_norm": 1.3079431056976318,
      "learning_rate": 7e-05,
      "loss": 0.4091,
      "step": 1400
    },
    {
      "epoch": 128.1767955801105,
      "grad_norm": 2.3663132190704346,
      "learning_rate": 7.25e-05,
      "loss": 0.4092,
      "step": 1450
    },
    {
      "epoch": 132.59668508287294,
      "grad_norm": 1.031698226928711,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.4083,
      "step": 1500
    },
    {
      "epoch": 132.59668508287294,
      "eval_loss": 0.4082767069339752,
      "eval_runtime": 15.1068,
      "eval_samples_per_second": 10.657,
      "eval_steps_per_second": 1.39,
      "step": 1500
    },
    {
      "epoch": 137.01657458563537,
      "grad_norm": 2.7781426906585693,
      "learning_rate": 7.75e-05,
      "loss": 0.4079,
      "step": 1550
    },
    {
      "epoch": 141.4364640883978,
      "grad_norm": 1.136169672012329,
      "learning_rate": 8e-05,
      "loss": 0.4028,
      "step": 1600
    },
    {
      "epoch": 145.85635359116023,
      "grad_norm": 1.7299158573150635,
      "learning_rate": 8.25e-05,
      "loss": 0.4034,
      "step": 1650
    },
    {
      "epoch": 150.27624309392266,
      "grad_norm": 2.4281983375549316,
      "learning_rate": 8.5e-05,
      "loss": 0.3992,
      "step": 1700
    },
    {
      "epoch": 154.6961325966851,
      "grad_norm": 3.6834373474121094,
      "learning_rate": 8.75e-05,
      "loss": 0.3997,
      "step": 1750
    },
    {
      "epoch": 159.11602209944752,
      "grad_norm": 3.1484293937683105,
      "learning_rate": 9e-05,
      "loss": 0.396,
      "step": 1800
    },
    {
      "epoch": 163.53591160220995,
      "grad_norm": 3.978259563446045,
      "learning_rate": 9.250000000000001e-05,
      "loss": 0.3969,
      "step": 1850
    },
    {
      "epoch": 167.95580110497238,
      "grad_norm": 2.906266212463379,
      "learning_rate": 9.5e-05,
      "loss": 0.4017,
      "step": 1900
    },
    {
      "epoch": 172.3756906077348,
      "grad_norm": 1.1569880247116089,
      "learning_rate": 9.75e-05,
      "loss": 0.396,
      "step": 1950
    },
    {
      "epoch": 176.79558011049724,
      "grad_norm": 2.2872982025146484,
      "learning_rate": 0.0001,
      "loss": 0.3928,
      "step": 2000
    },
    {
      "epoch": 176.79558011049724,
      "eval_loss": 0.414584219455719,
      "eval_runtime": 15.2895,
      "eval_samples_per_second": 10.53,
      "eval_steps_per_second": 1.373,
      "step": 2000
    },
    {
      "epoch": 181.21546961325967,
      "grad_norm": 1.1768990755081177,
      "learning_rate": 9.916666666666667e-05,
      "loss": 0.3928,
      "step": 2050
    },
    {
      "epoch": 185.6353591160221,
      "grad_norm": 1.2087491750717163,
      "learning_rate": 9.833333333333333e-05,
      "loss": 0.3901,
      "step": 2100
    },
    {
      "epoch": 190.05524861878453,
      "grad_norm": 1.17588210105896,
      "learning_rate": 9.75e-05,
      "loss": 0.3877,
      "step": 2150
    },
    {
      "epoch": 194.47513812154696,
      "grad_norm": 1.5979018211364746,
      "learning_rate": 9.666666666666667e-05,
      "loss": 0.3879,
      "step": 2200
    },
    {
      "epoch": 198.8950276243094,
      "grad_norm": 1.0489567518234253,
      "learning_rate": 9.583333333333334e-05,
      "loss": 0.3869,
      "step": 2250
    },
    {
      "epoch": 203.31491712707182,
      "grad_norm": 1.833038091659546,
      "learning_rate": 9.501666666666668e-05,
      "loss": 0.3859,
      "step": 2300
    },
    {
      "epoch": 207.73480662983425,
      "grad_norm": 2.075849771499634,
      "learning_rate": 9.418333333333334e-05,
      "loss": 0.3843,
      "step": 2350
    },
    {
      "epoch": 212.15469613259668,
      "grad_norm": 2.8613317012786865,
      "learning_rate": 9.335e-05,
      "loss": 0.381,
      "step": 2400
    },
    {
      "epoch": 216.5745856353591,
      "grad_norm": 1.6291433572769165,
      "learning_rate": 9.251666666666667e-05,
      "loss": 0.3802,
      "step": 2450
    },
    {
      "epoch": 220.99447513812154,
      "grad_norm": 1.1548006534576416,
      "learning_rate": 9.168333333333333e-05,
      "loss": 0.381,
      "step": 2500
    },
    {
      "epoch": 220.99447513812154,
      "eval_loss": 0.4127160906791687,
      "eval_runtime": 15.1026,
      "eval_samples_per_second": 10.66,
      "eval_steps_per_second": 1.39,
      "step": 2500
    },
    {
      "epoch": 225.41436464088397,
      "grad_norm": 1.3956973552703857,
      "learning_rate": 9.085e-05,
      "loss": 0.3795,
      "step": 2550
    },
    {
      "epoch": 229.8342541436464,
      "grad_norm": 1.6613599061965942,
      "learning_rate": 9.001666666666667e-05,
      "loss": 0.3757,
      "step": 2600
    },
    {
      "epoch": 234.25414364640883,
      "grad_norm": 1.4951831102371216,
      "learning_rate": 8.918333333333334e-05,
      "loss": 0.3763,
      "step": 2650
    },
    {
      "epoch": 238.67403314917127,
      "grad_norm": 3.0197391510009766,
      "learning_rate": 8.834999999999999e-05,
      "loss": 0.3765,
      "step": 2700
    },
    {
      "epoch": 243.0939226519337,
      "grad_norm": 0.9016386866569519,
      "learning_rate": 8.751666666666668e-05,
      "loss": 0.3724,
      "step": 2750
    },
    {
      "epoch": 247.51381215469613,
      "grad_norm": 1.6974703073501587,
      "learning_rate": 8.668333333333334e-05,
      "loss": 0.3729,
      "step": 2800
    },
    {
      "epoch": 251.93370165745856,
      "grad_norm": 2.8571081161499023,
      "learning_rate": 8.585000000000001e-05,
      "loss": 0.3715,
      "step": 2850
    },
    {
      "epoch": 256.353591160221,
      "grad_norm": 2.8940412998199463,
      "learning_rate": 8.501666666666667e-05,
      "loss": 0.3716,
      "step": 2900
    },
    {
      "epoch": 260.77348066298345,
      "grad_norm": 1.1075533628463745,
      "learning_rate": 8.418333333333334e-05,
      "loss": 0.3691,
      "step": 2950
    },
    {
      "epoch": 265.1933701657459,
      "grad_norm": 1.145349383354187,
      "learning_rate": 8.335e-05,
      "loss": 0.3712,
      "step": 3000
    },
    {
      "epoch": 265.1933701657459,
      "eval_loss": 0.40815719962120056,
      "eval_runtime": 15.0404,
      "eval_samples_per_second": 10.705,
      "eval_steps_per_second": 1.396,
      "step": 3000
    },
    {
      "epoch": 269.6132596685083,
      "grad_norm": 0.788912832736969,
      "learning_rate": 8.251666666666668e-05,
      "loss": 0.3691,
      "step": 3050
    },
    {
      "epoch": 274.03314917127074,
      "grad_norm": 1.5521596670150757,
      "learning_rate": 8.168333333333333e-05,
      "loss": 0.3687,
      "step": 3100
    },
    {
      "epoch": 278.45303867403317,
      "grad_norm": 1.2638827562332153,
      "learning_rate": 8.085e-05,
      "loss": 0.3681,
      "step": 3150
    },
    {
      "epoch": 282.8729281767956,
      "grad_norm": 1.6849925518035889,
      "learning_rate": 8.001666666666667e-05,
      "loss": 0.3664,
      "step": 3200
    },
    {
      "epoch": 287.292817679558,
      "grad_norm": 1.9946844577789307,
      "learning_rate": 7.918333333333334e-05,
      "loss": 0.3671,
      "step": 3250
    },
    {
      "epoch": 291.71270718232046,
      "grad_norm": 1.3560407161712646,
      "learning_rate": 7.835000000000001e-05,
      "loss": 0.3631,
      "step": 3300
    },
    {
      "epoch": 296.1325966850829,
      "grad_norm": 1.4300700426101685,
      "learning_rate": 7.751666666666668e-05,
      "loss": 0.3616,
      "step": 3350
    },
    {
      "epoch": 300.5524861878453,
      "grad_norm": 1.0440477132797241,
      "learning_rate": 7.668333333333335e-05,
      "loss": 0.3627,
      "step": 3400
    },
    {
      "epoch": 304.97237569060775,
      "grad_norm": 0.78243088722229,
      "learning_rate": 7.585e-05,
      "loss": 0.3624,
      "step": 3450
    },
    {
      "epoch": 309.3922651933702,
      "grad_norm": 1.4992115497589111,
      "learning_rate": 7.501666666666667e-05,
      "loss": 0.3589,
      "step": 3500
    },
    {
      "epoch": 309.3922651933702,
      "eval_loss": 0.4111907482147217,
      "eval_runtime": 15.163,
      "eval_samples_per_second": 10.618,
      "eval_steps_per_second": 1.385,
      "step": 3500
    },
    {
      "epoch": 313.8121546961326,
      "grad_norm": 1.0604934692382812,
      "learning_rate": 7.418333333333334e-05,
      "loss": 0.3599,
      "step": 3550
    },
    {
      "epoch": 318.23204419889504,
      "grad_norm": 0.8896504640579224,
      "learning_rate": 7.335000000000001e-05,
      "loss": 0.361,
      "step": 3600
    },
    {
      "epoch": 322.65193370165747,
      "grad_norm": 1.3247913122177124,
      "learning_rate": 7.251666666666666e-05,
      "loss": 0.3575,
      "step": 3650
    },
    {
      "epoch": 327.0718232044199,
      "grad_norm": 0.6475388407707214,
      "learning_rate": 7.168333333333333e-05,
      "loss": 0.3575,
      "step": 3700
    },
    {
      "epoch": 331.49171270718233,
      "grad_norm": 0.6724014282226562,
      "learning_rate": 7.085e-05,
      "loss": 0.3558,
      "step": 3750
    },
    {
      "epoch": 335.91160220994476,
      "grad_norm": 1.0705429315567017,
      "learning_rate": 7.001666666666667e-05,
      "loss": 0.3564,
      "step": 3800
    },
    {
      "epoch": 340.3314917127072,
      "grad_norm": 1.0692503452301025,
      "learning_rate": 6.918333333333334e-05,
      "loss": 0.3601,
      "step": 3850
    },
    {
      "epoch": 344.7513812154696,
      "grad_norm": 0.8828334212303162,
      "learning_rate": 6.835000000000001e-05,
      "loss": 0.3548,
      "step": 3900
    },
    {
      "epoch": 349.17127071823205,
      "grad_norm": 0.6390587091445923,
      "learning_rate": 6.751666666666668e-05,
      "loss": 0.3529,
      "step": 3950
    },
    {
      "epoch": 353.5911602209945,
      "grad_norm": 0.7502044439315796,
      "learning_rate": 6.668333333333333e-05,
      "loss": 0.3544,
      "step": 4000
    },
    {
      "epoch": 353.5911602209945,
      "eval_loss": 0.4128935635089874,
      "eval_runtime": 16.0241,
      "eval_samples_per_second": 10.047,
      "eval_steps_per_second": 1.311,
      "step": 4000
    },
    {
      "epoch": 358.0110497237569,
      "grad_norm": 0.8119373917579651,
      "learning_rate": 6.585e-05,
      "loss": 0.3527,
      "step": 4050
    },
    {
      "epoch": 362.43093922651934,
      "grad_norm": 0.7546930313110352,
      "learning_rate": 6.501666666666667e-05,
      "loss": 0.352,
      "step": 4100
    },
    {
      "epoch": 366.8508287292818,
      "grad_norm": 1.1060476303100586,
      "learning_rate": 6.418333333333334e-05,
      "loss": 0.3514,
      "step": 4150
    },
    {
      "epoch": 371.2707182320442,
      "grad_norm": 1.3087377548217773,
      "learning_rate": 6.335e-05,
      "loss": 0.3508,
      "step": 4200
    },
    {
      "epoch": 375.69060773480663,
      "grad_norm": 0.7953481078147888,
      "learning_rate": 6.251666666666666e-05,
      "loss": 0.3493,
      "step": 4250
    },
    {
      "epoch": 380.11049723756906,
      "grad_norm": 1.1858787536621094,
      "learning_rate": 6.168333333333333e-05,
      "loss": 0.3493,
      "step": 4300
    },
    {
      "epoch": 384.5303867403315,
      "grad_norm": 0.9929729700088501,
      "learning_rate": 6.085000000000001e-05,
      "loss": 0.3485,
      "step": 4350
    },
    {
      "epoch": 388.9502762430939,
      "grad_norm": 0.8156938552856445,
      "learning_rate": 6.0016666666666664e-05,
      "loss": 0.347,
      "step": 4400
    },
    {
      "epoch": 393.37016574585635,
      "grad_norm": 1.0693029165267944,
      "learning_rate": 5.918333333333333e-05,
      "loss": 0.3489,
      "step": 4450
    },
    {
      "epoch": 397.7900552486188,
      "grad_norm": 1.1495543718338013,
      "learning_rate": 5.835e-05,
      "loss": 0.347,
      "step": 4500
    },
    {
      "epoch": 397.7900552486188,
      "eval_loss": 0.41448456048965454,
      "eval_runtime": 15.2027,
      "eval_samples_per_second": 10.59,
      "eval_steps_per_second": 1.381,
      "step": 4500
    },
    {
      "epoch": 402.2099447513812,
      "grad_norm": 0.9324333071708679,
      "learning_rate": 5.751666666666667e-05,
      "loss": 0.3473,
      "step": 4550
    },
    {
      "epoch": 406.62983425414365,
      "grad_norm": 0.6897173523902893,
      "learning_rate": 5.668333333333333e-05,
      "loss": 0.3441,
      "step": 4600
    },
    {
      "epoch": 411.0497237569061,
      "grad_norm": 1.1474602222442627,
      "learning_rate": 5.585e-05,
      "loss": 0.346,
      "step": 4650
    },
    {
      "epoch": 415.4696132596685,
      "grad_norm": 1.4295645952224731,
      "learning_rate": 5.501666666666667e-05,
      "loss": 0.3442,
      "step": 4700
    },
    {
      "epoch": 419.88950276243094,
      "grad_norm": 0.8301011323928833,
      "learning_rate": 5.4183333333333334e-05,
      "loss": 0.344,
      "step": 4750
    },
    {
      "epoch": 424.30939226519337,
      "grad_norm": 1.2894037961959839,
      "learning_rate": 5.335e-05,
      "loss": 0.3431,
      "step": 4800
    },
    {
      "epoch": 428.7292817679558,
      "grad_norm": 0.6597224473953247,
      "learning_rate": 5.251666666666667e-05,
      "loss": 0.3443,
      "step": 4850
    },
    {
      "epoch": 433.1491712707182,
      "grad_norm": 0.8772273063659668,
      "learning_rate": 5.168333333333334e-05,
      "loss": 0.3447,
      "step": 4900
    },
    {
      "epoch": 437.56906077348066,
      "grad_norm": 0.6241065263748169,
      "learning_rate": 5.0849999999999996e-05,
      "loss": 0.3412,
      "step": 4950
    },
    {
      "epoch": 441.9889502762431,
      "grad_norm": 0.8275649547576904,
      "learning_rate": 5.0016666666666665e-05,
      "loss": 0.3417,
      "step": 5000
    },
    {
      "epoch": 441.9889502762431,
      "eval_loss": 0.42017292976379395,
      "eval_runtime": 15.281,
      "eval_samples_per_second": 10.536,
      "eval_steps_per_second": 1.374,
      "step": 5000
    },
    {
      "epoch": 446.4088397790055,
      "grad_norm": 0.9026231169700623,
      "learning_rate": 4.9183333333333334e-05,
      "loss": 0.3417,
      "step": 5050
    },
    {
      "epoch": 450.82872928176795,
      "grad_norm": 1.248936414718628,
      "learning_rate": 4.835e-05,
      "loss": 0.3396,
      "step": 5100
    },
    {
      "epoch": 455.2486187845304,
      "grad_norm": 1.257866621017456,
      "learning_rate": 4.751666666666667e-05,
      "loss": 0.3414,
      "step": 5150
    },
    {
      "epoch": 459.6685082872928,
      "grad_norm": 1.029221534729004,
      "learning_rate": 4.6683333333333334e-05,
      "loss": 0.3396,
      "step": 5200
    },
    {
      "epoch": 464.08839779005524,
      "grad_norm": 1.1000345945358276,
      "learning_rate": 4.585e-05,
      "loss": 0.3392,
      "step": 5250
    },
    {
      "epoch": 468.50828729281767,
      "grad_norm": 0.7120194435119629,
      "learning_rate": 4.5016666666666665e-05,
      "loss": 0.3412,
      "step": 5300
    },
    {
      "epoch": 472.9281767955801,
      "grad_norm": 0.5869295597076416,
      "learning_rate": 4.4183333333333334e-05,
      "loss": 0.3398,
      "step": 5350
    },
    {
      "epoch": 477.34806629834253,
      "grad_norm": 0.8485609292984009,
      "learning_rate": 4.335e-05,
      "loss": 0.3385,
      "step": 5400
    },
    {
      "epoch": 481.76795580110496,
      "grad_norm": 0.520220160484314,
      "learning_rate": 4.251666666666667e-05,
      "loss": 0.3366,
      "step": 5450
    },
    {
      "epoch": 486.1878453038674,
      "grad_norm": 0.6742538809776306,
      "learning_rate": 4.1683333333333335e-05,
      "loss": 0.3372,
      "step": 5500
    },
    {
      "epoch": 486.1878453038674,
      "eval_loss": 0.41964003443717957,
      "eval_runtime": 16.3724,
      "eval_samples_per_second": 9.834,
      "eval_steps_per_second": 1.283,
      "step": 5500
    },
    {
      "epoch": 490.6077348066298,
      "grad_norm": 0.8028594255447388,
      "learning_rate": 4.085e-05,
      "loss": 0.3378,
      "step": 5550
    },
    {
      "epoch": 495.02762430939225,
      "grad_norm": 0.6300519704818726,
      "learning_rate": 4.0016666666666666e-05,
      "loss": 0.3373,
      "step": 5600
    },
    {
      "epoch": 499.4475138121547,
      "grad_norm": 0.6699241399765015,
      "learning_rate": 3.9183333333333335e-05,
      "loss": 0.3365,
      "step": 5650
    },
    {
      "epoch": 503.8674033149171,
      "grad_norm": 0.6375143527984619,
      "learning_rate": 3.8350000000000004e-05,
      "loss": 0.3358,
      "step": 5700
    },
    {
      "epoch": 508.28729281767954,
      "grad_norm": 0.6259489059448242,
      "learning_rate": 3.7516666666666666e-05,
      "loss": 0.3363,
      "step": 5750
    },
    {
      "epoch": 512.707182320442,
      "grad_norm": 0.5885080695152283,
      "learning_rate": 3.6683333333333335e-05,
      "loss": 0.3353,
      "step": 5800
    },
    {
      "epoch": 517.1270718232045,
      "grad_norm": 0.6459413766860962,
      "learning_rate": 3.585e-05,
      "loss": 0.3344,
      "step": 5850
    },
    {
      "epoch": 521.5469613259669,
      "grad_norm": 0.6893390417098999,
      "learning_rate": 3.501666666666667e-05,
      "loss": 0.3353,
      "step": 5900
    },
    {
      "epoch": 525.9668508287293,
      "grad_norm": 0.644479513168335,
      "learning_rate": 3.4183333333333335e-05,
      "loss": 0.3354,
      "step": 5950
    },
    {
      "epoch": 530.3867403314918,
      "grad_norm": 0.5544880628585815,
      "learning_rate": 3.3350000000000004e-05,
      "loss": 0.3335,
      "step": 6000
    },
    {
      "epoch": 530.3867403314918,
      "eval_loss": 0.4183831810951233,
      "eval_runtime": 15.2687,
      "eval_samples_per_second": 10.544,
      "eval_steps_per_second": 1.375,
      "step": 6000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 728,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.358564139384107e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}