{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "global_step": 15900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.31,
      "learning_rate": 4.75e-06,
      "loss": 19.9321,
      "step": 100
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.750000000000002e-06,
      "loss": 4.3153,
      "step": 200
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.475e-05,
      "loss": 4.0475,
      "step": 300
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.9750000000000002e-05,
      "loss": 3.3541,
      "step": 400
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.4750000000000002e-05,
      "loss": 2.4695,
      "step": 500
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.975e-05,
      "loss": 2.176,
      "step": 600
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.475e-05,
      "loss": 2.0514,
      "step": 700
    },
    {
      "epoch": 2.52,
      "learning_rate": 3.9750000000000004e-05,
      "loss": 1.9917,
      "step": 800
    },
    {
      "epoch": 2.83,
      "learning_rate": 4.4750000000000004e-05,
      "loss": 1.9838,
      "step": 900
    },
    {
      "epoch": 3.14,
      "learning_rate": 4.975e-05,
      "loss": 1.9321,
      "step": 1000
    },
    {
      "epoch": 3.14,
      "eval_cer": 0.2635160121292804,
      "eval_loss": 1.0115777254104614,
      "eval_runtime": 309.9157,
      "eval_samples_per_second": 14.41,
      "eval_steps_per_second": 1.804,
      "eval_wer": 0.9823226672633699,
      "step": 1000
    },
    {
      "epoch": 3.46,
      "learning_rate": 5.475e-05,
      "loss": 1.9148,
      "step": 1100
    },
    {
      "epoch": 3.77,
      "learning_rate": 5.975000000000001e-05,
      "loss": 1.89,
      "step": 1200
    },
    {
      "epoch": 4.09,
      "learning_rate": 6.475e-05,
      "loss": 1.8953,
      "step": 1300
    },
    {
      "epoch": 4.4,
      "learning_rate": 6.975e-05,
      "loss": 1.9038,
      "step": 1400
    },
    {
      "epoch": 4.72,
      "learning_rate": 7.475000000000001e-05,
      "loss": 1.9498,
      "step": 1500
    },
    {
      "epoch": 5.03,
      "learning_rate": 7.975e-05,
      "loss": 2.0113,
      "step": 1600
    },
    {
      "epoch": 5.35,
      "learning_rate": 8.475000000000001e-05,
      "loss": 2.0294,
      "step": 1700
    },
    {
      "epoch": 5.66,
      "learning_rate": 8.975e-05,
      "loss": 2.0839,
      "step": 1800
    },
    {
      "epoch": 5.97,
      "learning_rate": 9.475e-05,
      "loss": 2.056,
      "step": 1900
    },
    {
      "epoch": 6.29,
      "learning_rate": 9.975000000000001e-05,
      "loss": 2.0934,
      "step": 2000
    },
    {
      "epoch": 6.29,
      "eval_cer": 0.29319823484456276,
      "eval_loss": 1.1241235733032227,
      "eval_runtime": 306.3033,
      "eval_samples_per_second": 14.58,
      "eval_steps_per_second": 1.825,
      "eval_wer": 1.0221526068471694,
      "step": 2000
    },
    {
      "epoch": 6.6,
      "learning_rate": 9.931654676258994e-05,
      "loss": 2.0845,
      "step": 2100
    },
    {
      "epoch": 6.92,
      "learning_rate": 9.859712230215827e-05,
      "loss": 2.0873,
      "step": 2200
    },
    {
      "epoch": 7.23,
      "learning_rate": 9.787769784172662e-05,
      "loss": 2.0782,
      "step": 2300
    },
    {
      "epoch": 7.55,
      "learning_rate": 9.715827338129497e-05,
      "loss": 2.1767,
      "step": 2400
    },
    {
      "epoch": 7.86,
      "learning_rate": 9.643884892086331e-05,
      "loss": 2.137,
      "step": 2500
    },
    {
      "epoch": 8.18,
      "learning_rate": 9.571942446043166e-05,
      "loss": 2.113,
      "step": 2600
    },
    {
      "epoch": 8.49,
      "learning_rate": 9.5e-05,
      "loss": 2.0582,
      "step": 2700
    },
    {
      "epoch": 8.81,
      "learning_rate": 9.428057553956835e-05,
      "loss": 2.0384,
      "step": 2800
    },
    {
      "epoch": 9.12,
      "learning_rate": 9.35611510791367e-05,
      "loss": 2.0837,
      "step": 2900
    },
    {
      "epoch": 9.43,
      "learning_rate": 9.284172661870504e-05,
      "loss": 2.0389,
      "step": 3000
    },
    {
      "epoch": 9.43,
      "eval_cer": 0.3344838070819877,
      "eval_loss": 1.2066822052001953,
      "eval_runtime": 306.7837,
      "eval_samples_per_second": 14.557,
      "eval_steps_per_second": 1.822,
      "eval_wer": 1.1324681136719623,
      "step": 3000
    },
    {
      "epoch": 9.75,
      "learning_rate": 9.212230215827339e-05,
      "loss": 2.0286,
      "step": 3100
    },
    {
      "epoch": 10.06,
      "learning_rate": 9.140287769784172e-05,
      "loss": 2.0073,
      "step": 3200
    },
    {
      "epoch": 10.38,
      "learning_rate": 9.068345323741007e-05,
      "loss": 1.9767,
      "step": 3300
    },
    {
      "epoch": 10.69,
      "learning_rate": 8.996402877697842e-05,
      "loss": 1.9924,
      "step": 3400
    },
    {
      "epoch": 11.01,
      "learning_rate": 8.924460431654677e-05,
      "loss": 2.003,
      "step": 3500
    },
    {
      "epoch": 11.32,
      "learning_rate": 8.852517985611512e-05,
      "loss": 1.9925,
      "step": 3600
    },
    {
      "epoch": 11.64,
      "learning_rate": 8.780575539568345e-05,
      "loss": 1.9533,
      "step": 3700
    },
    {
      "epoch": 11.95,
      "learning_rate": 8.70863309352518e-05,
      "loss": 1.9474,
      "step": 3800
    },
    {
      "epoch": 12.26,
      "learning_rate": 8.636690647482015e-05,
      "loss": 1.949,
      "step": 3900
    },
    {
      "epoch": 12.58,
      "learning_rate": 8.564748201438849e-05,
      "loss": 1.9569,
      "step": 4000
    },
    {
      "epoch": 12.58,
      "eval_cer": 0.26565260623392417,
      "eval_loss": 0.9817910194396973,
      "eval_runtime": 306.1977,
      "eval_samples_per_second": 14.585,
      "eval_steps_per_second": 1.826,
      "eval_wer": 1.0089505482210785,
      "step": 4000
    },
    {
      "epoch": 12.89,
      "learning_rate": 8.492805755395684e-05,
      "loss": 1.9532,
      "step": 4100
    },
    {
      "epoch": 13.21,
      "learning_rate": 8.420863309352519e-05,
      "loss": 1.9614,
      "step": 4200
    },
    {
      "epoch": 13.52,
      "learning_rate": 8.348920863309353e-05,
      "loss": 1.9632,
      "step": 4300
    },
    {
      "epoch": 13.84,
      "learning_rate": 8.276978417266188e-05,
      "loss": 1.9463,
      "step": 4400
    },
    {
      "epoch": 14.15,
      "learning_rate": 8.205035971223022e-05,
      "loss": 1.9153,
      "step": 4500
    },
    {
      "epoch": 14.47,
      "learning_rate": 8.133093525179857e-05,
      "loss": 1.8858,
      "step": 4600
    },
    {
      "epoch": 14.78,
      "learning_rate": 8.06115107913669e-05,
      "loss": 1.873,
      "step": 4700
    },
    {
      "epoch": 15.09,
      "learning_rate": 7.989208633093525e-05,
      "loss": 1.8554,
      "step": 4800
    },
    {
      "epoch": 15.41,
      "learning_rate": 7.91726618705036e-05,
      "loss": 1.8789,
      "step": 4900
    },
    {
      "epoch": 15.72,
      "learning_rate": 7.845323741007195e-05,
      "loss": 1.8409,
      "step": 5000
    },
    {
      "epoch": 15.72,
      "eval_cer": 0.3740518863660643,
      "eval_loss": 1.0381526947021484,
      "eval_runtime": 306.1278,
      "eval_samples_per_second": 14.589,
      "eval_steps_per_second": 1.826,
      "eval_wer": 1.6480196912060863,
      "step": 5000
    },
    {
      "epoch": 16.04,
      "learning_rate": 7.77338129496403e-05,
      "loss": 1.8599,
      "step": 5100
    },
    {
      "epoch": 16.35,
      "learning_rate": 7.701438848920864e-05,
      "loss": 1.8393,
      "step": 5200
    },
    {
      "epoch": 16.67,
      "learning_rate": 7.629496402877698e-05,
      "loss": 1.8435,
      "step": 5300
    },
    {
      "epoch": 16.98,
      "learning_rate": 7.557553956834533e-05,
      "loss": 1.8443,
      "step": 5400
    },
    {
      "epoch": 17.3,
      "learning_rate": 7.485611510791367e-05,
      "loss": 1.7856,
      "step": 5500
    },
    {
      "epoch": 17.61,
      "learning_rate": 7.413669064748202e-05,
      "loss": 1.7948,
      "step": 5600
    },
    {
      "epoch": 17.92,
      "learning_rate": 7.341726618705035e-05,
      "loss": 1.8169,
      "step": 5700
    },
    {
      "epoch": 18.24,
      "learning_rate": 7.269784172661872e-05,
      "loss": 1.7683,
      "step": 5800
    },
    {
      "epoch": 18.55,
      "learning_rate": 7.197841726618706e-05,
      "loss": 1.7461,
      "step": 5900
    },
    {
      "epoch": 18.87,
      "learning_rate": 7.12589928057554e-05,
      "loss": 1.7449,
      "step": 6000
    },
    {
      "epoch": 18.87,
      "eval_cer": 0.34543796070310384,
      "eval_loss": 0.9962468147277832,
      "eval_runtime": 306.247,
      "eval_samples_per_second": 14.583,
      "eval_steps_per_second": 1.825,
      "eval_wer": 1.6267621391810247,
      "step": 6000
    },
    {
      "epoch": 19.18,
      "learning_rate": 7.053956834532375e-05,
      "loss": 1.7904,
      "step": 6100
    },
    {
      "epoch": 19.5,
      "learning_rate": 6.982014388489208e-05,
      "loss": 1.7315,
      "step": 6200
    },
    {
      "epoch": 19.81,
      "learning_rate": 6.910071942446043e-05,
      "loss": 1.7591,
      "step": 6300
    },
    {
      "epoch": 20.13,
      "learning_rate": 6.83884892086331e-05,
      "loss": 1.7338,
      "step": 6400
    },
    {
      "epoch": 20.44,
      "learning_rate": 6.766906474820143e-05,
      "loss": 1.7211,
      "step": 6500
    },
    {
      "epoch": 20.75,
      "learning_rate": 6.694964028776978e-05,
      "loss": 1.7398,
      "step": 6600
    },
    {
      "epoch": 21.07,
      "learning_rate": 6.623021582733813e-05,
      "loss": 1.7244,
      "step": 6700
    },
    {
      "epoch": 21.38,
      "learning_rate": 6.551079136690648e-05,
      "loss": 1.7823,
      "step": 6800
    },
    {
      "epoch": 21.7,
      "learning_rate": 6.479136690647483e-05,
      "loss": 1.7602,
      "step": 6900
    },
    {
      "epoch": 22.01,
      "learning_rate": 6.407194244604317e-05,
      "loss": 1.7349,
      "step": 7000
    },
    {
      "epoch": 22.01,
      "eval_cer": 0.25974410176762075,
      "eval_loss": 0.9559761881828308,
      "eval_runtime": 307.762,
      "eval_samples_per_second": 14.511,
      "eval_steps_per_second": 1.816,
      "eval_wer": 0.9850078317296934,
      "step": 7000
    },
    {
      "epoch": 22.33,
      "learning_rate": 6.335251798561152e-05,
      "loss": 1.7176,
      "step": 7100
    },
    {
      "epoch": 22.64,
      "learning_rate": 6.263309352517986e-05,
      "loss": 1.7154,
      "step": 7200
    },
    {
      "epoch": 22.96,
      "learning_rate": 6.19136690647482e-05,
      "loss": 1.7136,
      "step": 7300
    },
    {
      "epoch": 23.27,
      "learning_rate": 6.119424460431655e-05,
      "loss": 1.6846,
      "step": 7400
    },
    {
      "epoch": 23.58,
      "learning_rate": 6.047482014388489e-05,
      "loss": 1.6878,
      "step": 7500
    },
    {
      "epoch": 23.9,
      "learning_rate": 5.975539568345324e-05,
      "loss": 1.7066,
      "step": 7600
    },
    {
      "epoch": 24.21,
      "learning_rate": 5.903597122302159e-05,
      "loss": 1.6664,
      "step": 7700
    },
    {
      "epoch": 24.53,
      "learning_rate": 5.831654676258993e-05,
      "loss": 1.6676,
      "step": 7800
    },
    {
      "epoch": 24.84,
      "learning_rate": 5.759712230215828e-05,
      "loss": 1.6377,
      "step": 7900
    },
    {
      "epoch": 25.16,
      "learning_rate": 5.6877697841726616e-05,
      "loss": 1.6857,
      "step": 8000
    },
    {
      "epoch": 25.16,
      "eval_cer": 0.23246143858524598,
      "eval_loss": 0.8722429275512695,
      "eval_runtime": 307.5785,
      "eval_samples_per_second": 14.52,
      "eval_steps_per_second": 1.817,
      "eval_wer": 0.9668829715820094,
      "step": 8000
    },
    {
      "epoch": 25.47,
      "learning_rate": 5.615827338129497e-05,
      "loss": 1.6433,
      "step": 8100
    },
    {
      "epoch": 25.79,
      "learning_rate": 5.543884892086331e-05,
      "loss": 1.6355,
      "step": 8200
    },
    {
      "epoch": 26.1,
      "learning_rate": 5.4719424460431656e-05,
      "loss": 1.6345,
      "step": 8300
    },
    {
      "epoch": 26.42,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.6166,
      "step": 8400
    },
    {
      "epoch": 26.73,
      "learning_rate": 5.328057553956835e-05,
      "loss": 1.6357,
      "step": 8500
    },
    {
      "epoch": 27.04,
      "learning_rate": 5.2561151079136696e-05,
      "loss": 1.6294,
      "step": 8600
    },
    {
      "epoch": 27.36,
      "learning_rate": 5.1848920863309355e-05,
      "loss": 1.5973,
      "step": 8700
    },
    {
      "epoch": 27.67,
      "learning_rate": 5.11294964028777e-05,
      "loss": 1.5881,
      "step": 8800
    },
    {
      "epoch": 27.99,
      "learning_rate": 5.0410071942446046e-05,
      "loss": 1.6079,
      "step": 8900
    },
    {
      "epoch": 28.3,
      "learning_rate": 4.969064748201439e-05,
      "loss": 1.5637,
      "step": 9000
    },
    {
      "epoch": 28.3,
      "eval_cer": 0.3422495048854046,
      "eval_loss": 0.7635577321052551,
      "eval_runtime": 305.1268,
      "eval_samples_per_second": 14.637,
      "eval_steps_per_second": 1.832,
      "eval_wer": 1.8071156858357575,
      "step": 9000
    },
    {
      "epoch": 28.62,
      "learning_rate": 4.897122302158274e-05,
      "loss": 1.5611,
      "step": 9100
    },
    {
      "epoch": 28.93,
      "learning_rate": 4.825179856115108e-05,
      "loss": 1.5648,
      "step": 9200
    },
    {
      "epoch": 29.25,
      "learning_rate": 4.753237410071943e-05,
      "loss": 1.5724,
      "step": 9300
    },
    {
      "epoch": 29.56,
      "learning_rate": 4.681294964028777e-05,
      "loss": 1.5375,
      "step": 9400
    },
    {
      "epoch": 29.87,
      "learning_rate": 4.609352517985612e-05,
      "loss": 1.5468,
      "step": 9500
    },
    {
      "epoch": 30.19,
      "learning_rate": 4.537410071942446e-05,
      "loss": 1.5512,
      "step": 9600
    },
    {
      "epoch": 30.5,
      "learning_rate": 4.4654676258992804e-05,
      "loss": 1.5394,
      "step": 9700
    },
    {
      "epoch": 30.82,
      "learning_rate": 4.393525179856115e-05,
      "loss": 1.5147,
      "step": 9800
    },
    {
      "epoch": 31.13,
      "learning_rate": 4.32158273381295e-05,
      "loss": 1.5102,
      "step": 9900
    },
    {
      "epoch": 31.45,
      "learning_rate": 4.2496402877697845e-05,
      "loss": 1.5088,
      "step": 10000
    },
    {
      "epoch": 31.45,
      "eval_cer": 0.20850693160433564,
      "eval_loss": 0.7290384769439697,
      "eval_runtime": 306.7599,
      "eval_samples_per_second": 14.559,
      "eval_steps_per_second": 1.822,
      "eval_wer": 1.0398299395837995,
      "step": 10000
    },
    {
      "epoch": 31.76,
      "learning_rate": 4.177697841726619e-05,
      "loss": 1.503,
      "step": 10100
    },
    {
      "epoch": 32.08,
      "learning_rate": 4.1057553956834536e-05,
      "loss": 1.4922,
      "step": 10200
    },
    {
      "epoch": 32.39,
      "learning_rate": 4.0338129496402885e-05,
      "loss": 1.5002,
      "step": 10300
    },
    {
      "epoch": 32.7,
      "learning_rate": 3.961870503597123e-05,
      "loss": 1.4877,
      "step": 10400
    },
    {
      "epoch": 33.02,
      "learning_rate": 3.889928057553957e-05,
      "loss": 1.5061,
      "step": 10500
    },
    {
      "epoch": 33.33,
      "learning_rate": 3.817985611510791e-05,
      "loss": 1.4751,
      "step": 10600
    },
    {
      "epoch": 33.65,
      "learning_rate": 3.746043165467626e-05,
      "loss": 1.4671,
      "step": 10700
    },
    {
      "epoch": 33.96,
      "learning_rate": 3.674820143884892e-05,
      "loss": 1.4496,
      "step": 10800
    },
    {
      "epoch": 34.28,
      "learning_rate": 3.602877697841727e-05,
      "loss": 1.4533,
      "step": 10900
    },
    {
      "epoch": 34.59,
      "learning_rate": 3.530935251798561e-05,
      "loss": 1.4298,
      "step": 11000
    },
    {
      "epoch": 34.59,
      "eval_cer": 0.21042164862888182,
      "eval_loss": 0.7575956583023071,
      "eval_runtime": 307.6821,
      "eval_samples_per_second": 14.515,
      "eval_steps_per_second": 1.817,
      "eval_wer": 1.0165585142089952,
      "step": 11000
    },
    {
      "epoch": 34.91,
      "learning_rate": 3.458992805755396e-05,
      "loss": 1.4401,
      "step": 11100
    },
    {
      "epoch": 35.22,
      "learning_rate": 3.38705035971223e-05,
      "loss": 1.4155,
      "step": 11200
    },
    {
      "epoch": 35.53,
      "learning_rate": 3.315107913669065e-05,
      "loss": 1.4119,
      "step": 11300
    },
    {
      "epoch": 35.85,
      "learning_rate": 3.243165467625899e-05,
      "loss": 1.4156,
      "step": 11400
    },
    {
      "epoch": 36.16,
      "learning_rate": 3.1712230215827335e-05,
      "loss": 1.4316,
      "step": 11500
    },
    {
      "epoch": 36.48,
      "learning_rate": 3.0992805755395684e-05,
      "loss": 1.3815,
      "step": 11600
    },
    {
      "epoch": 36.79,
      "learning_rate": 3.027338129496403e-05,
      "loss": 1.4087,
      "step": 11700
    },
    {
      "epoch": 37.11,
      "learning_rate": 2.9553956834532376e-05,
      "loss": 1.3889,
      "step": 11800
    },
    {
      "epoch": 37.42,
      "learning_rate": 2.883453237410072e-05,
      "loss": 1.3722,
      "step": 11900
    },
    {
      "epoch": 37.74,
      "learning_rate": 2.8115107913669063e-05,
      "loss": 1.3716,
      "step": 12000
    },
    {
      "epoch": 37.74,
      "eval_cer": 0.21379089317851244,
      "eval_loss": 0.7045890092849731,
      "eval_runtime": 307.0323,
      "eval_samples_per_second": 14.546,
      "eval_steps_per_second": 1.821,
      "eval_wer": 1.1275453121503691,
      "step": 12000
    },
    {
      "epoch": 38.05,
      "learning_rate": 2.7395683453237412e-05,
      "loss": 1.3782,
      "step": 12100
    },
    {
      "epoch": 38.36,
      "learning_rate": 2.6676258992805758e-05,
      "loss": 1.3496,
      "step": 12200
    },
    {
      "epoch": 38.68,
      "learning_rate": 2.5956834532374104e-05,
      "loss": 1.3645,
      "step": 12300
    },
    {
      "epoch": 38.99,
      "learning_rate": 2.5237410071942446e-05,
      "loss": 1.4,
      "step": 12400
    },
    {
      "epoch": 39.31,
      "learning_rate": 2.4517985611510795e-05,
      "loss": 1.3441,
      "step": 12500
    },
    {
      "epoch": 39.62,
      "learning_rate": 2.3798561151079137e-05,
      "loss": 1.3512,
      "step": 12600
    },
    {
      "epoch": 39.94,
      "learning_rate": 2.3079136690647483e-05,
      "loss": 1.3429,
      "step": 12700
    },
    {
      "epoch": 40.25,
      "learning_rate": 2.2366906474820145e-05,
      "loss": 1.3285,
      "step": 12800
    },
    {
      "epoch": 40.57,
      "learning_rate": 2.164748201438849e-05,
      "loss": 1.3222,
      "step": 12900
    },
    {
      "epoch": 40.88,
      "learning_rate": 2.0928057553956836e-05,
      "loss": 1.3185,
      "step": 13000
    },
    {
      "epoch": 40.88,
      "eval_cer": 0.21792438100403488,
      "eval_loss": 0.7010895013809204,
      "eval_runtime": 308.0387,
      "eval_samples_per_second": 14.498,
      "eval_steps_per_second": 1.815,
      "eval_wer": 1.1696128887894384,
      "step": 13000
    },
    {
      "epoch": 41.19,
      "learning_rate": 2.02158273381295e-05,
      "loss": 1.3151,
      "step": 13100
    },
    {
      "epoch": 41.51,
      "learning_rate": 1.949640287769784e-05,
      "loss": 1.2889,
      "step": 13200
    },
    {
      "epoch": 41.82,
      "learning_rate": 1.877697841726619e-05,
      "loss": 1.3339,
      "step": 13300
    },
    {
      "epoch": 42.14,
      "learning_rate": 1.8057553956834532e-05,
      "loss": 1.3292,
      "step": 13400
    },
    {
      "epoch": 42.45,
      "learning_rate": 1.7338129496402878e-05,
      "loss": 1.2849,
      "step": 13500
    },
    {
      "epoch": 42.77,
      "learning_rate": 1.6618705035971224e-05,
      "loss": 1.3046,
      "step": 13600
    },
    {
      "epoch": 43.08,
      "learning_rate": 1.589928057553957e-05,
      "loss": 1.2722,
      "step": 13700
    },
    {
      "epoch": 43.4,
      "learning_rate": 1.5179856115107915e-05,
      "loss": 1.2704,
      "step": 13800
    },
    {
      "epoch": 43.71,
      "learning_rate": 1.446043165467626e-05,
      "loss": 1.279,
      "step": 13900
    },
    {
      "epoch": 44.03,
      "learning_rate": 1.3741007194244604e-05,
      "loss": 1.28,
      "step": 14000
    },
    {
      "epoch": 44.03,
      "eval_cer": 0.2023847677275678,
      "eval_loss": 0.675375759601593,
      "eval_runtime": 307.5046,
      "eval_samples_per_second": 14.523,
      "eval_steps_per_second": 1.818,
      "eval_wer": 1.1315730588498545,
      "step": 14000
    },
    {
      "epoch": 44.34,
      "learning_rate": 1.3021582733812952e-05,
      "loss": 1.2604,
      "step": 14100
    },
    {
      "epoch": 44.65,
      "learning_rate": 1.2302158273381296e-05,
      "loss": 1.2631,
      "step": 14200
    },
    {
      "epoch": 44.97,
      "learning_rate": 1.1582733812949641e-05,
      "loss": 1.2742,
      "step": 14300
    },
    {
      "epoch": 45.28,
      "learning_rate": 1.0863309352517987e-05,
      "loss": 1.2552,
      "step": 14400
    },
    {
      "epoch": 45.6,
      "learning_rate": 1.0143884892086332e-05,
      "loss": 1.2328,
      "step": 14500
    },
    {
      "epoch": 45.91,
      "learning_rate": 9.424460431654676e-06,
      "loss": 1.2562,
      "step": 14600
    },
    {
      "epoch": 46.23,
      "learning_rate": 8.705035971223022e-06,
      "loss": 1.2239,
      "step": 14700
    },
    {
      "epoch": 46.54,
      "learning_rate": 7.985611510791367e-06,
      "loss": 1.2502,
      "step": 14800
    },
    {
      "epoch": 46.86,
      "learning_rate": 7.266187050359713e-06,
      "loss": 1.2265,
      "step": 14900
    },
    {
      "epoch": 47.17,
      "learning_rate": 6.546762589928059e-06,
      "loss": 1.2368,
      "step": 15000
    },
    {
      "epoch": 47.17,
      "eval_cer": 0.19226881640904273,
      "eval_loss": 0.6925497055053711,
      "eval_runtime": 307.2836,
      "eval_samples_per_second": 14.534,
      "eval_steps_per_second": 1.819,
      "eval_wer": 1.0516894159767285,
      "step": 15000
    },
    {
      "epoch": 47.48,
      "learning_rate": 5.8273381294964035e-06,
      "loss": 1.2265,
      "step": 15100
    },
    {
      "epoch": 47.8,
      "learning_rate": 5.107913669064748e-06,
      "loss": 1.2267,
      "step": 15200
    },
    {
      "epoch": 48.11,
      "learning_rate": 4.388489208633094e-06,
      "loss": 1.2267,
      "step": 15300
    },
    {
      "epoch": 48.43,
      "learning_rate": 3.669064748201439e-06,
      "loss": 1.2095,
      "step": 15400
    },
    {
      "epoch": 48.74,
      "learning_rate": 2.9496402877697846e-06,
      "loss": 1.1985,
      "step": 15500
    },
    {
      "epoch": 49.06,
      "learning_rate": 2.23021582733813e-06,
      "loss": 1.2141,
      "step": 15600
    },
    {
      "epoch": 49.37,
      "learning_rate": 1.510791366906475e-06,
      "loss": 1.2087,
      "step": 15700
    },
    {
      "epoch": 49.69,
      "learning_rate": 7.913669064748202e-07,
      "loss": 1.2109,
      "step": 15800
    },
    {
      "epoch": 50.0,
      "learning_rate": 7.913669064748202e-08,
      "loss": 1.2123,
      "step": 15900
    },
    {
      "epoch": 50.0,
      "step": 15900,
      "total_flos": 2.3890518741171333e+20,
      "train_loss": 1.801840565999349,
      "train_runtime": 49502.0749,
      "train_samples_per_second": 10.254,
      "train_steps_per_second": 0.321
    }
  ],
  "max_steps": 15900,
  "num_train_epochs": 50,
  "total_flos": 2.3890518741171333e+20,
  "trial_name": null,
  "trial_params": null
}