{
  "best_metric": 13.085066864784547,
  "best_model_checkpoint": "./whisper-medium-el/checkpoint-1000",
  "epoch": 9.0382,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 8.8645,
      "step": 25
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.000000000000001e-07,
      "loss": 5.8694,
      "step": 50
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 3.1568,
      "step": 75
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.8800000000000002e-06,
      "loss": 1.9833,
      "step": 100
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.38e-06,
      "loss": 1.4934,
      "step": 125
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.88e-06,
      "loss": 1.1115,
      "step": 150
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.9083,
      "step": 175
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.88e-06,
      "loss": 0.6967,
      "step": 200
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.38e-06,
      "loss": 0.5981,
      "step": 225
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.880000000000001e-06,
      "loss": 0.5506,
      "step": 250
    },
    {
      "epoch": 1.01,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.453,
      "step": 275
    },
    {
      "epoch": 1.02,
      "learning_rate": 5.8800000000000005e-06,
      "loss": 0.3967,
      "step": 300
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.4095,
      "step": 325
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.88e-06,
      "loss": 0.3197,
      "step": 350
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.3176,
      "step": 375
    },
    {
      "epoch": 1.04,
      "learning_rate": 7.88e-06,
      "loss": 0.278,
      "step": 400
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.2536,
      "step": 425
    },
    {
      "epoch": 2.01,
      "learning_rate": 8.880000000000001e-06,
      "loss": 0.2529,
      "step": 450
    },
    {
      "epoch": 2.01,
      "learning_rate": 9.38e-06,
      "loss": 0.2158,
      "step": 475
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.88e-06,
      "loss": 0.204,
      "step": 500
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.957777777777779e-06,
      "loss": 0.21,
      "step": 525
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.902222222222223e-06,
      "loss": 0.1758,
      "step": 550
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.846666666666668e-06,
      "loss": 0.1668,
      "step": 575
    },
    {
      "epoch": 2.04,
      "learning_rate": 9.791111111111112e-06,
      "loss": 0.1475,
      "step": 600
    },
    {
      "epoch": 3.0,
      "learning_rate": 9.735555555555556e-06,
      "loss": 0.1416,
      "step": 625
    },
    {
      "epoch": 3.01,
      "learning_rate": 9.68e-06,
      "loss": 0.138,
      "step": 650
    },
    {
      "epoch": 3.01,
      "learning_rate": 9.624444444444445e-06,
      "loss": 0.1225,
      "step": 675
    },
    {
      "epoch": 3.02,
      "learning_rate": 9.56888888888889e-06,
      "loss": 0.1156,
      "step": 700
    },
    {
      "epoch": 3.02,
      "learning_rate": 9.513333333333334e-06,
      "loss": 0.1093,
      "step": 725
    },
    {
      "epoch": 3.03,
      "learning_rate": 9.457777777777778e-06,
      "loss": 0.1041,
      "step": 750
    },
    {
      "epoch": 3.03,
      "learning_rate": 9.402222222222222e-06,
      "loss": 0.0971,
      "step": 775
    },
    {
      "epoch": 3.04,
      "learning_rate": 9.346666666666666e-06,
      "loss": 0.0875,
      "step": 800
    },
    {
      "epoch": 4.0,
      "learning_rate": 9.291111111111112e-06,
      "loss": 0.0837,
      "step": 825
    },
    {
      "epoch": 4.01,
      "learning_rate": 9.235555555555556e-06,
      "loss": 0.0826,
      "step": 850
    },
    {
      "epoch": 4.01,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0737,
      "step": 875
    },
    {
      "epoch": 4.02,
      "learning_rate": 9.124444444444444e-06,
      "loss": 0.0717,
      "step": 900
    },
    {
      "epoch": 4.02,
      "learning_rate": 9.06888888888889e-06,
      "loss": 0.0707,
      "step": 925
    },
    {
      "epoch": 4.03,
      "learning_rate": 9.013333333333334e-06,
      "loss": 0.0647,
      "step": 950
    },
    {
      "epoch": 4.03,
      "learning_rate": 8.957777777777778e-06,
      "loss": 0.0593,
      "step": 975
    },
    {
      "epoch": 4.04,
      "learning_rate": 8.902222222222224e-06,
      "loss": 0.0582,
      "step": 1000
    },
    {
      "epoch": 4.04,
      "eval_loss": 0.3256979286670685,
      "eval_runtime": 1207.5418,
      "eval_samples_per_second": 1.405,
      "eval_steps_per_second": 0.088,
      "eval_wer": 13.085066864784547,
      "step": 1000
    },
    {
      "epoch": 5.0,
      "learning_rate": 8.846666666666668e-06,
      "loss": 0.0559,
      "step": 1025
    },
    {
      "epoch": 5.01,
      "learning_rate": 8.791111111111112e-06,
      "loss": 0.0549,
      "step": 1050
    },
    {
      "epoch": 5.01,
      "learning_rate": 8.735555555555556e-06,
      "loss": 0.0474,
      "step": 1075
    },
    {
      "epoch": 5.02,
      "learning_rate": 8.68e-06,
      "loss": 0.046,
      "step": 1100
    },
    {
      "epoch": 5.02,
      "learning_rate": 8.624444444444446e-06,
      "loss": 0.0518,
      "step": 1125
    },
    {
      "epoch": 5.03,
      "learning_rate": 8.56888888888889e-06,
      "loss": 0.042,
      "step": 1150
    },
    {
      "epoch": 5.03,
      "learning_rate": 8.513333333333335e-06,
      "loss": 0.0464,
      "step": 1175
    },
    {
      "epoch": 5.04,
      "learning_rate": 8.457777777777778e-06,
      "loss": 0.0391,
      "step": 1200
    },
    {
      "epoch": 6.0,
      "learning_rate": 8.402222222222223e-06,
      "loss": 0.0383,
      "step": 1225
    },
    {
      "epoch": 6.01,
      "learning_rate": 8.346666666666668e-06,
      "loss": 0.036,
      "step": 1250
    },
    {
      "epoch": 6.01,
      "learning_rate": 8.291111111111112e-06,
      "loss": 0.0326,
      "step": 1275
    },
    {
      "epoch": 6.02,
      "learning_rate": 8.235555555555557e-06,
      "loss": 0.033,
      "step": 1300
    },
    {
      "epoch": 6.02,
      "learning_rate": 8.18e-06,
      "loss": 0.0357,
      "step": 1325
    },
    {
      "epoch": 6.03,
      "learning_rate": 8.124444444444445e-06,
      "loss": 0.0321,
      "step": 1350
    },
    {
      "epoch": 6.03,
      "learning_rate": 8.06888888888889e-06,
      "loss": 0.0278,
      "step": 1375
    },
    {
      "epoch": 6.04,
      "learning_rate": 8.013333333333333e-06,
      "loss": 0.0283,
      "step": 1400
    },
    {
      "epoch": 7.0,
      "learning_rate": 7.957777777777779e-06,
      "loss": 0.0295,
      "step": 1425
    },
    {
      "epoch": 7.01,
      "learning_rate": 7.902222222222223e-06,
      "loss": 0.0245,
      "step": 1450
    },
    {
      "epoch": 7.01,
      "learning_rate": 7.846666666666667e-06,
      "loss": 0.0271,
      "step": 1475
    },
    {
      "epoch": 7.02,
      "learning_rate": 7.791111111111111e-06,
      "loss": 0.028,
      "step": 1500
    },
    {
      "epoch": 7.02,
      "learning_rate": 7.735555555555557e-06,
      "loss": 0.0271,
      "step": 1525
    },
    {
      "epoch": 7.03,
      "learning_rate": 7.680000000000001e-06,
      "loss": 0.0238,
      "step": 1550
    },
    {
      "epoch": 7.03,
      "learning_rate": 7.624444444444445e-06,
      "loss": 0.0237,
      "step": 1575
    },
    {
      "epoch": 7.04,
      "learning_rate": 7.56888888888889e-06,
      "loss": 0.023,
      "step": 1600
    },
    {
      "epoch": 8.0,
      "learning_rate": 7.513333333333334e-06,
      "loss": 0.0207,
      "step": 1625
    },
    {
      "epoch": 8.01,
      "learning_rate": 7.457777777777778e-06,
      "loss": 0.0193,
      "step": 1650
    },
    {
      "epoch": 8.01,
      "learning_rate": 7.402222222222223e-06,
      "loss": 0.0224,
      "step": 1675
    },
    {
      "epoch": 8.02,
      "learning_rate": 7.346666666666668e-06,
      "loss": 0.0184,
      "step": 1700
    },
    {
      "epoch": 8.02,
      "learning_rate": 7.291111111111112e-06,
      "loss": 0.021,
      "step": 1725
    },
    {
      "epoch": 8.03,
      "learning_rate": 7.235555555555556e-06,
      "loss": 0.0197,
      "step": 1750
    },
    {
      "epoch": 8.03,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0167,
      "step": 1775
    },
    {
      "epoch": 8.04,
      "learning_rate": 7.124444444444445e-06,
      "loss": 0.0155,
      "step": 1800
    },
    {
      "epoch": 9.0,
      "learning_rate": 7.06888888888889e-06,
      "loss": 0.0161,
      "step": 1825
    },
    {
      "epoch": 9.01,
      "learning_rate": 7.0133333333333345e-06,
      "loss": 0.0181,
      "step": 1850
    },
    {
      "epoch": 9.01,
      "learning_rate": 6.9577777777777785e-06,
      "loss": 0.0166,
      "step": 1875
    },
    {
      "epoch": 9.02,
      "learning_rate": 6.902222222222223e-06,
      "loss": 0.0149,
      "step": 1900
    },
    {
      "epoch": 9.02,
      "learning_rate": 6.846666666666667e-06,
      "loss": 0.0161,
      "step": 1925
    },
    {
      "epoch": 9.03,
      "learning_rate": 6.7911111111111115e-06,
      "loss": 0.0163,
      "step": 1950
    },
    {
      "epoch": 9.03,
      "learning_rate": 6.735555555555556e-06,
      "loss": 0.0155,
      "step": 1975
    },
    {
      "epoch": 9.04,
      "learning_rate": 6.680000000000001e-06,
      "loss": 0.0121,
      "step": 2000
    },
    {
      "epoch": 9.04,
      "eval_loss": 0.3753364384174347,
      "eval_runtime": 1217.4277,
      "eval_samples_per_second": 1.393,
      "eval_steps_per_second": 0.087,
      "eval_wer": 13.187221396731056,
      "step": 2000
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 9223372036854775807,
  "total_flos": 6.528197230460928e+19,
  "trial_name": null,
  "trial_params": null
}