{
  "best_metric": 103.08219178082192,
  "best_model_checkpoint": "./whisper-small-amet/checkpoint-2000",
  "epoch": 2000.0,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 25.0,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 2.9938,
      "step": 25
    },
    {
      "epoch": 50.0,
      "learning_rate": 9.200000000000001e-07,
      "loss": 2.2746,
      "step": 50
    },
    {
      "epoch": 75.0,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.7055,
      "step": 75
    },
    {
      "epoch": 100.0,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.9013,
      "step": 100
    },
    {
      "epoch": 100.0,
      "eval_loss": 2.7089741230010986,
      "eval_runtime": 28.1806,
      "eval_samples_per_second": 0.71,
      "eval_steps_per_second": 0.035,
      "eval_wer": 171.57534246575344,
      "step": 100
    },
    {
      "epoch": 125.0,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.1244,
      "step": 125
    },
    {
      "epoch": 150.0,
      "learning_rate": 2.9e-06,
      "loss": 0.0031,
      "step": 150
    },
    {
      "epoch": 175.0,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0004,
      "step": 175
    },
    {
      "epoch": 200.0,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0002,
      "step": 200
    },
    {
      "epoch": 200.0,
      "eval_loss": 3.7302818298339844,
      "eval_runtime": 28.2207,
      "eval_samples_per_second": 0.709,
      "eval_steps_per_second": 0.035,
      "eval_wer": 298.63013698630135,
      "step": 200
    },
    {
      "epoch": 225.0,
      "learning_rate": 4.4e-06,
      "loss": 0.0002,
      "step": 225
    },
    {
      "epoch": 250.0,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0001,
      "step": 250
    },
    {
      "epoch": 275.0,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.0001,
      "step": 275
    },
    {
      "epoch": 300.0,
      "learning_rate": 5.9e-06,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 300.0,
      "eval_loss": 3.8286595344543457,
      "eval_runtime": 28.3319,
      "eval_samples_per_second": 0.706,
      "eval_steps_per_second": 0.035,
      "eval_wer": 239.3835616438356,
      "step": 300
    },
    {
      "epoch": 325.0,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0001,
      "step": 325
    },
    {
      "epoch": 350.0,
      "learning_rate": 6.9e-06,
      "loss": 0.0001,
      "step": 350
    },
    {
      "epoch": 375.0,
      "learning_rate": 7.4e-06,
      "loss": 0.0001,
      "step": 375
    },
    {
      "epoch": 400.0,
      "learning_rate": 7.9e-06,
      "loss": 0.0001,
      "step": 400
    },
    {
      "epoch": 400.0,
      "eval_loss": 3.8876891136169434,
      "eval_runtime": 28.3964,
      "eval_samples_per_second": 0.704,
      "eval_steps_per_second": 0.035,
      "eval_wer": 234.93150684931504,
      "step": 400
    },
    {
      "epoch": 425.0,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.0001,
      "step": 425
    },
    {
      "epoch": 450.0,
      "learning_rate": 8.900000000000001e-06,
      "loss": 0.0001,
      "step": 450
    },
    {
      "epoch": 475.0,
      "learning_rate": 9.4e-06,
      "loss": 0.0001,
      "step": 475
    },
    {
      "epoch": 500.0,
      "learning_rate": 9.9e-06,
      "loss": 0.0001,
      "step": 500
    },
    {
      "epoch": 500.0,
      "eval_loss": 4.056136608123779,
      "eval_runtime": 28.2255,
      "eval_samples_per_second": 0.709,
      "eval_steps_per_second": 0.035,
      "eval_wer": 316.4383561643836,
      "step": 500
    },
    {
      "epoch": 525.0,
      "learning_rate": 9.866666666666668e-06,
      "loss": 0.0001,
      "step": 525
    },
    {
      "epoch": 550.0,
      "learning_rate": 9.7e-06,
      "loss": 0.0001,
      "step": 550
    },
    {
      "epoch": 575.0,
      "learning_rate": 9.533333333333334e-06,
      "loss": 0.0001,
      "step": 575
    },
    {
      "epoch": 600.0,
      "learning_rate": 9.366666666666668e-06,
      "loss": 0.0001,
      "step": 600
    },
    {
      "epoch": 600.0,
      "eval_loss": 4.270617485046387,
      "eval_runtime": 28.1771,
      "eval_samples_per_second": 0.71,
      "eval_steps_per_second": 0.035,
      "eval_wer": 189.04109589041096,
      "step": 600
    },
    {
      "epoch": 625.0,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.0001,
      "step": 625
    },
    {
      "epoch": 650.0,
      "learning_rate": 9.033333333333334e-06,
      "loss": 0.0,
      "step": 650
    },
    {
      "epoch": 675.0,
      "learning_rate": 8.866666666666668e-06,
      "loss": 0.0,
      "step": 675
    },
    {
      "epoch": 700.0,
      "learning_rate": 8.700000000000001e-06,
      "loss": 0.0,
      "step": 700
    },
    {
      "epoch": 700.0,
      "eval_loss": 4.452445030212402,
      "eval_runtime": 28.3415,
      "eval_samples_per_second": 0.706,
      "eval_steps_per_second": 0.035,
      "eval_wer": 229.45205479452056,
      "step": 700
    },
    {
      "epoch": 725.0,
      "learning_rate": 8.533333333333335e-06,
      "loss": 0.0,
      "step": 725
    },
    {
      "epoch": 750.0,
      "learning_rate": 8.366666666666667e-06,
      "loss": 0.0,
      "step": 750
    },
    {
      "epoch": 775.0,
      "learning_rate": 8.2e-06,
      "loss": 0.0,
      "step": 775
    },
    {
      "epoch": 800.0,
      "learning_rate": 8.033333333333335e-06,
      "loss": 0.0,
      "step": 800
    },
    {
      "epoch": 800.0,
      "eval_loss": 4.625022888183594,
      "eval_runtime": 28.3479,
      "eval_samples_per_second": 0.706,
      "eval_steps_per_second": 0.035,
      "eval_wer": 308.5616438356164,
      "step": 800
    },
    {
      "epoch": 825.0,
      "learning_rate": 7.866666666666667e-06,
      "loss": 0.0,
      "step": 825
    },
    {
      "epoch": 850.0,
      "learning_rate": 7.7e-06,
      "loss": 0.0,
      "step": 850
    },
    {
      "epoch": 875.0,
      "learning_rate": 7.533333333333334e-06,
      "loss": 0.0,
      "step": 875
    },
    {
      "epoch": 900.0,
      "learning_rate": 7.3666666666666676e-06,
      "loss": 0.0,
      "step": 900
    },
    {
      "epoch": 900.0,
      "eval_loss": 4.784408092498779,
      "eval_runtime": 28.2633,
      "eval_samples_per_second": 0.708,
      "eval_steps_per_second": 0.035,
      "eval_wer": 429.4520547945205,
      "step": 900
    },
    {
      "epoch": 925.0,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.0,
      "step": 925
    },
    {
      "epoch": 950.0,
      "learning_rate": 7.033333333333334e-06,
      "loss": 0.0,
      "step": 950
    },
    {
      "epoch": 975.0,
      "learning_rate": 6.886666666666667e-06,
      "loss": 0.0753,
      "step": 975
    },
    {
      "epoch": 1000.0,
      "learning_rate": 6.720000000000001e-06,
      "loss": 0.0405,
      "step": 1000
    },
    {
      "epoch": 1000.0,
      "eval_loss": 4.618178367614746,
      "eval_runtime": 28.5252,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 206.84931506849313,
      "step": 1000
    },
    {
      "epoch": 1025.0,
      "learning_rate": 6.553333333333334e-06,
      "loss": 0.0057,
      "step": 1025
    },
    {
      "epoch": 1050.0,
      "learning_rate": 6.386666666666668e-06,
      "loss": 0.0001,
      "step": 1050
    },
    {
      "epoch": 1075.0,
      "learning_rate": 6.220000000000001e-06,
      "loss": 0.0001,
      "step": 1075
    },
    {
      "epoch": 1100.0,
      "learning_rate": 6.0533333333333335e-06,
      "loss": 0.0002,
      "step": 1100
    },
    {
      "epoch": 1100.0,
      "eval_loss": 5.542290687561035,
      "eval_runtime": 28.5497,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 159.93150684931507,
      "step": 1100
    },
    {
      "epoch": 1125.0,
      "learning_rate": 5.886666666666667e-06,
      "loss": 0.0002,
      "step": 1125
    },
    {
      "epoch": 1150.0,
      "learning_rate": 5.72e-06,
      "loss": 0.0002,
      "step": 1150
    },
    {
      "epoch": 1175.0,
      "learning_rate": 5.553333333333334e-06,
      "loss": 0.0002,
      "step": 1175
    },
    {
      "epoch": 1200.0,
      "learning_rate": 5.386666666666667e-06,
      "loss": 0.0002,
      "step": 1200
    },
    {
      "epoch": 1200.0,
      "eval_loss": 6.0516815185546875,
      "eval_runtime": 28.4295,
      "eval_samples_per_second": 0.703,
      "eval_steps_per_second": 0.035,
      "eval_wer": 151.7123287671233,
      "step": 1200
    },
    {
      "epoch": 1225.0,
      "learning_rate": 5.220000000000001e-06,
      "loss": 0.0002,
      "step": 1225
    },
    {
      "epoch": 1250.0,
      "learning_rate": 5.053333333333334e-06,
      "loss": 0.0002,
      "step": 1250
    },
    {
      "epoch": 1275.0,
      "learning_rate": 4.886666666666668e-06,
      "loss": 0.0002,
      "step": 1275
    },
    {
      "epoch": 1300.0,
      "learning_rate": 4.7200000000000005e-06,
      "loss": 0.0002,
      "step": 1300
    },
    {
      "epoch": 1300.0,
      "eval_loss": 6.349332332611084,
      "eval_runtime": 28.5448,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 154.7945205479452,
      "step": 1300
    },
    {
      "epoch": 1325.0,
      "learning_rate": 4.5533333333333335e-06,
      "loss": 0.0002,
      "step": 1325
    },
    {
      "epoch": 1350.0,
      "learning_rate": 4.3866666666666665e-06,
      "loss": 0.0002,
      "step": 1350
    },
    {
      "epoch": 1375.0,
      "learning_rate": 4.22e-06,
      "loss": 0.0002,
      "step": 1375
    },
    {
      "epoch": 1400.0,
      "learning_rate": 4.053333333333333e-06,
      "loss": 0.0002,
      "step": 1400
    },
    {
      "epoch": 1400.0,
      "eval_loss": 6.543065547943115,
      "eval_runtime": 28.5232,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 138.6986301369863,
      "step": 1400
    },
    {
      "epoch": 1425.0,
      "learning_rate": 3.886666666666667e-06,
      "loss": 0.0002,
      "step": 1425
    },
    {
      "epoch": 1450.0,
      "learning_rate": 3.7200000000000004e-06,
      "loss": 0.0002,
      "step": 1450
    },
    {
      "epoch": 1475.0,
      "learning_rate": 3.5533333333333338e-06,
      "loss": 0.0002,
      "step": 1475
    },
    {
      "epoch": 1500.0,
      "learning_rate": 3.386666666666667e-06,
      "loss": 0.0002,
      "step": 1500
    },
    {
      "epoch": 1500.0,
      "eval_loss": 6.669939994812012,
      "eval_runtime": 28.5345,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 158.56164383561645,
      "step": 1500
    },
    {
      "epoch": 1525.0,
      "learning_rate": 3.2200000000000005e-06,
      "loss": 0.0002,
      "step": 1525
    },
    {
      "epoch": 1550.0,
      "learning_rate": 3.053333333333334e-06,
      "loss": 0.0001,
      "step": 1550
    },
    {
      "epoch": 1575.0,
      "learning_rate": 2.8866666666666673e-06,
      "loss": 0.0001,
      "step": 1575
    },
    {
      "epoch": 1600.0,
      "learning_rate": 2.7200000000000002e-06,
      "loss": 0.0001,
      "step": 1600
    },
    {
      "epoch": 1600.0,
      "eval_loss": 6.759077548980713,
      "eval_runtime": 28.534,
      "eval_samples_per_second": 0.701,
      "eval_steps_per_second": 0.035,
      "eval_wer": 160.27397260273972,
      "step": 1600
    },
    {
      "epoch": 1625.0,
      "learning_rate": 2.5533333333333336e-06,
      "loss": 0.0001,
      "step": 1625
    },
    {
      "epoch": 1650.0,
      "learning_rate": 2.386666666666667e-06,
      "loss": 0.0001,
      "step": 1650
    },
    {
      "epoch": 1675.0,
      "learning_rate": 2.2200000000000003e-06,
      "loss": 0.0001,
      "step": 1675
    },
    {
      "epoch": 1700.0,
      "learning_rate": 2.0533333333333337e-06,
      "loss": 0.0001,
      "step": 1700
    },
    {
      "epoch": 1700.0,
      "eval_loss": 6.82085657119751,
      "eval_runtime": 28.5933,
      "eval_samples_per_second": 0.699,
      "eval_steps_per_second": 0.035,
      "eval_wer": 103.08219178082192,
      "step": 1700
    },
    {
      "epoch": 1725.0,
      "learning_rate": 1.8866666666666669e-06,
      "loss": 0.0001,
      "step": 1725
    },
    {
      "epoch": 1750.0,
      "learning_rate": 1.72e-06,
      "loss": 0.0001,
      "step": 1750
    },
    {
      "epoch": 1775.0,
      "learning_rate": 1.5533333333333334e-06,
      "loss": 0.0001,
      "step": 1775
    },
    {
      "epoch": 1800.0,
      "learning_rate": 1.3866666666666668e-06,
      "loss": 0.0001,
      "step": 1800
    },
    {
      "epoch": 1800.0,
      "eval_loss": 6.856205940246582,
      "eval_runtime": 28.571,
      "eval_samples_per_second": 0.7,
      "eval_steps_per_second": 0.035,
      "eval_wer": 103.08219178082192,
      "step": 1800
    },
    {
      "epoch": 1825.0,
      "learning_rate": 1.2200000000000002e-06,
      "loss": 0.0001,
      "step": 1825
    },
    {
      "epoch": 1850.0,
      "learning_rate": 1.0533333333333333e-06,
      "loss": 0.0001,
      "step": 1850
    },
    {
      "epoch": 1875.0,
      "learning_rate": 8.866666666666668e-07,
      "loss": 0.0001,
      "step": 1875
    },
    {
      "epoch": 1900.0,
      "learning_rate": 7.2e-07,
      "loss": 0.0001,
      "step": 1900
    },
    {
      "epoch": 1900.0,
      "eval_loss": 6.875776767730713,
      "eval_runtime": 28.4345,
      "eval_samples_per_second": 0.703,
      "eval_steps_per_second": 0.035,
      "eval_wer": 103.08219178082192,
      "step": 1900
    },
    {
      "epoch": 1925.0,
      "learning_rate": 5.533333333333334e-07,
      "loss": 0.0001,
      "step": 1925
    },
    {
      "epoch": 1950.0,
      "learning_rate": 3.8666666666666674e-07,
      "loss": 0.0001,
      "step": 1950
    },
    {
      "epoch": 1975.0,
      "learning_rate": 2.2e-07,
      "loss": 0.0001,
      "step": 1975
    },
    {
      "epoch": 2000.0,
      "learning_rate": 5.3333333333333334e-08,
      "loss": 0.0001,
      "step": 2000
    },
    {
      "epoch": 2000.0,
      "eval_loss": 6.883942604064941,
      "eval_runtime": 28.4855,
      "eval_samples_per_second": 0.702,
      "eval_steps_per_second": 0.035,
      "eval_wer": 103.08219178082192,
      "step": 2000
    },
    {
      "epoch": 2000.0,
      "step": 2000,
      "total_flos": 5.7717080064e+17,
      "train_loss": 0.10164999849759625,
      "train_runtime": 954.9086,
      "train_samples_per_second": 134.044,
      "train_steps_per_second": 2.094
    }
  ],
  "max_steps": 2000,
  "num_train_epochs": 2000,
  "total_flos": 5.7717080064e+17,
  "trial_name": null,
  "trial_params": null
}