{
  "best_metric": 0.46117448806762695,
  "best_model_checkpoint": "speecht5_finetuned_voxpopuli_pl/checkpoint-1000",
  "epoch": 5.025125628140704,
  "eval_steps": 1000,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12562814070351758,
      "grad_norm": 34.14754104614258,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 6.2754,
      "step": 25
    },
    {
      "epoch": 0.25125628140703515,
      "grad_norm": 25.906333923339844,
      "learning_rate": 9.400000000000001e-07,
      "loss": 6.1497,
      "step": 50
    },
    {
      "epoch": 0.3768844221105528,
      "grad_norm": 29.097463607788086,
      "learning_rate": 1.44e-06,
      "loss": 6.0335,
      "step": 75
    },
    {
      "epoch": 0.5025125628140703,
      "grad_norm": 18.543378829956055,
      "learning_rate": 1.94e-06,
      "loss": 5.8815,
      "step": 100
    },
    {
      "epoch": 0.628140703517588,
      "grad_norm": 24.147188186645508,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 5.5659,
      "step": 125
    },
    {
      "epoch": 0.7537688442211056,
      "grad_norm": 15.634539604187012,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 5.3573,
      "step": 150
    },
    {
      "epoch": 0.8793969849246231,
      "grad_norm": 14.31460189819336,
      "learning_rate": 3.44e-06,
      "loss": 5.2616,
      "step": 175
    },
    {
      "epoch": 1.0050251256281406,
      "grad_norm": 19.75978660583496,
      "learning_rate": 3.94e-06,
      "loss": 5.1787,
      "step": 200
    },
    {
      "epoch": 1.1306532663316582,
      "grad_norm": 12.639854431152344,
      "learning_rate": 4.440000000000001e-06,
      "loss": 5.1214,
      "step": 225
    },
    {
      "epoch": 1.2562814070351758,
      "grad_norm": 17.792396545410156,
      "learning_rate": 4.94e-06,
      "loss": 5.0703,
      "step": 250
    },
    {
      "epoch": 1.3819095477386933,
      "grad_norm": 22.859201431274414,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 4.9714,
      "step": 275
    },
    {
      "epoch": 1.507537688442211,
      "grad_norm": 16.62458610534668,
      "learning_rate": 5.94e-06,
      "loss": 4.8681,
      "step": 300
    },
    {
      "epoch": 1.6331658291457285,
      "grad_norm": 17.881174087524414,
      "learning_rate": 6.440000000000001e-06,
      "loss": 4.8252,
      "step": 325
    },
    {
      "epoch": 1.758793969849246,
      "grad_norm": 14.765595436096191,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 4.5588,
      "step": 350
    },
    {
      "epoch": 1.8844221105527639,
      "grad_norm": 13.808978080749512,
      "learning_rate": 7.440000000000001e-06,
      "loss": 4.5211,
      "step": 375
    },
    {
      "epoch": 2.0100502512562812,
      "grad_norm": 10.674385070800781,
      "learning_rate": 7.94e-06,
      "loss": 4.4888,
      "step": 400
    },
    {
      "epoch": 2.135678391959799,
      "grad_norm": 15.555588722229004,
      "learning_rate": 8.44e-06,
      "loss": 4.4513,
      "step": 425
    },
    {
      "epoch": 2.2613065326633164,
      "grad_norm": 13.092530250549316,
      "learning_rate": 8.94e-06,
      "loss": 4.3646,
      "step": 450
    },
    {
      "epoch": 2.3869346733668344,
      "grad_norm": 12.971352577209473,
      "learning_rate": 9.440000000000001e-06,
      "loss": 4.3459,
      "step": 475
    },
    {
      "epoch": 2.5125628140703515,
      "grad_norm": 10.348517417907715,
      "learning_rate": 9.940000000000001e-06,
      "loss": 4.2915,
      "step": 500
    },
    {
      "epoch": 2.6381909547738696,
      "grad_norm": 13.625853538513184,
      "learning_rate": 9.937142857142858e-06,
      "loss": 4.3095,
      "step": 525
    },
    {
      "epoch": 2.7638190954773867,
      "grad_norm": 11.426923751831055,
      "learning_rate": 9.865714285714285e-06,
      "loss": 4.1899,
      "step": 550
    },
    {
      "epoch": 2.8894472361809047,
      "grad_norm": 11.516312599182129,
      "learning_rate": 9.794285714285714e-06,
      "loss": 4.2233,
      "step": 575
    },
    {
      "epoch": 3.0150753768844223,
      "grad_norm": 16.141698837280273,
      "learning_rate": 9.722857142857143e-06,
      "loss": 4.1831,
      "step": 600
    },
    {
      "epoch": 3.14070351758794,
      "grad_norm": 14.935202598571777,
      "learning_rate": 9.651428571428572e-06,
      "loss": 4.1739,
      "step": 625
    },
    {
      "epoch": 3.2663316582914574,
      "grad_norm": 10.119762420654297,
      "learning_rate": 9.58e-06,
      "loss": 4.1649,
      "step": 650
    },
    {
      "epoch": 3.391959798994975,
      "grad_norm": 16.50948143005371,
      "learning_rate": 9.508571428571429e-06,
      "loss": 4.1627,
      "step": 675
    },
    {
      "epoch": 3.5175879396984926,
      "grad_norm": 10.08300495147705,
      "learning_rate": 9.437142857142858e-06,
      "loss": 4.1915,
      "step": 700
    },
    {
      "epoch": 3.64321608040201,
      "grad_norm": 9.375288963317871,
      "learning_rate": 9.365714285714287e-06,
      "loss": 4.1279,
      "step": 725
    },
    {
      "epoch": 3.7688442211055277,
      "grad_norm": 13.562234878540039,
      "learning_rate": 9.294285714285714e-06,
      "loss": 4.1083,
      "step": 750
    },
    {
      "epoch": 3.8944723618090453,
      "grad_norm": 15.115653991699219,
      "learning_rate": 9.222857142857143e-06,
      "loss": 4.1236,
      "step": 775
    },
    {
      "epoch": 4.0201005025125625,
      "grad_norm": 9.049237251281738,
      "learning_rate": 9.151428571428572e-06,
      "loss": 4.0964,
      "step": 800
    },
    {
      "epoch": 4.1457286432160805,
      "grad_norm": 9.250771522521973,
      "learning_rate": 9.080000000000001e-06,
      "loss": 4.0669,
      "step": 825
    },
    {
      "epoch": 4.271356783919598,
      "grad_norm": 12.07302474975586,
      "learning_rate": 9.00857142857143e-06,
      "loss": 4.0587,
      "step": 850
    },
    {
      "epoch": 4.396984924623116,
      "grad_norm": 9.538933753967285,
      "learning_rate": 8.937142857142857e-06,
      "loss": 4.0837,
      "step": 875
    },
    {
      "epoch": 4.522613065326633,
      "grad_norm": 14.347626686096191,
      "learning_rate": 8.865714285714287e-06,
      "loss": 4.0086,
      "step": 900
    },
    {
      "epoch": 4.648241206030151,
      "grad_norm": 10.831369400024414,
      "learning_rate": 8.794285714285716e-06,
      "loss": 4.0563,
      "step": 925
    },
    {
      "epoch": 4.773869346733669,
      "grad_norm": 9.891343116760254,
      "learning_rate": 8.722857142857145e-06,
      "loss": 4.0057,
      "step": 950
    },
    {
      "epoch": 4.899497487437186,
      "grad_norm": 9.572471618652344,
      "learning_rate": 8.651428571428572e-06,
      "loss": 4.0557,
      "step": 975
    },
    {
      "epoch": 5.025125628140704,
      "grad_norm": 9.745298385620117,
      "learning_rate": 8.580000000000001e-06,
      "loss": 3.9902,
      "step": 1000
    },
    {
      "epoch": 5.025125628140704,
      "eval_loss": 0.46117448806762695,
      "eval_runtime": 75.4681,
      "eval_samples_per_second": 9.381,
      "eval_steps_per_second": 4.691,
      "step": 1000
    }
  ],
  "logging_steps": 25,
  "max_steps": 4000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 21,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4502414650961304.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}