|
{ |
|
"best_metric": 2.9130738755534837, |
|
"best_model_checkpoint": "./final-whisper-for-initial-publish/checkpoint-2500", |
|
"epoch": 4.118616144975288, |
|
"eval_steps": 500, |
|
"global_step": 2500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04118616144975288, |
|
"grad_norm": 67.08393096923828, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 2.8367, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.08237232289950576, |
|
"grad_norm": 17.447174072265625, |
|
"learning_rate": 9.200000000000001e-07, |
|
"loss": 2.0282, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12355848434925865, |
|
"grad_norm": 12.4807710647583, |
|
"learning_rate": 1.42e-06, |
|
"loss": 1.3152, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.16474464579901152, |
|
"grad_norm": 8.900789260864258, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 0.7248, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.20593080724876442, |
|
"grad_norm": 7.897016525268555, |
|
"learning_rate": 2.42e-06, |
|
"loss": 0.5731, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2471169686985173, |
|
"grad_norm": 9.260579109191895, |
|
"learning_rate": 2.92e-06, |
|
"loss": 0.474, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2883031301482702, |
|
"grad_norm": 6.941812515258789, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 0.4304, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.32948929159802304, |
|
"grad_norm": 6.919547080993652, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 0.3514, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.37067545304777594, |
|
"grad_norm": 6.444908618927002, |
|
"learning_rate": 4.42e-06, |
|
"loss": 0.273, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.41186161449752884, |
|
"grad_norm": 5.828249454498291, |
|
"learning_rate": 4.92e-06, |
|
"loss": 0.1655, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.45304777594728174, |
|
"grad_norm": 2.492833137512207, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 0.0894, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.4942339373970346, |
|
"grad_norm": 3.24698805809021, |
|
"learning_rate": 5.92e-06, |
|
"loss": 0.076, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5354200988467874, |
|
"grad_norm": 4.378988265991211, |
|
"learning_rate": 6.42e-06, |
|
"loss": 0.0763, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5766062602965404, |
|
"grad_norm": 3.146955728530884, |
|
"learning_rate": 6.92e-06, |
|
"loss": 0.0679, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6177924217462932, |
|
"grad_norm": 3.8782553672790527, |
|
"learning_rate": 7.420000000000001e-06, |
|
"loss": 0.0524, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6589785831960461, |
|
"grad_norm": 6.15730094909668, |
|
"learning_rate": 7.92e-06, |
|
"loss": 0.0554, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.700164744645799, |
|
"grad_norm": 6.1213812828063965, |
|
"learning_rate": 8.42e-06, |
|
"loss": 0.0535, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7413509060955519, |
|
"grad_norm": 4.010768890380859, |
|
"learning_rate": 8.920000000000001e-06, |
|
"loss": 0.0465, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7825370675453048, |
|
"grad_norm": 3.509321451187134, |
|
"learning_rate": 9.42e-06, |
|
"loss": 0.0436, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8237232289950577, |
|
"grad_norm": 2.744082450866699, |
|
"learning_rate": 9.920000000000002e-06, |
|
"loss": 0.0468, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8237232289950577, |
|
"eval_loss": 0.04186907038092613, |
|
"eval_runtime": 1739.4189, |
|
"eval_samples_per_second": 1.395, |
|
"eval_steps_per_second": 0.175, |
|
"eval_wer": 10.859939408063388, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8649093904448105, |
|
"grad_norm": 2.581739664077759, |
|
"learning_rate": 9.953333333333333e-06, |
|
"loss": 0.0387, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9060955518945635, |
|
"grad_norm": 1.7439745664596558, |
|
"learning_rate": 9.89777777777778e-06, |
|
"loss": 0.0372, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9472817133443163, |
|
"grad_norm": 3.0741066932678223, |
|
"learning_rate": 9.842222222222223e-06, |
|
"loss": 0.0344, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.9884678747940692, |
|
"grad_norm": 2.640840530395508, |
|
"learning_rate": 9.786666666666667e-06, |
|
"loss": 0.0331, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.029654036243822, |
|
"grad_norm": 0.908171534538269, |
|
"learning_rate": 9.731111111111113e-06, |
|
"loss": 0.0267, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.0708401976935749, |
|
"grad_norm": 1.0449379682540894, |
|
"learning_rate": 9.675555555555555e-06, |
|
"loss": 0.0203, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.112026359143328, |
|
"grad_norm": 2.090158700942993, |
|
"learning_rate": 9.620000000000001e-06, |
|
"loss": 0.0185, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.1532125205930808, |
|
"grad_norm": 3.4184322357177734, |
|
"learning_rate": 9.564444444444445e-06, |
|
"loss": 0.0226, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.1943986820428336, |
|
"grad_norm": 2.6463236808776855, |
|
"learning_rate": 9.508888888888889e-06, |
|
"loss": 0.0155, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.2355848434925865, |
|
"grad_norm": 0.7438070774078369, |
|
"learning_rate": 9.453333333333335e-06, |
|
"loss": 0.0184, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.2767710049423393, |
|
"grad_norm": 1.7319138050079346, |
|
"learning_rate": 9.397777777777779e-06, |
|
"loss": 0.0161, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.3179571663920924, |
|
"grad_norm": 1.9704327583312988, |
|
"learning_rate": 9.342222222222223e-06, |
|
"loss": 0.0137, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.3591433278418452, |
|
"grad_norm": 1.808190107345581, |
|
"learning_rate": 9.286666666666667e-06, |
|
"loss": 0.0169, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.400329489291598, |
|
"grad_norm": 2.1106882095336914, |
|
"learning_rate": 9.231111111111111e-06, |
|
"loss": 0.0141, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.441515650741351, |
|
"grad_norm": 1.5317939519882202, |
|
"learning_rate": 9.175555555555557e-06, |
|
"loss": 0.0114, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.4827018121911038, |
|
"grad_norm": 2.758979082107544, |
|
"learning_rate": 9.12e-06, |
|
"loss": 0.0123, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.5238879736408566, |
|
"grad_norm": 2.9624338150024414, |
|
"learning_rate": 9.064444444444447e-06, |
|
"loss": 0.014, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.5650741350906094, |
|
"grad_norm": 2.520521402359009, |
|
"learning_rate": 9.008888888888889e-06, |
|
"loss": 0.0107, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.6062602965403623, |
|
"grad_norm": 1.0015959739685059, |
|
"learning_rate": 8.953333333333335e-06, |
|
"loss": 0.0111, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.6474464579901154, |
|
"grad_norm": 2.2675178050994873, |
|
"learning_rate": 8.897777777777779e-06, |
|
"loss": 0.0124, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.6474464579901154, |
|
"eval_loss": 0.02125450409948826, |
|
"eval_runtime": 1730.2385, |
|
"eval_samples_per_second": 1.403, |
|
"eval_steps_per_second": 0.176, |
|
"eval_wer": 5.243532975996271, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.6886326194398682, |
|
"grad_norm": 1.1241651773452759, |
|
"learning_rate": 8.842222222222223e-06, |
|
"loss": 0.0108, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.729818780889621, |
|
"grad_norm": 0.9443243145942688, |
|
"learning_rate": 8.786666666666668e-06, |
|
"loss": 0.01, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.771004942339374, |
|
"grad_norm": 1.7507250308990479, |
|
"learning_rate": 8.73111111111111e-06, |
|
"loss": 0.0108, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.812191103789127, |
|
"grad_norm": 0.8334024548530579, |
|
"learning_rate": 8.675555555555556e-06, |
|
"loss": 0.0108, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.8533772652388798, |
|
"grad_norm": 2.0746381282806396, |
|
"learning_rate": 8.62e-06, |
|
"loss": 0.009, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.8945634266886326, |
|
"grad_norm": 1.5471493005752563, |
|
"learning_rate": 8.564444444444445e-06, |
|
"loss": 0.0124, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.9357495881383855, |
|
"grad_norm": 1.4362578392028809, |
|
"learning_rate": 8.50888888888889e-06, |
|
"loss": 0.009, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.9769357495881383, |
|
"grad_norm": 1.5219734907150269, |
|
"learning_rate": 8.453333333333334e-06, |
|
"loss": 0.0108, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.018121911037891, |
|
"grad_norm": 0.7635548710823059, |
|
"learning_rate": 8.397777777777778e-06, |
|
"loss": 0.0069, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.059308072487644, |
|
"grad_norm": 0.622107982635498, |
|
"learning_rate": 8.342222222222222e-06, |
|
"loss": 0.0045, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.100494233937397, |
|
"grad_norm": 3.157203197479248, |
|
"learning_rate": 8.286666666666668e-06, |
|
"loss": 0.0055, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.1416803953871497, |
|
"grad_norm": 0.8712319731712341, |
|
"learning_rate": 8.231111111111112e-06, |
|
"loss": 0.0025, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.182866556836903, |
|
"grad_norm": 0.7438367605209351, |
|
"learning_rate": 8.175555555555556e-06, |
|
"loss": 0.0038, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.224052718286656, |
|
"grad_norm": 0.4088328778743744, |
|
"learning_rate": 8.120000000000002e-06, |
|
"loss": 0.004, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.2652388797364087, |
|
"grad_norm": 0.34847044944763184, |
|
"learning_rate": 8.064444444444444e-06, |
|
"loss": 0.003, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.3064250411861615, |
|
"grad_norm": 0.08477868884801865, |
|
"learning_rate": 8.00888888888889e-06, |
|
"loss": 0.0024, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.3476112026359144, |
|
"grad_norm": 0.5055556297302246, |
|
"learning_rate": 7.953333333333334e-06, |
|
"loss": 0.004, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.3887973640856672, |
|
"grad_norm": 0.27648621797561646, |
|
"learning_rate": 7.897777777777778e-06, |
|
"loss": 0.0037, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.42998352553542, |
|
"grad_norm": 2.3948631286621094, |
|
"learning_rate": 7.842222222222224e-06, |
|
"loss": 0.0022, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.471169686985173, |
|
"grad_norm": 0.3471105694770813, |
|
"learning_rate": 7.786666666666666e-06, |
|
"loss": 0.0034, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.471169686985173, |
|
"eval_loss": 0.017010504379868507, |
|
"eval_runtime": 1743.3725, |
|
"eval_samples_per_second": 1.392, |
|
"eval_steps_per_second": 0.174, |
|
"eval_wer": 3.821952924726171, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.5123558484349258, |
|
"grad_norm": 0.14169706404209137, |
|
"learning_rate": 7.731111111111112e-06, |
|
"loss": 0.0043, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 2.5535420098846786, |
|
"grad_norm": 0.4921598732471466, |
|
"learning_rate": 7.675555555555556e-06, |
|
"loss": 0.0024, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.594728171334432, |
|
"grad_norm": 0.20753732323646545, |
|
"learning_rate": 7.620000000000001e-06, |
|
"loss": 0.004, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 2.6359143327841847, |
|
"grad_norm": 0.10961499810218811, |
|
"learning_rate": 7.564444444444446e-06, |
|
"loss": 0.004, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.6771004942339376, |
|
"grad_norm": 1.0841516256332397, |
|
"learning_rate": 7.50888888888889e-06, |
|
"loss": 0.0046, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 2.7182866556836904, |
|
"grad_norm": 0.12003188580274582, |
|
"learning_rate": 7.453333333333334e-06, |
|
"loss": 0.0023, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.7594728171334433, |
|
"grad_norm": 0.055499833077192307, |
|
"learning_rate": 7.3977777777777786e-06, |
|
"loss": 0.0035, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 2.800658978583196, |
|
"grad_norm": 2.9141526222229004, |
|
"learning_rate": 7.342222222222223e-06, |
|
"loss": 0.0023, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.841845140032949, |
|
"grad_norm": 0.025740938261151314, |
|
"learning_rate": 7.2866666666666675e-06, |
|
"loss": 0.0024, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 2.883031301482702, |
|
"grad_norm": 0.6345125436782837, |
|
"learning_rate": 7.231111111111112e-06, |
|
"loss": 0.004, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.9242174629324547, |
|
"grad_norm": 0.5243808627128601, |
|
"learning_rate": 7.1755555555555556e-06, |
|
"loss": 0.0024, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 2.9654036243822075, |
|
"grad_norm": 0.44930708408355713, |
|
"learning_rate": 7.1200000000000004e-06, |
|
"loss": 0.0025, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.0065897858319603, |
|
"grad_norm": 1.6802186965942383, |
|
"learning_rate": 7.0644444444444445e-06, |
|
"loss": 0.0044, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 3.047775947281713, |
|
"grad_norm": 0.2793377637863159, |
|
"learning_rate": 7.008888888888889e-06, |
|
"loss": 0.0015, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.088962108731466, |
|
"grad_norm": 0.07968737185001373, |
|
"learning_rate": 6.953333333333334e-06, |
|
"loss": 0.0015, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 3.130148270181219, |
|
"grad_norm": 0.04705784469842911, |
|
"learning_rate": 6.897777777777779e-06, |
|
"loss": 0.001, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.171334431630972, |
|
"grad_norm": 0.1313919574022293, |
|
"learning_rate": 6.842222222222222e-06, |
|
"loss": 0.0008, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 3.212520593080725, |
|
"grad_norm": 0.4405129551887512, |
|
"learning_rate": 6.786666666666667e-06, |
|
"loss": 0.0011, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.253706754530478, |
|
"grad_norm": 1.2785202264785767, |
|
"learning_rate": 6.731111111111111e-06, |
|
"loss": 0.0017, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 3.2948929159802307, |
|
"grad_norm": 0.04460221529006958, |
|
"learning_rate": 6.675555555555556e-06, |
|
"loss": 0.0008, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.2948929159802307, |
|
"eval_loss": 0.015330803580582142, |
|
"eval_runtime": 1719.9199, |
|
"eval_samples_per_second": 1.411, |
|
"eval_steps_per_second": 0.177, |
|
"eval_wer": 3.5733706206789404, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.3360790774299836, |
|
"grad_norm": 0.10665885359048843, |
|
"learning_rate": 6.620000000000001e-06, |
|
"loss": 0.0022, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 3.3772652388797364, |
|
"grad_norm": 4.365774631500244, |
|
"learning_rate": 6.564444444444446e-06, |
|
"loss": 0.0013, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 3.4184514003294892, |
|
"grad_norm": 0.2422148883342743, |
|
"learning_rate": 6.508888888888889e-06, |
|
"loss": 0.0011, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 3.459637561779242, |
|
"grad_norm": 0.14427553117275238, |
|
"learning_rate": 6.453333333333334e-06, |
|
"loss": 0.0009, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 3.500823723228995, |
|
"grad_norm": 0.0224087405949831, |
|
"learning_rate": 6.397777777777778e-06, |
|
"loss": 0.0005, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 3.5420098846787478, |
|
"grad_norm": 0.052274756133556366, |
|
"learning_rate": 6.342222222222223e-06, |
|
"loss": 0.0012, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 3.583196046128501, |
|
"grad_norm": 0.12983651459217072, |
|
"learning_rate": 6.286666666666668e-06, |
|
"loss": 0.0011, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 3.624382207578254, |
|
"grad_norm": 0.17136234045028687, |
|
"learning_rate": 6.231111111111111e-06, |
|
"loss": 0.0004, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 3.6655683690280068, |
|
"grad_norm": 0.02376146800816059, |
|
"learning_rate": 6.175555555555556e-06, |
|
"loss": 0.0006, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 3.7067545304777596, |
|
"grad_norm": 0.020898204296827316, |
|
"learning_rate": 6.120000000000001e-06, |
|
"loss": 0.0004, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 3.7479406919275124, |
|
"grad_norm": 0.011864184401929379, |
|
"learning_rate": 6.064444444444445e-06, |
|
"loss": 0.0014, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 3.7891268533772653, |
|
"grad_norm": 0.07441503554582596, |
|
"learning_rate": 6.00888888888889e-06, |
|
"loss": 0.0005, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 3.830313014827018, |
|
"grad_norm": 0.09472518414258957, |
|
"learning_rate": 5.9533333333333345e-06, |
|
"loss": 0.0008, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 3.871499176276771, |
|
"grad_norm": 0.5632221698760986, |
|
"learning_rate": 5.897777777777778e-06, |
|
"loss": 0.001, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 3.912685337726524, |
|
"grad_norm": 0.02372151054441929, |
|
"learning_rate": 5.8422222222222226e-06, |
|
"loss": 0.0008, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 3.9538714991762767, |
|
"grad_norm": 0.324942022562027, |
|
"learning_rate": 5.7866666666666674e-06, |
|
"loss": 0.0004, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.9950576606260295, |
|
"grad_norm": 0.022629514336586, |
|
"learning_rate": 5.7311111111111115e-06, |
|
"loss": 0.0003, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 4.036243822075782, |
|
"grad_norm": 0.012355553917586803, |
|
"learning_rate": 5.675555555555556e-06, |
|
"loss": 0.0005, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 4.077429983525535, |
|
"grad_norm": 0.021213717758655548, |
|
"learning_rate": 5.620000000000001e-06, |
|
"loss": 0.0005, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 4.118616144975288, |
|
"grad_norm": 0.01894320733845234, |
|
"learning_rate": 5.5644444444444444e-06, |
|
"loss": 0.0003, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 4.118616144975288, |
|
"eval_loss": 0.013398992829024792, |
|
"eval_runtime": 1739.4064, |
|
"eval_samples_per_second": 1.395, |
|
"eval_steps_per_second": 0.175, |
|
"eval_wer": 2.9130738755534837, |
|
"step": 2500 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 9, |
|
"save_steps": 500, |
|
"total_flos": 1.153879864639488e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
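
A minimal sketch (not part of the checkpoint itself) of how this trainer_state.json, as written by the Hugging Face Trainer, might be inspected: log_history mixes training entries (every logging_steps = 25 steps, carrying "loss") with evaluation entries (every eval_steps = 500 steps, carrying "eval_loss" and "eval_wer"), and best_model_checkpoint points at the checkpoint that achieved best_metric. The file path below is an assumption; point it at the actual checkpoint directory.

# sketch only: path is assumed, adjust to the real checkpoint location
import json

with open("final-whisper-for-initial-publish/checkpoint-2500/trainer_state.json") as f:
    state = json.load(f)

# Training logs carry "loss"; evaluation logs carry "eval_*" keys.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_wer" in e]

# WER progression across the eval points (steps 500, 1000, 1500, 2000, 2500 here).
for e in eval_logs:
    print(f"step {e['step']:>4}: eval_loss={e['eval_loss']:.4f}  WER={e['eval_wer']:.2f}%")

# Best tracked metric and the checkpoint it corresponds to (WER 2.91 at step 2500).
print("best:", state["best_metric"], "->", state["best_model_checkpoint"])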
|
|