{ |
|
"best_metric": 8.634474343167287, |
|
"best_model_checkpoint": "./whisper-small-mix-es/checkpoint-5000", |
|
"epoch": 1.0, |
|
"eval_steps": 1000, |
|
"global_step": 5000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.005, |
|
"grad_norm": 8.203697204589844, |
|
"learning_rate": 4.6000000000000004e-07, |
|
"loss": 0.9882, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.3468732833862305, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 0.9068, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.015, |
|
"grad_norm": 3.7499096393585205, |
|
"learning_rate": 1.46e-06, |
|
"loss": 0.7332, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.43481183052063, |
|
"learning_rate": 1.9600000000000003e-06, |
|
"loss": 0.628, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.025, |
|
"grad_norm": 2.902010440826416, |
|
"learning_rate": 2.46e-06, |
|
"loss": 0.4929, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.6406397819519043, |
|
"learning_rate": 2.96e-06, |
|
"loss": 0.4353, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.035, |
|
"grad_norm": 2.446483850479126, |
|
"learning_rate": 3.46e-06, |
|
"loss": 0.3881, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.8479089736938477, |
|
"learning_rate": 3.96e-06, |
|
"loss": 0.3941, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.045, |
|
"grad_norm": 2.583815813064575, |
|
"learning_rate": 4.4600000000000005e-06, |
|
"loss": 0.431, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.4748358726501465, |
|
"learning_rate": 4.960000000000001e-06, |
|
"loss": 0.4073, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.055, |
|
"grad_norm": 2.5640480518341064, |
|
"learning_rate": 5.460000000000001e-06, |
|
"loss": 0.4266, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.221160650253296, |
|
"learning_rate": 5.9600000000000005e-06, |
|
"loss": 0.4025, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.065, |
|
"grad_norm": 2.4895880222320557, |
|
"learning_rate": 6.460000000000001e-06, |
|
"loss": 0.3504, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.318375825881958, |
|
"learning_rate": 6.96e-06, |
|
"loss": 0.311, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.075, |
|
"grad_norm": 3.063900947570801, |
|
"learning_rate": 7.4600000000000006e-06, |
|
"loss": 0.3309, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.898184061050415, |
|
"learning_rate": 7.960000000000002e-06, |
|
"loss": 0.3044, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.085, |
|
"grad_norm": 2.2967689037323, |
|
"learning_rate": 8.46e-06, |
|
"loss": 0.3071, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.990691900253296, |
|
"learning_rate": 8.96e-06, |
|
"loss": 0.3319, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.095, |
|
"grad_norm": 2.604433536529541, |
|
"learning_rate": 9.460000000000001e-06, |
|
"loss": 0.3606, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.8391056060791016, |
|
"learning_rate": 9.960000000000001e-06, |
|
"loss": 0.3517, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.105, |
|
"grad_norm": 2.3469793796539307, |
|
"learning_rate": 9.94888888888889e-06, |
|
"loss": 0.3338, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7184339761734009, |
|
"learning_rate": 9.893333333333334e-06, |
|
"loss": 0.3195, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.115, |
|
"grad_norm": 2.248863697052002, |
|
"learning_rate": 9.837777777777778e-06, |
|
"loss": 0.2926, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.592674970626831, |
|
"learning_rate": 9.782222222222222e-06, |
|
"loss": 0.3141, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.125, |
|
"grad_norm": 1.9537230730056763, |
|
"learning_rate": 9.726666666666668e-06, |
|
"loss": 0.2778, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.4650800228118896, |
|
"learning_rate": 9.671111111111112e-06, |
|
"loss": 0.2952, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.135, |
|
"grad_norm": 2.378822088241577, |
|
"learning_rate": 9.615555555555558e-06, |
|
"loss": 0.3339, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.6371264457702637, |
|
"learning_rate": 9.56e-06, |
|
"loss": 0.3843, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.145, |
|
"grad_norm": 2.7936806678771973, |
|
"learning_rate": 9.504444444444446e-06, |
|
"loss": 0.3703, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.70261287689209, |
|
"learning_rate": 9.44888888888889e-06, |
|
"loss": 0.359, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.155, |
|
"grad_norm": 2.0972862243652344, |
|
"learning_rate": 9.393333333333334e-06, |
|
"loss": 0.2747, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.8365142345428467, |
|
"learning_rate": 9.33777777777778e-06, |
|
"loss": 0.2849, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.165, |
|
"grad_norm": 2.2393555641174316, |
|
"learning_rate": 9.282222222222222e-06, |
|
"loss": 0.2273, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.7303996086120605, |
|
"learning_rate": 9.226666666666668e-06, |
|
"loss": 0.2102, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.175, |
|
"grad_norm": 2.153916597366333, |
|
"learning_rate": 9.171111111111112e-06, |
|
"loss": 0.3088, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.3928215503692627, |
|
"learning_rate": 9.115555555555556e-06, |
|
"loss": 0.2912, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.185, |
|
"grad_norm": 2.6662864685058594, |
|
"learning_rate": 9.060000000000001e-06, |
|
"loss": 0.2926, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.274172067642212, |
|
"learning_rate": 9.004444444444445e-06, |
|
"loss": 0.2777, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.195, |
|
"grad_norm": 2.453396797180176, |
|
"learning_rate": 8.94888888888889e-06, |
|
"loss": 0.2869, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.171252489089966, |
|
"learning_rate": 8.893333333333333e-06, |
|
"loss": 0.247, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 0.21598759293556213, |
|
"eval_runtime": 708.6287, |
|
"eval_samples_per_second": 22.377, |
|
"eval_steps_per_second": 2.798, |
|
"eval_wer": 10.397468000457591, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.205, |
|
"grad_norm": 2.494718313217163, |
|
"learning_rate": 8.83777777777778e-06, |
|
"loss": 0.2519, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.5458056926727295, |
|
"learning_rate": 8.782222222222223e-06, |
|
"loss": 0.2523, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.215, |
|
"grad_norm": 1.856636643409729, |
|
"learning_rate": 8.726666666666667e-06, |
|
"loss": 0.2481, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.324716091156006, |
|
"learning_rate": 8.671111111111113e-06, |
|
"loss": 0.2453, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.225, |
|
"grad_norm": 1.7981597185134888, |
|
"learning_rate": 8.615555555555555e-06, |
|
"loss": 0.2297, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.4602575302124023, |
|
"learning_rate": 8.560000000000001e-06, |
|
"loss": 0.2492, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.235, |
|
"grad_norm": 2.964137077331543, |
|
"learning_rate": 8.504444444444445e-06, |
|
"loss": 0.2793, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.031019687652588, |
|
"learning_rate": 8.448888888888889e-06, |
|
"loss": 0.3005, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.245, |
|
"grad_norm": 2.0715112686157227, |
|
"learning_rate": 8.393333333333335e-06, |
|
"loss": 0.2397, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.2880711555480957, |
|
"learning_rate": 8.337777777777777e-06, |
|
"loss": 0.251, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.255, |
|
"grad_norm": 2.0177316665649414, |
|
"learning_rate": 8.282222222222223e-06, |
|
"loss": 0.2234, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 2.3861050605773926, |
|
"learning_rate": 8.226666666666667e-06, |
|
"loss": 0.2855, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.265, |
|
"grad_norm": 2.8440380096435547, |
|
"learning_rate": 8.171111111111113e-06, |
|
"loss": 0.2608, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 2.476083993911743, |
|
"learning_rate": 8.115555555555557e-06, |
|
"loss": 0.255, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.275, |
|
"grad_norm": 2.3754682540893555, |
|
"learning_rate": 8.06e-06, |
|
"loss": 0.2479, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 2.507122755050659, |
|
"learning_rate": 8.004444444444445e-06, |
|
"loss": 0.2426, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.285, |
|
"grad_norm": 2.9778430461883545, |
|
"learning_rate": 7.948888888888889e-06, |
|
"loss": 0.2644, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 2.6550915241241455, |
|
"learning_rate": 7.893333333333335e-06, |
|
"loss": 0.2544, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.295, |
|
"grad_norm": 2.50137996673584, |
|
"learning_rate": 7.837777777777779e-06, |
|
"loss": 0.3037, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.3021984100341797, |
|
"learning_rate": 7.782222222222223e-06, |
|
"loss": 0.2831, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.305, |
|
"grad_norm": 2.2827396392822266, |
|
"learning_rate": 7.726666666666667e-06, |
|
"loss": 0.2387, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.9767779111862183, |
|
"learning_rate": 7.67111111111111e-06, |
|
"loss": 0.2293, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.315, |
|
"grad_norm": 2.132072687149048, |
|
"learning_rate": 7.6155555555555564e-06, |
|
"loss": 0.1855, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 2.581895112991333, |
|
"learning_rate": 7.5600000000000005e-06, |
|
"loss": 0.208, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.325, |
|
"grad_norm": 2.436007261276245, |
|
"learning_rate": 7.504444444444445e-06, |
|
"loss": 0.2112, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 2.5022451877593994, |
|
"learning_rate": 7.44888888888889e-06, |
|
"loss": 0.2001, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.335, |
|
"grad_norm": 2.7856762409210205, |
|
"learning_rate": 7.393333333333333e-06, |
|
"loss": 0.2304, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 3.307318925857544, |
|
"learning_rate": 7.337777777777778e-06, |
|
"loss": 0.2489, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.345, |
|
"grad_norm": 2.104020118713379, |
|
"learning_rate": 7.282222222222222e-06, |
|
"loss": 0.204, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 2.099266529083252, |
|
"learning_rate": 7.226666666666667e-06, |
|
"loss": 0.1787, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.355, |
|
"grad_norm": 2.0971200466156006, |
|
"learning_rate": 7.171111111111112e-06, |
|
"loss": 0.1672, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.7668758630752563, |
|
"learning_rate": 7.115555555555557e-06, |
|
"loss": 0.1537, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.365, |
|
"grad_norm": 2.1447267532348633, |
|
"learning_rate": 7.06e-06, |
|
"loss": 0.1568, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 2.092658758163452, |
|
"learning_rate": 7.004444444444445e-06, |
|
"loss": 0.1509, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.375, |
|
"grad_norm": 2.127138614654541, |
|
"learning_rate": 6.948888888888889e-06, |
|
"loss": 0.1447, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.7736611366271973, |
|
"learning_rate": 6.893333333333334e-06, |
|
"loss": 0.1479, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.385, |
|
"grad_norm": 1.845276951789856, |
|
"learning_rate": 6.837777777777779e-06, |
|
"loss": 0.1402, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 2.130812883377075, |
|
"learning_rate": 6.782222222222222e-06, |
|
"loss": 0.1394, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.395, |
|
"grad_norm": 1.9191032648086548, |
|
"learning_rate": 6.726666666666667e-06, |
|
"loss": 0.1491, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.805740237236023, |
|
"learning_rate": 6.671111111111112e-06, |
|
"loss": 0.1337, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"eval_loss": 0.20104137063026428, |
|
"eval_runtime": 702.6623, |
|
"eval_samples_per_second": 22.567, |
|
"eval_steps_per_second": 2.822, |
|
"eval_wer": 9.674856685266864, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.405, |
|
"grad_norm": 2.0551412105560303, |
|
"learning_rate": 6.615555555555556e-06, |
|
"loss": 0.1269, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 2.093939781188965, |
|
"learning_rate": 6.560000000000001e-06, |
|
"loss": 0.1412, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.415, |
|
"grad_norm": 2.695679187774658, |
|
"learning_rate": 6.504444444444446e-06, |
|
"loss": 0.1537, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 2.2882978916168213, |
|
"learning_rate": 6.448888888888889e-06, |
|
"loss": 0.1669, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.425, |
|
"grad_norm": 1.3621838092803955, |
|
"learning_rate": 6.393333333333334e-06, |
|
"loss": 0.1607, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 2.160801410675049, |
|
"learning_rate": 6.3377777777777786e-06, |
|
"loss": 0.1482, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.435, |
|
"grad_norm": 2.119140863418579, |
|
"learning_rate": 6.282222222222223e-06, |
|
"loss": 0.1552, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.6484750509262085, |
|
"learning_rate": 6.2266666666666675e-06, |
|
"loss": 0.1592, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.445, |
|
"grad_norm": 1.9934930801391602, |
|
"learning_rate": 6.171111111111112e-06, |
|
"loss": 0.1527, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.8050808906555176, |
|
"learning_rate": 6.1155555555555555e-06, |
|
"loss": 0.1487, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.455, |
|
"grad_norm": 2.1619839668273926, |
|
"learning_rate": 6.0600000000000004e-06, |
|
"loss": 0.1462, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 2.169475555419922, |
|
"learning_rate": 6.004444444444445e-06, |
|
"loss": 0.1566, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.465, |
|
"grad_norm": 2.3247809410095215, |
|
"learning_rate": 5.948888888888889e-06, |
|
"loss": 0.1527, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.519680380821228, |
|
"learning_rate": 5.893333333333334e-06, |
|
"loss": 0.1368, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.475, |
|
"grad_norm": 1.9055320024490356, |
|
"learning_rate": 5.837777777777777e-06, |
|
"loss": 0.13, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.674070954322815, |
|
"learning_rate": 5.782222222222222e-06, |
|
"loss": 0.1236, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.485, |
|
"grad_norm": 2.0586557388305664, |
|
"learning_rate": 5.726666666666667e-06, |
|
"loss": 0.1353, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 2.190614700317383, |
|
"learning_rate": 5.671111111111112e-06, |
|
"loss": 0.1294, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.495, |
|
"grad_norm": 1.7952390909194946, |
|
"learning_rate": 5.615555555555556e-06, |
|
"loss": 0.1371, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.9759557247161865, |
|
"learning_rate": 5.560000000000001e-06, |
|
"loss": 0.1525, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.505, |
|
"grad_norm": 1.5587725639343262, |
|
"learning_rate": 5.504444444444444e-06, |
|
"loss": 0.1334, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.9309734106063843, |
|
"learning_rate": 5.448888888888889e-06, |
|
"loss": 0.1277, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.515, |
|
"grad_norm": 1.6689786911010742, |
|
"learning_rate": 5.393333333333334e-06, |
|
"loss": 0.1312, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.5617754459381104, |
|
"learning_rate": 5.337777777777779e-06, |
|
"loss": 0.1421, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.525, |
|
"grad_norm": 2.1239089965820312, |
|
"learning_rate": 5.282222222222223e-06, |
|
"loss": 0.1338, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.6004153490066528, |
|
"learning_rate": 5.226666666666667e-06, |
|
"loss": 0.1363, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.535, |
|
"grad_norm": 1.5653680562973022, |
|
"learning_rate": 5.171111111111111e-06, |
|
"loss": 0.1309, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.5964562892913818, |
|
"learning_rate": 5.115555555555556e-06, |
|
"loss": 0.133, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.545, |
|
"grad_norm": 1.9546246528625488, |
|
"learning_rate": 5.060000000000001e-06, |
|
"loss": 0.1281, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.6310886144638062, |
|
"learning_rate": 5.004444444444445e-06, |
|
"loss": 0.1361, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.555, |
|
"grad_norm": 1.7071685791015625, |
|
"learning_rate": 4.94888888888889e-06, |
|
"loss": 0.1382, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.6160541772842407, |
|
"learning_rate": 4.893333333333334e-06, |
|
"loss": 0.1394, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.565, |
|
"grad_norm": 1.4931234121322632, |
|
"learning_rate": 4.837777777777778e-06, |
|
"loss": 0.1221, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.4717397689819336, |
|
"learning_rate": 4.7822222222222226e-06, |
|
"loss": 0.1106, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.575, |
|
"grad_norm": 1.334092140197754, |
|
"learning_rate": 4.7266666666666674e-06, |
|
"loss": 0.1299, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.9642376899719238, |
|
"learning_rate": 4.6711111111111115e-06, |
|
"loss": 0.1509, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.585, |
|
"grad_norm": 1.5685168504714966, |
|
"learning_rate": 4.6155555555555555e-06, |
|
"loss": 0.1367, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.2653120756149292, |
|
"learning_rate": 4.56e-06, |
|
"loss": 0.1228, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.595, |
|
"grad_norm": 1.6056393384933472, |
|
"learning_rate": 4.504444444444444e-06, |
|
"loss": 0.1081, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.4478392601013184, |
|
"learning_rate": 4.448888888888889e-06, |
|
"loss": 0.1401, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"eval_loss": 0.19045059382915497, |
|
"eval_runtime": 711.5811, |
|
"eval_samples_per_second": 22.284, |
|
"eval_steps_per_second": 2.787, |
|
"eval_wer": 9.094606790131303, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.605, |
|
"grad_norm": 1.8749899864196777, |
|
"learning_rate": 4.393333333333334e-06, |
|
"loss": 0.1864, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.5327539443969727, |
|
"learning_rate": 4.337777777777778e-06, |
|
"loss": 0.184, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.615, |
|
"grad_norm": 2.0878913402557373, |
|
"learning_rate": 4.282222222222222e-06, |
|
"loss": 0.1969, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.027475118637085, |
|
"learning_rate": 4.226666666666667e-06, |
|
"loss": 0.1979, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 1.4763484001159668, |
|
"learning_rate": 4.171111111111111e-06, |
|
"loss": 0.1807, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.0090372562408447, |
|
"learning_rate": 4.115555555555556e-06, |
|
"loss": 0.1498, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.635, |
|
"grad_norm": 1.4188919067382812, |
|
"learning_rate": 4.060000000000001e-06, |
|
"loss": 0.1308, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.6923434734344482, |
|
"learning_rate": 4.004444444444445e-06, |
|
"loss": 0.1422, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.645, |
|
"grad_norm": 1.8720096349716187, |
|
"learning_rate": 3.948888888888889e-06, |
|
"loss": 0.133, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.367676854133606, |
|
"learning_rate": 3.893333333333333e-06, |
|
"loss": 0.1265, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.655, |
|
"grad_norm": 1.9834328889846802, |
|
"learning_rate": 3.837777777777778e-06, |
|
"loss": 0.1323, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.014779567718506, |
|
"learning_rate": 3.782222222222223e-06, |
|
"loss": 0.141, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.665, |
|
"grad_norm": 1.3820104598999023, |
|
"learning_rate": 3.726666666666667e-06, |
|
"loss": 0.1308, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.9776164293289185, |
|
"learning_rate": 3.6711111111111113e-06, |
|
"loss": 0.1331, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.675, |
|
"grad_norm": 1.625608205795288, |
|
"learning_rate": 3.615555555555556e-06, |
|
"loss": 0.116, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.7291717529296875, |
|
"learning_rate": 3.5600000000000002e-06, |
|
"loss": 0.1068, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.685, |
|
"grad_norm": 1.7692865133285522, |
|
"learning_rate": 3.5044444444444447e-06, |
|
"loss": 0.1134, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.9377959966659546, |
|
"learning_rate": 3.4488888888888896e-06, |
|
"loss": 0.1939, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.695, |
|
"grad_norm": 2.1209535598754883, |
|
"learning_rate": 3.3933333333333336e-06, |
|
"loss": 0.2623, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.0836637020111084, |
|
"learning_rate": 3.337777777777778e-06, |
|
"loss": 0.2272, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.705, |
|
"grad_norm": 1.8429718017578125, |
|
"learning_rate": 3.282222222222223e-06, |
|
"loss": 0.2041, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.7450307607650757, |
|
"learning_rate": 3.226666666666667e-06, |
|
"loss": 0.224, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.715, |
|
"grad_norm": 2.0255823135375977, |
|
"learning_rate": 3.1711111111111114e-06, |
|
"loss": 0.2067, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.1434662342071533, |
|
"learning_rate": 3.1155555555555555e-06, |
|
"loss": 0.182, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.725, |
|
"grad_norm": 1.3078871965408325, |
|
"learning_rate": 3.0600000000000003e-06, |
|
"loss": 0.1625, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.7805925607681274, |
|
"learning_rate": 3.004444444444445e-06, |
|
"loss": 0.1701, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.735, |
|
"grad_norm": 1.5756222009658813, |
|
"learning_rate": 2.948888888888889e-06, |
|
"loss": 0.1473, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.343863844871521, |
|
"learning_rate": 2.8933333333333337e-06, |
|
"loss": 0.1385, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.745, |
|
"grad_norm": 1.7856868505477905, |
|
"learning_rate": 2.837777777777778e-06, |
|
"loss": 0.156, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.6348956823349, |
|
"learning_rate": 2.7822222222222222e-06, |
|
"loss": 0.1477, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.755, |
|
"grad_norm": 1.698569655418396, |
|
"learning_rate": 2.726666666666667e-06, |
|
"loss": 0.1488, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.1124930381774902, |
|
"learning_rate": 2.6711111111111116e-06, |
|
"loss": 0.1365, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.765, |
|
"grad_norm": 1.5699976682662964, |
|
"learning_rate": 2.6155555555555556e-06, |
|
"loss": 0.1438, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.302807092666626, |
|
"learning_rate": 2.56e-06, |
|
"loss": 0.1238, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.775, |
|
"grad_norm": 1.5388458967208862, |
|
"learning_rate": 2.504444444444445e-06, |
|
"loss": 0.1264, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.3941725492477417, |
|
"learning_rate": 2.448888888888889e-06, |
|
"loss": 0.1182, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.785, |
|
"grad_norm": 1.8302580118179321, |
|
"learning_rate": 2.3933333333333334e-06, |
|
"loss": 0.1201, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.6596505641937256, |
|
"learning_rate": 2.337777777777778e-06, |
|
"loss": 0.114, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.795, |
|
"grad_norm": 1.5720983743667603, |
|
"learning_rate": 2.2822222222222223e-06, |
|
"loss": 0.1149, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.3143093585968018, |
|
"learning_rate": 2.226666666666667e-06, |
|
"loss": 0.1714, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"eval_loss": 0.18485930562019348, |
|
"eval_runtime": 702.3906, |
|
"eval_samples_per_second": 22.576, |
|
"eval_steps_per_second": 2.823, |
|
"eval_wer": 8.85500743584203, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.805, |
|
"grad_norm": 1.8013464212417603, |
|
"learning_rate": 2.1711111111111113e-06, |
|
"loss": 0.1782, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.6786607503890991, |
|
"learning_rate": 2.1155555555555557e-06, |
|
"loss": 0.1262, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.815, |
|
"grad_norm": 1.412192702293396, |
|
"learning_rate": 2.06e-06, |
|
"loss": 0.1257, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.5031375885009766, |
|
"learning_rate": 2.0044444444444446e-06, |
|
"loss": 0.1164, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.825, |
|
"grad_norm": 1.3566547632217407, |
|
"learning_rate": 1.948888888888889e-06, |
|
"loss": 0.1238, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.5606483221054077, |
|
"learning_rate": 1.8933333333333333e-06, |
|
"loss": 0.1029, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.835, |
|
"grad_norm": 2.154038429260254, |
|
"learning_rate": 1.837777777777778e-06, |
|
"loss": 0.1045, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.4622442722320557, |
|
"learning_rate": 1.7822222222222225e-06, |
|
"loss": 0.1091, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.845, |
|
"grad_norm": 1.7122653722763062, |
|
"learning_rate": 1.7266666666666667e-06, |
|
"loss": 0.1158, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.029569625854492, |
|
"learning_rate": 1.6711111111111112e-06, |
|
"loss": 0.1267, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.855, |
|
"grad_norm": 1.5599546432495117, |
|
"learning_rate": 1.6155555555555559e-06, |
|
"loss": 0.1084, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.735775351524353, |
|
"learning_rate": 1.56e-06, |
|
"loss": 0.1028, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.865, |
|
"grad_norm": 1.6177542209625244, |
|
"learning_rate": 1.5044444444444446e-06, |
|
"loss": 0.1174, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.7929385900497437, |
|
"learning_rate": 1.4488888888888892e-06, |
|
"loss": 0.1193, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.875, |
|
"grad_norm": 1.6142146587371826, |
|
"learning_rate": 1.3933333333333335e-06, |
|
"loss": 0.1243, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.6547541618347168, |
|
"learning_rate": 1.337777777777778e-06, |
|
"loss": 0.1413, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.885, |
|
"grad_norm": 1.3043129444122314, |
|
"learning_rate": 1.2822222222222222e-06, |
|
"loss": 0.1394, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.324398398399353, |
|
"learning_rate": 1.2266666666666666e-06, |
|
"loss": 0.1262, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.895, |
|
"grad_norm": 1.5393497943878174, |
|
"learning_rate": 1.171111111111111e-06, |
|
"loss": 0.0991, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.1875627040863037, |
|
"learning_rate": 1.1155555555555558e-06, |
|
"loss": 0.0824, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.905, |
|
"grad_norm": 1.2755403518676758, |
|
"learning_rate": 1.06e-06, |
|
"loss": 0.0862, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.2878875732421875, |
|
"learning_rate": 1.0044444444444445e-06, |
|
"loss": 0.0856, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.915, |
|
"grad_norm": 1.5324125289916992, |
|
"learning_rate": 9.488888888888889e-07, |
|
"loss": 0.0896, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.5242027044296265, |
|
"learning_rate": 8.933333333333334e-07, |
|
"loss": 0.0996, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.925, |
|
"grad_norm": 1.3442718982696533, |
|
"learning_rate": 8.37777777777778e-07, |
|
"loss": 0.0914, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.601271629333496, |
|
"learning_rate": 7.822222222222223e-07, |
|
"loss": 0.0891, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.935, |
|
"grad_norm": 1.5180890560150146, |
|
"learning_rate": 7.266666666666668e-07, |
|
"loss": 0.0912, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.3631670475006104, |
|
"learning_rate": 6.711111111111111e-07, |
|
"loss": 0.0952, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.945, |
|
"grad_norm": 1.251734733581543, |
|
"learning_rate": 6.155555555555556e-07, |
|
"loss": 0.0924, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.3705124855041504, |
|
"learning_rate": 5.6e-07, |
|
"loss": 0.085, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.955, |
|
"grad_norm": 1.46409273147583, |
|
"learning_rate": 5.044444444444445e-07, |
|
"loss": 0.0955, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.6144453287124634, |
|
"learning_rate": 4.488888888888889e-07, |
|
"loss": 0.0852, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.965, |
|
"grad_norm": 1.9570475816726685, |
|
"learning_rate": 3.9333333333333336e-07, |
|
"loss": 0.0979, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.666892409324646, |
|
"learning_rate": 3.3777777777777777e-07, |
|
"loss": 0.0954, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.975, |
|
"grad_norm": 1.4862993955612183, |
|
"learning_rate": 2.822222222222222e-07, |
|
"loss": 0.0869, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.2195048332214355, |
|
"learning_rate": 2.266666666666667e-07, |
|
"loss": 0.094, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.985, |
|
"grad_norm": 1.4709336757659912, |
|
"learning_rate": 1.7111111111111114e-07, |
|
"loss": 0.0929, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.3563309907913208, |
|
"learning_rate": 1.1555555555555556e-07, |
|
"loss": 0.0915, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.995, |
|
"grad_norm": 1.3233599662780762, |
|
"learning_rate": 6.000000000000001e-08, |
|
"loss": 0.0937, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5326294898986816, |
|
"learning_rate": 4.444444444444445e-09, |
|
"loss": 0.1046, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.18091823160648346, |
|
"eval_runtime": 696.7327, |
|
"eval_samples_per_second": 22.759, |
|
"eval_steps_per_second": 2.846, |
|
"eval_wer": 8.634474343167287, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 5000, |
|
"total_flos": 9.23473281024e+19, |
|
"train_loss": 0.20034765486717224, |
|
"train_runtime": 18318.4818, |
|
"train_samples_per_second": 17.469, |
|
"train_steps_per_second": 0.273 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.23473281024e+19, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |