{
  "best_metric": 33.46883689165677,
  "best_model_checkpoint": "./whisper-small-fa-aug/checkpoint-5000",
  "epoch": 4.0249,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 3.8, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.200000000000001e-07, |
|
"loss": 3.0497, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.42e-06, |
|
"loss": 2.0102, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 1.5948, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.42e-06, |
|
"loss": 1.4094, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.92e-06, |
|
"loss": 1.2611, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 1.1366, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 1.0817, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.42e-06, |
|
"loss": 1.0324, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.92e-06, |
|
"loss": 0.9176, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 0.7975, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.92e-06, |
|
"loss": 0.7079, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.42e-06, |
|
"loss": 0.5832, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.92e-06, |
|
"loss": 0.4469, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.420000000000001e-06, |
|
"loss": 0.3913, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.92e-06, |
|
"loss": 0.4189, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.42e-06, |
|
"loss": 0.3756, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.920000000000001e-06, |
|
"loss": 0.3876, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.42e-06, |
|
"loss": 0.3802, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.920000000000002e-06, |
|
"loss": 0.4061, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.994683544303798e-06, |
|
"loss": 0.3885, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.988354430379748e-06, |
|
"loss": 0.3449, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.982025316455697e-06, |
|
"loss": 0.3292, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.975696202531647e-06, |
|
"loss": 0.3439, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.969367088607596e-06, |
|
"loss": 0.3031, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.963037974683545e-06, |
|
"loss": 0.3159, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.956708860759495e-06, |
|
"loss": 0.3304, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.950379746835444e-06, |
|
"loss": 0.2918, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.944050632911392e-06, |
|
"loss": 0.2785, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.937721518987341e-06, |
|
"loss": 0.2618, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.931392405063291e-06, |
|
"loss": 0.2862, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.92506329113924e-06, |
|
"loss": 0.2962, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.91873417721519e-06, |
|
"loss": 0.2935, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.912405063291141e-06, |
|
"loss": 0.2827, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.90607594936709e-06, |
|
"loss": 0.2702, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.89974683544304e-06, |
|
"loss": 0.2987, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.89341772151899e-06, |
|
"loss": 0.3067, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.887088607594939e-06, |
|
"loss": 0.2962, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.880759493670888e-06, |
|
"loss": 0.2807, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.874430379746836e-06, |
|
"loss": 0.2533, |
|
"step": 1000 |
|
}, |
|
    {
      "epoch": 0.03,
      "eval_loss": 0.3274388611316681,
      "eval_runtime": 1481.9106,
      "eval_samples_per_second": 7.025,
      "eval_steps_per_second": 0.439,
      "eval_wer": 40.657571667832,
      "step": 1000
    },
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.868101265822785e-06, |
|
"loss": 0.21, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.861772151898735e-06, |
|
"loss": 0.2463, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.855443037974684e-06, |
|
"loss": 0.2688, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.849113924050633e-06, |
|
"loss": 0.2293, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.842784810126583e-06, |
|
"loss": 0.2283, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.836455696202532e-06, |
|
"loss": 0.228, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.830126582278482e-06, |
|
"loss": 0.1919, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.823797468354431e-06, |
|
"loss": 0.2173, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.81746835443038e-06, |
|
"loss": 0.2066, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.81113924050633e-06, |
|
"loss": 0.1826, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.80481012658228e-06, |
|
"loss": 0.1855, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.798481012658229e-06, |
|
"loss": 0.1702, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.792151898734178e-06, |
|
"loss": 0.1714, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.785822784810128e-06, |
|
"loss": 0.1686, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.779493670886077e-06, |
|
"loss": 0.1762, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.773164556962027e-06, |
|
"loss": 0.1722, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.766835443037976e-06, |
|
"loss": 0.2051, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.760506329113924e-06, |
|
"loss": 0.2071, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.754177215189873e-06, |
|
"loss": 0.1664, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.747848101265823e-06, |
|
"loss": 0.1871, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.741518987341772e-06, |
|
"loss": 0.1893, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.735189873417721e-06, |
|
"loss": 0.1756, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.728860759493671e-06, |
|
"loss": 0.1687, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.72253164556962e-06, |
|
"loss": 0.1685, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.71620253164557e-06, |
|
"loss": 0.1805, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.70987341772152e-06, |
|
"loss": 0.1868, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.70354430379747e-06, |
|
"loss": 0.212, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.69721518987342e-06, |
|
"loss": 0.2019, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.69088607594937e-06, |
|
"loss": 0.2076, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.684556962025317e-06, |
|
"loss": 0.1971, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.678227848101266e-06, |
|
"loss": 0.2424, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.671898734177216e-06, |
|
"loss": 0.2346, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.665569620253165e-06, |
|
"loss": 0.2436, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.659240506329115e-06, |
|
"loss": 0.2613, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.652911392405064e-06, |
|
"loss": 0.2781, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.646582278481013e-06, |
|
"loss": 0.2465, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.640253164556963e-06, |
|
"loss": 0.2386, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.633924050632912e-06, |
|
"loss": 0.2298, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.627594936708862e-06, |
|
"loss": 0.264, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.621265822784811e-06, |
|
"loss": 0.2077, |
|
"step": 2000 |
|
}, |
|
    {
      "epoch": 1.01,
      "eval_loss": 0.25397753715515137,
      "eval_runtime": 1477.9319,
      "eval_samples_per_second": 7.044,
      "eval_steps_per_second": 0.44,
      "eval_wer": 35.12252231743606,
      "step": 2000
    },
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.61493670886076e-06, |
|
"loss": 0.192, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.60860759493671e-06, |
|
"loss": 0.1903, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.60227848101266e-06, |
|
"loss": 0.2135, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.595949367088609e-06, |
|
"loss": 0.1917, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.589620253164558e-06, |
|
"loss": 0.1579, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.583291139240508e-06, |
|
"loss": 0.1779, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.576962025316457e-06, |
|
"loss": 0.1504, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.570632911392405e-06, |
|
"loss": 0.1574, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.564303797468354e-06, |
|
"loss": 0.1757, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.557974683544304e-06, |
|
"loss": 0.1996, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.551645569620253e-06, |
|
"loss": 0.1716, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.545316455696203e-06, |
|
"loss": 0.1509, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.538987341772152e-06, |
|
"loss": 0.1402, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.532658227848101e-06, |
|
"loss": 0.1612, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.526329113924051e-06, |
|
"loss": 0.1357, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.52e-06, |
|
"loss": 0.1426, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.51367088607595e-06, |
|
"loss": 0.1644, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.5073417721519e-06, |
|
"loss": 0.1196, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.501012658227849e-06, |
|
"loss": 0.1332, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.494683544303798e-06, |
|
"loss": 0.1096, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.488354430379747e-06, |
|
"loss": 0.1263, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.482025316455697e-06, |
|
"loss": 0.1467, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.475696202531646e-06, |
|
"loss": 0.1372, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.469367088607596e-06, |
|
"loss": 0.1256, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.463037974683545e-06, |
|
"loss": 0.1319, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.456708860759495e-06, |
|
"loss": 0.1385, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.450379746835444e-06, |
|
"loss": 0.147, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.444050632911393e-06, |
|
"loss": 0.1527, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.437721518987343e-06, |
|
"loss": 0.1357, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.431392405063292e-06, |
|
"loss": 0.122, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.425063291139242e-06, |
|
"loss": 0.0918, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.418734177215191e-06, |
|
"loss": 0.1218, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.41240506329114e-06, |
|
"loss": 0.139, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.40607594936709e-06, |
|
"loss": 0.1205, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.39974683544304e-06, |
|
"loss": 0.1158, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.393417721518989e-06, |
|
"loss": 0.1156, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.387088607594937e-06, |
|
"loss": 0.1044, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.380759493670886e-06, |
|
"loss": 0.112, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.374430379746835e-06, |
|
"loss": 0.1023, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.368101265822785e-06, |
|
"loss": 0.0966, |
|
"step": 3000 |
|
}, |
|
    {
      "epoch": 1.03,
      "eval_loss": 0.2638459801673889,
      "eval_runtime": 1500.936,
      "eval_samples_per_second": 6.936,
      "eval_steps_per_second": 0.434,
      "eval_wer": 37.93069806013106,
      "step": 3000
    },
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.361772151898734e-06, |
|
"loss": 0.0942, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.355443037974684e-06, |
|
"loss": 0.0826, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.349113924050633e-06, |
|
"loss": 0.086, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.342784810126583e-06, |
|
"loss": 0.0956, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.336455696202532e-06, |
|
"loss": 0.0935, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.330126582278481e-06, |
|
"loss": 0.0923, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.323797468354431e-06, |
|
"loss": 0.1163, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.31746835443038e-06, |
|
"loss": 0.1099, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.31113924050633e-06, |
|
"loss": 0.0879, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.30481012658228e-06, |
|
"loss": 0.0994, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.298481012658229e-06, |
|
"loss": 0.1069, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.292151898734178e-06, |
|
"loss": 0.0957, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.285822784810127e-06, |
|
"loss": 0.0866, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.279493670886077e-06, |
|
"loss": 0.0965, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.273164556962026e-06, |
|
"loss": 0.0952, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.266835443037976e-06, |
|
"loss": 0.1178, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.260506329113925e-06, |
|
"loss": 0.1154, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.254177215189875e-06, |
|
"loss": 0.1075, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.247848101265824e-06, |
|
"loss": 0.1161, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.241518987341773e-06, |
|
"loss": 0.1153, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.235189873417723e-06, |
|
"loss": 0.1343, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.228860759493672e-06, |
|
"loss": 0.1374, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.222531645569622e-06, |
|
"loss": 0.1436, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.216202531645571e-06, |
|
"loss": 0.1561, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.20987341772152e-06, |
|
"loss": 0.1709, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.20354430379747e-06, |
|
"loss": 0.142, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.197215189873418e-06, |
|
"loss": 0.1389, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.190886075949367e-06, |
|
"loss": 0.1398, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.184556962025317e-06, |
|
"loss": 0.1526, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.178227848101266e-06, |
|
"loss": 0.1273, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.171898734177215e-06, |
|
"loss": 0.1119, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.165569620253165e-06, |
|
"loss": 0.1088, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.159493670886076e-06, |
|
"loss": 0.1362, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.153164556962026e-06, |
|
"loss": 0.1247, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.146835443037975e-06, |
|
"loss": 0.0974, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.140506329113925e-06, |
|
"loss": 0.1218, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.134177215189874e-06, |
|
"loss": 0.0846, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.127848101265823e-06, |
|
"loss": 0.0964, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.121518987341773e-06, |
|
"loss": 0.1166, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.115189873417722e-06, |
|
"loss": 0.1212, |
|
"step": 4000 |
|
}, |
|
    {
      "epoch": 2.01,
      "eval_loss": 0.22517943382263184,
      "eval_runtime": 1537.556,
      "eval_samples_per_second": 6.771,
      "eval_steps_per_second": 0.423,
      "eval_wer": 69.04990325045935,
      "step": 4000
    },
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.108860759493672e-06, |
|
"loss": 0.1076, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.102531645569621e-06, |
|
"loss": 0.086, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.09620253164557e-06, |
|
"loss": 0.0773, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.08987341772152e-06, |
|
"loss": 0.098, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.08354430379747e-06, |
|
"loss": 0.0747, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.077215189873419e-06, |
|
"loss": 0.0895, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.070886075949368e-06, |
|
"loss": 0.0957, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.064556962025318e-06, |
|
"loss": 0.0789, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.058227848101267e-06, |
|
"loss": 0.0718, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.051898734177215e-06, |
|
"loss": 0.0737, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.045569620253164e-06, |
|
"loss": 0.0731, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.039240506329114e-06, |
|
"loss": 0.087, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.032911392405063e-06, |
|
"loss": 0.0875, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.026582278481013e-06, |
|
"loss": 0.0768, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.020253164556962e-06, |
|
"loss": 0.0842, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.013924050632911e-06, |
|
"loss": 0.092, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.007594936708861e-06, |
|
"loss": 0.0949, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.00126582278481e-06, |
|
"loss": 0.0989, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 8.99493670886076e-06, |
|
"loss": 0.0908, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 8.98860759493671e-06, |
|
"loss": 0.0783, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.982278481012659e-06, |
|
"loss": 0.0651, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.975949367088608e-06, |
|
"loss": 0.0791, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.969620253164557e-06, |
|
"loss": 0.0829, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.963291139240507e-06, |
|
"loss": 0.0797, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.956962025316456e-06, |
|
"loss": 0.0743, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.950632911392406e-06, |
|
"loss": 0.0814, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.944303797468355e-06, |
|
"loss": 0.0594, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.937974683544305e-06, |
|
"loss": 0.0709, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.931645569620254e-06, |
|
"loss": 0.0621, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.925316455696203e-06, |
|
"loss": 0.0599, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.918987341772153e-06, |
|
"loss": 0.0625, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.912658227848102e-06, |
|
"loss": 0.0541, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.906329113924052e-06, |
|
"loss": 0.059, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.900000000000001e-06, |
|
"loss": 0.0576, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.89367088607595e-06, |
|
"loss": 0.0623, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.8873417721519e-06, |
|
"loss": 0.0592, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.88101265822785e-06, |
|
"loss": 0.0692, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.874683544303799e-06, |
|
"loss": 0.077, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.868354430379748e-06, |
|
"loss": 0.0662, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.862025316455696e-06, |
|
"loss": 0.0605, |
|
"step": 5000 |
|
}, |
|
    {
      "epoch": 2.04,
      "eval_loss": 0.2468421906232834,
      "eval_runtime": 1442.9116,
      "eval_samples_per_second": 7.215,
      "eval_steps_per_second": 0.451,
      "eval_wer": 33.46883689165677,
      "step": 5000
    },
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.855696202531645e-06, |
|
"loss": 0.0769, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.849367088607595e-06, |
|
"loss": 0.0601, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.843037974683544e-06, |
|
"loss": 0.0698, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.836708860759494e-06, |
|
"loss": 0.0593, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.830379746835443e-06, |
|
"loss": 0.0521, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.824050632911393e-06, |
|
"loss": 0.0687, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.817721518987342e-06, |
|
"loss": 0.0818, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.811392405063291e-06, |
|
"loss": 0.0706, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.805063291139241e-06, |
|
"loss": 0.0734, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.79873417721519e-06, |
|
"loss": 0.0795, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.79240506329114e-06, |
|
"loss": 0.0905, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.78607594936709e-06, |
|
"loss": 0.086, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.779746835443039e-06, |
|
"loss": 0.105, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.773417721518988e-06, |
|
"loss": 0.0981, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.767088607594937e-06, |
|
"loss": 0.1131, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.760759493670887e-06, |
|
"loss": 0.0916, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.754430379746836e-06, |
|
"loss": 0.0974, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.748101265822786e-06, |
|
"loss": 0.0899, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.741772151898735e-06, |
|
"loss": 0.1104, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.735443037974685e-06, |
|
"loss": 0.0818, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.729113924050634e-06, |
|
"loss": 0.0764, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.722784810126583e-06, |
|
"loss": 0.0709, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.716455696202533e-06, |
|
"loss": 0.0891, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.710126582278482e-06, |
|
"loss": 0.0869, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.703797468354432e-06, |
|
"loss": 0.0631, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.697468354430381e-06, |
|
"loss": 0.0795, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.69113924050633e-06, |
|
"loss": 0.0603, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.68481012658228e-06, |
|
"loss": 0.064, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.67848101265823e-06, |
|
"loss": 0.0738, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.672151898734177e-06, |
|
"loss": 0.0796, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.665822784810127e-06, |
|
"loss": 0.0723, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.659493670886076e-06, |
|
"loss": 0.0592, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.653164556962025e-06, |
|
"loss": 0.051, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.646835443037975e-06, |
|
"loss": 0.0716, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.640506329113924e-06, |
|
"loss": 0.0598, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.634177215189874e-06, |
|
"loss": 0.0524, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.627848101265823e-06, |
|
"loss": 0.0623, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.621518987341773e-06, |
|
"loss": 0.054, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.615189873417722e-06, |
|
"loss": 0.057, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.608860759493671e-06, |
|
"loss": 0.0479, |
|
"step": 6000 |
|
}, |
|
    {
      "epoch": 3.02,
      "eval_loss": 0.24428367614746094,
      "eval_runtime": 1457.5568,
      "eval_samples_per_second": 7.143,
      "eval_steps_per_second": 0.447,
      "eval_wer": 36.51929299663409,
      "step": 6000
    },
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.602531645569621e-06, |
|
"loss": 0.0501, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.59620253164557e-06, |
|
"loss": 0.0527, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.58987341772152e-06, |
|
"loss": 0.0558, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.58354430379747e-06, |
|
"loss": 0.0548, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.577215189873419e-06, |
|
"loss": 0.0503, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.570886075949368e-06, |
|
"loss": 0.0641, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.564556962025316e-06, |
|
"loss": 0.0748, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.558227848101267e-06, |
|
"loss": 0.0775, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.551898734177216e-06, |
|
"loss": 0.0662, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.545569620253166e-06, |
|
"loss": 0.0591, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.539240506329115e-06, |
|
"loss": 0.0421, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.532911392405065e-06, |
|
"loss": 0.0565, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.526582278481014e-06, |
|
"loss": 0.0574, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.520253164556963e-06, |
|
"loss": 0.0466, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.513924050632913e-06, |
|
"loss": 0.0486, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.507594936708862e-06, |
|
"loss": 0.0502, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.501265822784812e-06, |
|
"loss": 0.0415, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.494936708860761e-06, |
|
"loss": 0.0449, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.488607594936709e-06, |
|
"loss": 0.0525, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.482278481012658e-06, |
|
"loss": 0.0401, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.475949367088608e-06, |
|
"loss": 0.0502, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.469620253164557e-06, |
|
"loss": 0.0453, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.463291139240507e-06, |
|
"loss": 0.0442, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.456962025316456e-06, |
|
"loss": 0.0423, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.450632911392405e-06, |
|
"loss": 0.0518, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.444303797468355e-06, |
|
"loss": 0.0479, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.437974683544304e-06, |
|
"loss": 0.0521, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.431645569620254e-06, |
|
"loss": 0.0436, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.425316455696203e-06, |
|
"loss": 0.0539, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.418987341772153e-06, |
|
"loss": 0.0446, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.412658227848102e-06, |
|
"loss": 0.0521, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.406329113924051e-06, |
|
"loss": 0.0391, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.0513, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.39367088607595e-06, |
|
"loss": 0.0449, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.3873417721519e-06, |
|
"loss": 0.0463, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.38101265822785e-06, |
|
"loss": 0.0495, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.374683544303797e-06, |
|
"loss": 0.0628, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.368354430379746e-06, |
|
"loss": 0.0594, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.362025316455696e-06, |
|
"loss": 0.0573, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.355696202531645e-06, |
|
"loss": 0.057, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"eval_loss": 0.25129273533821106, |
|
"eval_runtime": 1479.2043, |
|
"eval_samples_per_second": 7.038, |
|
"eval_steps_per_second": 0.44, |
|
"eval_wer": 57.010683100538216, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.349367088607596e-06, |
|
"loss": 0.0534, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.343037974683546e-06, |
|
"loss": 0.0663, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.336708860759495e-06, |
|
"loss": 0.0698, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.330379746835445e-06, |
|
"loss": 0.0633, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.324050632911394e-06, |
|
"loss": 0.0808, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.317721518987343e-06, |
|
"loss": 0.0699, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.311392405063293e-06, |
|
"loss": 0.0736, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.305063291139242e-06, |
|
"loss": 0.069, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.29873417721519e-06, |
|
"loss": 0.0718, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.29240506329114e-06, |
|
"loss": 0.0674, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.286075949367089e-06, |
|
"loss": 0.0568, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.279746835443038e-06, |
|
"loss": 0.0497, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.273417721518988e-06, |
|
"loss": 0.0563, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.267088607594937e-06, |
|
"loss": 0.0649, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.260759493670887e-06, |
|
"loss": 0.042, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.254430379746836e-06, |
|
"loss": 0.0559, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.248101265822785e-06, |
|
"loss": 0.0476, |
|
"step": 7425 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.241772151898735e-06, |
|
"loss": 0.0427, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.235443037974684e-06, |
|
"loss": 0.0533, |
|
"step": 7475 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.229113924050634e-06, |
|
"loss": 0.0633, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.222784810126583e-06, |
|
"loss": 0.055, |
|
"step": 7525 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.216455696202533e-06, |
|
"loss": 0.043, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.210126582278482e-06, |
|
"loss": 0.0421, |
|
"step": 7575 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.203797468354431e-06, |
|
"loss": 0.0519, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.197468354430381e-06, |
|
"loss": 0.0398, |
|
"step": 7625 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.19113924050633e-06, |
|
"loss": 0.041, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.184810126582278e-06, |
|
"loss": 0.0477, |
|
"step": 7675 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.178481012658227e-06, |
|
"loss": 0.046, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.172151898734177e-06, |
|
"loss": 0.0465, |
|
"step": 7725 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.165822784810126e-06, |
|
"loss": 0.0359, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.159493670886076e-06, |
|
"loss": 0.0362, |
|
"step": 7775 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.153164556962025e-06, |
|
"loss": 0.04, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.146835443037975e-06, |
|
"loss": 0.0433, |
|
"step": 7825 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.140506329113926e-06, |
|
"loss": 0.0399, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.134177215189875e-06, |
|
"loss": 0.0396, |
|
"step": 7875 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.127848101265825e-06, |
|
"loss": 0.0472, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.121518987341774e-06, |
|
"loss": 0.0533, |
|
"step": 7925 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.115189873417722e-06, |
|
"loss": 0.0564, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.108860759493671e-06, |
|
"loss": 0.0508, |
|
"step": 7975 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.10253164556962e-06, |
|
"loss": 0.0375, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"eval_loss": 0.252152681350708, |
|
"eval_runtime": 1456.2607, |
|
"eval_samples_per_second": 7.149, |
|
"eval_steps_per_second": 0.447, |
|
"eval_wer": 44.38120945056017, |
|
"step": 8000 |
|
} |
|
], |
|
"max_steps": 40000, |
|
"num_train_epochs": 9223372036854775807, |
|
"total_flos": 3.69273878249472e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|