{ |
|
"best_metric": 0.4853431786842222, |
|
"best_model_checkpoint": "./whisper-small-ar_tsize_0.4/checkpoint-5000", |
|
"epoch": 3.219575016097875, |
|
"eval_steps": 1000, |
|
"global_step": 5000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 81.08666229248047, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 3.2986, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 43.92015838623047, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.5616, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 18.36177635192871, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.9124, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 14.436102867126465, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.3046, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 13.751386642456055, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.1089, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 13.907903671264648, |
|
"learning_rate": 3e-06, |
|
"loss": 0.9714, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 13.337739944458008, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.9561, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 13.429357528686523, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.8369, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 21.439300537109375, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.7732, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.75934600830078, |
|
"learning_rate": 5e-06, |
|
"loss": 0.8122, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 12.824952125549316, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 0.716, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 13.877304077148438, |
|
"learning_rate": 6e-06, |
|
"loss": 0.6072, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 10.231637954711914, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 0.477, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 24.04966163635254, |
|
"learning_rate": 7e-06, |
|
"loss": 0.4967, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 13.837153434753418, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.4392, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 12.172282218933105, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.4187, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 11.377702713012695, |
|
"learning_rate": 8.5e-06, |
|
"loss": 0.457, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 14.580387115478516, |
|
"learning_rate": 9e-06, |
|
"loss": 0.4528, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 14.869205474853516, |
|
"learning_rate": 9.5e-06, |
|
"loss": 0.4612, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 7.454779148101807, |
|
"learning_rate": 1e-05, |
|
"loss": 0.4023, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 12.038946151733398, |
|
"learning_rate": 9.944444444444445e-06, |
|
"loss": 0.4409, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 13.034636497497559, |
|
"learning_rate": 9.88888888888889e-06, |
|
"loss": 0.4154, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 12.590168952941895, |
|
"learning_rate": 9.833333333333333e-06, |
|
"loss": 0.3787, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.679019927978516, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.3979, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 12.680933952331543, |
|
"learning_rate": 9.722222222222223e-06, |
|
"loss": 0.4203, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 9.336934089660645, |
|
"learning_rate": 9.666666666666667e-06, |
|
"loss": 0.3725, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 16.3114013671875, |
|
"learning_rate": 9.611111111111112e-06, |
|
"loss": 0.3413, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 8.341588020324707, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.3831, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 14.150778770446777, |
|
"learning_rate": 9.5e-06, |
|
"loss": 0.3734, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 13.409160614013672, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.3605, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.671516418457031, |
|
"learning_rate": 9.38888888888889e-06, |
|
"loss": 0.3768, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 11.869982719421387, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.3529, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 14.277606964111328, |
|
"learning_rate": 9.277777777777778e-06, |
|
"loss": 0.388, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 10.158995628356934, |
|
"learning_rate": 9.222222222222224e-06, |
|
"loss": 0.3652, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 8.648341178894043, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 0.3141, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 12.581182479858398, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.3824, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 14.91859245300293, |
|
"learning_rate": 9.055555555555556e-06, |
|
"loss": 0.3818, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 11.872838973999023, |
|
"learning_rate": 9e-06, |
|
"loss": 0.354, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 10.915922164916992, |
|
"learning_rate": 8.944444444444446e-06, |
|
"loss": 0.3348, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 8.247742652893066, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.3704, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"eval_loss": 0.348444402217865, |
|
"eval_runtime": 2567.8232, |
|
"eval_samples_per_second": 3.022, |
|
"eval_steps_per_second": 0.378, |
|
"eval_wer": 0.5856278635292025, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 8.708548545837402, |
|
"learning_rate": 8.833333333333334e-06, |
|
"loss": 0.3689, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 13.660444259643555, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 0.3512, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 8.791279792785645, |
|
"learning_rate": 8.722222222222224e-06, |
|
"loss": 0.3847, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 7.456522464752197, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.3446, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 7.396511077880859, |
|
"learning_rate": 8.611111111111112e-06, |
|
"loss": 0.3254, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 18.812950134277344, |
|
"learning_rate": 8.555555555555556e-06, |
|
"loss": 0.3172, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 8.77145004272461, |
|
"learning_rate": 8.5e-06, |
|
"loss": 0.3608, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 10.726495742797852, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.3716, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 8.422333717346191, |
|
"learning_rate": 8.38888888888889e-06, |
|
"loss": 0.2729, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 8.867196083068848, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.3324, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 10.892127990722656, |
|
"learning_rate": 8.277777777777778e-06, |
|
"loss": 0.3403, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 9.399800300598145, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.326, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 7.993837356567383, |
|
"learning_rate": 8.166666666666668e-06, |
|
"loss": 0.3212, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 13.500585556030273, |
|
"learning_rate": 8.111111111111112e-06, |
|
"loss": 0.3149, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 9.825460433959961, |
|
"learning_rate": 8.055555555555557e-06, |
|
"loss": 0.3045, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 10.101466178894043, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.3271, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 11.21338939666748, |
|
"learning_rate": 7.944444444444445e-06, |
|
"loss": 0.3385, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 9.056229591369629, |
|
"learning_rate": 7.88888888888889e-06, |
|
"loss": 0.3268, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 10.06369400024414, |
|
"learning_rate": 7.833333333333333e-06, |
|
"loss": 0.3398, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 6.4756059646606445, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.3146, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 11.639076232910156, |
|
"learning_rate": 7.722222222222223e-06, |
|
"loss": 0.2991, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 12.927867889404297, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.3165, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 6.089904308319092, |
|
"learning_rate": 7.611111111111111e-06, |
|
"loss": 0.2003, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 6.897767543792725, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.187, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 7.848104476928711, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.183, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 7.197811126708984, |
|
"learning_rate": 7.444444444444445e-06, |
|
"loss": 0.2151, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 11.506584167480469, |
|
"learning_rate": 7.38888888888889e-06, |
|
"loss": 0.1922, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 7.342209339141846, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.1697, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 8.071680068969727, |
|
"learning_rate": 7.277777777777778e-06, |
|
"loss": 0.1945, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 7.5781049728393555, |
|
"learning_rate": 7.222222222222223e-06, |
|
"loss": 0.1848, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 7.194643020629883, |
|
"learning_rate": 7.166666666666667e-06, |
|
"loss": 0.2247, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 7.230910778045654, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.1858, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 6.7898736000061035, |
|
"learning_rate": 7.055555555555557e-06, |
|
"loss": 0.2032, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 6.09243106842041, |
|
"learning_rate": 7e-06, |
|
"loss": 0.1943, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 11.722480773925781, |
|
"learning_rate": 6.944444444444445e-06, |
|
"loss": 0.1958, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 10.483800888061523, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.1895, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 8.59669017791748, |
|
"learning_rate": 6.833333333333334e-06, |
|
"loss": 0.2182, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 7.266107559204102, |
|
"learning_rate": 6.777777777777779e-06, |
|
"loss": 0.2001, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 10.1314115524292, |
|
"learning_rate": 6.7222222222222235e-06, |
|
"loss": 0.2232, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 9.09283447265625, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.2089, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"eval_loss": 0.29850226640701294, |
|
"eval_runtime": 2408.0429, |
|
"eval_samples_per_second": 3.223, |
|
"eval_steps_per_second": 0.403, |
|
"eval_wer": 0.528601930519105, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 8.666448593139648, |
|
"learning_rate": 6.6111111111111115e-06, |
|
"loss": 0.2063, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 8.325794219970703, |
|
"learning_rate": 6.555555555555556e-06, |
|
"loss": 0.1991, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 7.720223903656006, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 0.1971, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 8.479090690612793, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.1697, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 6.344616413116455, |
|
"learning_rate": 6.3888888888888885e-06, |
|
"loss": 0.1643, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 9.173255920410156, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.1961, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 7.795842170715332, |
|
"learning_rate": 6.277777777777778e-06, |
|
"loss": 0.1842, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 7.321842193603516, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.1836, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 8.700078964233398, |
|
"learning_rate": 6.166666666666667e-06, |
|
"loss": 0.2107, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 6.927859783172607, |
|
"learning_rate": 6.111111111111112e-06, |
|
"loss": 0.1769, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 7.689358711242676, |
|
"learning_rate": 6.055555555555555e-06, |
|
"loss": 0.2032, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 7.440878868103027, |
|
"learning_rate": 6e-06, |
|
"loss": 0.2003, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 7.402519226074219, |
|
"learning_rate": 5.944444444444445e-06, |
|
"loss": 0.2171, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 15.369744300842285, |
|
"learning_rate": 5.88888888888889e-06, |
|
"loss": 0.2071, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 8.982576370239258, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 0.1989, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 9.001676559448242, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 0.2069, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 6.500514507293701, |
|
"learning_rate": 5.722222222222222e-06, |
|
"loss": 0.1848, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 9.086186408996582, |
|
"learning_rate": 5.666666666666667e-06, |
|
"loss": 0.1997, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 5.81564998626709, |
|
"learning_rate": 5.611111111111112e-06, |
|
"loss": 0.1811, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 4.781924247741699, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 0.1797, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 6.238134384155273, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 0.1762, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 5.3570451736450195, |
|
"learning_rate": 5.444444444444445e-06, |
|
"loss": 0.1932, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 12.84581470489502, |
|
"learning_rate": 5.388888888888889e-06, |
|
"loss": 0.1796, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 5.359172344207764, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.1773, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 4.308635711669922, |
|
"learning_rate": 5.2777777777777785e-06, |
|
"loss": 0.172, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 8.421016693115234, |
|
"learning_rate": 5.2222222222222226e-06, |
|
"loss": 0.1787, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 8.52223014831543, |
|
"learning_rate": 5.1666666666666675e-06, |
|
"loss": 0.1679, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 4.151463031768799, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 0.1728, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 9.563691139221191, |
|
"learning_rate": 5.0555555555555555e-06, |
|
"loss": 0.1669, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 8.052961349487305, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1889, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 7.330240249633789, |
|
"learning_rate": 4.944444444444445e-06, |
|
"loss": 0.1831, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 8.667630195617676, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 0.1789, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 9.038121223449707, |
|
"learning_rate": 4.833333333333333e-06, |
|
"loss": 0.2056, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 6.9429802894592285, |
|
"learning_rate": 4.777777777777778e-06, |
|
"loss": 0.1827, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 8.04848575592041, |
|
"learning_rate": 4.722222222222222e-06, |
|
"loss": 0.1894, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 7.720078468322754, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.1802, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 7.583975791931152, |
|
"learning_rate": 4.611111111111112e-06, |
|
"loss": 0.1782, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 4.011235237121582, |
|
"learning_rate": 4.555555555555556e-06, |
|
"loss": 0.174, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 9.750682830810547, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.1699, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 5.716087341308594, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.1556, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"eval_loss": 0.2765233814716339, |
|
"eval_runtime": 2325.3197, |
|
"eval_samples_per_second": 3.338, |
|
"eval_steps_per_second": 0.418, |
|
"eval_wer": 0.5046261287309284, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 10.363564491271973, |
|
"learning_rate": 4.388888888888889e-06, |
|
"loss": 0.1735, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 6.764340877532959, |
|
"learning_rate": 4.333333333333334e-06, |
|
"loss": 0.1805, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 10.78950309753418, |
|
"learning_rate": 4.277777777777778e-06, |
|
"loss": 0.2041, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 7.050520420074463, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.1621, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 4.625450134277344, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.1441, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 4.653476715087891, |
|
"learning_rate": 4.111111111111111e-06, |
|
"loss": 0.0914, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 3.942208766937256, |
|
"learning_rate": 4.055555555555556e-06, |
|
"loss": 0.0935, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 4.222424507141113, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.085, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 2.962463140487671, |
|
"learning_rate": 3.944444444444445e-06, |
|
"loss": 0.1022, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 6.484899997711182, |
|
"learning_rate": 3.88888888888889e-06, |
|
"loss": 0.097, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 5.599306583404541, |
|
"learning_rate": 3.833333333333334e-06, |
|
"loss": 0.1059, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 5.55266809463501, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 0.0859, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 4.014285564422607, |
|
"learning_rate": 3.7222222222222225e-06, |
|
"loss": 0.0964, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 4.916045665740967, |
|
"learning_rate": 3.6666666666666666e-06, |
|
"loss": 0.0896, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 5.2690887451171875, |
|
"learning_rate": 3.6111111111111115e-06, |
|
"loss": 0.0836, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 5.532698154449463, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 0.0891, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 4.553203105926514, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.0759, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 7.247622013092041, |
|
"learning_rate": 3.444444444444445e-06, |
|
"loss": 0.1061, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 3.685699224472046, |
|
"learning_rate": 3.3888888888888893e-06, |
|
"loss": 0.0814, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 5.761671543121338, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.0851, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 4.881550312042236, |
|
"learning_rate": 3.277777777777778e-06, |
|
"loss": 0.0878, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 8.194666862487793, |
|
"learning_rate": 3.2222222222222227e-06, |
|
"loss": 0.0875, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 4.2310004234313965, |
|
"learning_rate": 3.1666666666666667e-06, |
|
"loss": 0.0984, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 3.0441014766693115, |
|
"learning_rate": 3.1111111111111116e-06, |
|
"loss": 0.0872, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 9.044891357421875, |
|
"learning_rate": 3.055555555555556e-06, |
|
"loss": 0.0934, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 6.037336826324463, |
|
"learning_rate": 3e-06, |
|
"loss": 0.0871, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 13.961265563964844, |
|
"learning_rate": 2.944444444444445e-06, |
|
"loss": 0.1119, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 5.0205888748168945, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 0.0823, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 5.017231464385986, |
|
"learning_rate": 2.8333333333333335e-06, |
|
"loss": 0.091, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 4.362540245056152, |
|
"learning_rate": 2.7777777777777783e-06, |
|
"loss": 0.0893, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 4.8689374923706055, |
|
"learning_rate": 2.7222222222222224e-06, |
|
"loss": 0.1006, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 7.2723870277404785, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 0.0776, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 5.421591281890869, |
|
"learning_rate": 2.6111111111111113e-06, |
|
"loss": 0.0931, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 4.110602855682373, |
|
"learning_rate": 2.5555555555555557e-06, |
|
"loss": 0.0815, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 6.327955722808838, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.0925, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 4.236114025115967, |
|
"learning_rate": 2.4444444444444447e-06, |
|
"loss": 0.0862, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 6.197454929351807, |
|
"learning_rate": 2.388888888888889e-06, |
|
"loss": 0.0784, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 2.226133108139038, |
|
"learning_rate": 2.3333333333333336e-06, |
|
"loss": 0.0853, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 7.012733459472656, |
|
"learning_rate": 2.277777777777778e-06, |
|
"loss": 0.0786, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 6.955764293670654, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 0.1009, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"eval_loss": 0.27859729528427124, |
|
"eval_runtime": 2118.3229, |
|
"eval_samples_per_second": 3.664, |
|
"eval_steps_per_second": 0.458, |
|
"eval_wer": 0.49408389306525513, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 6.0219502449035645, |
|
"learning_rate": 2.166666666666667e-06, |
|
"loss": 0.0825, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 4.783457279205322, |
|
"learning_rate": 2.1111111111111114e-06, |
|
"loss": 0.0903, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 4.37739372253418, |
|
"learning_rate": 2.0555555555555555e-06, |
|
"loss": 0.0876, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 4.563325881958008, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.0904, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 6.5224738121032715, |
|
"learning_rate": 1.944444444444445e-06, |
|
"loss": 0.088, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 4.984440326690674, |
|
"learning_rate": 1.888888888888889e-06, |
|
"loss": 0.0748, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 3.8407866954803467, |
|
"learning_rate": 1.8333333333333333e-06, |
|
"loss": 0.0802, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 5.057933330535889, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 0.0825, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 5.724285125732422, |
|
"learning_rate": 1.7222222222222224e-06, |
|
"loss": 0.0886, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 4.131182670593262, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 0.0696, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 7.058526039123535, |
|
"learning_rate": 1.6111111111111113e-06, |
|
"loss": 0.1036, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 5.512073993682861, |
|
"learning_rate": 1.5555555555555558e-06, |
|
"loss": 0.084, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 4.876647472381592, |
|
"learning_rate": 1.5e-06, |
|
"loss": 0.0824, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 4.75276517868042, |
|
"learning_rate": 1.4444444444444445e-06, |
|
"loss": 0.1014, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 6.7879638671875, |
|
"learning_rate": 1.3888888888888892e-06, |
|
"loss": 0.0763, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 4.233926296234131, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 0.0956, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 6.624825954437256, |
|
"learning_rate": 1.2777777777777779e-06, |
|
"loss": 0.0935, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 3.828364372253418, |
|
"learning_rate": 1.2222222222222223e-06, |
|
"loss": 0.081, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 2.6598658561706543, |
|
"learning_rate": 1.1666666666666668e-06, |
|
"loss": 0.0942, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 7.273928165435791, |
|
"learning_rate": 1.111111111111111e-06, |
|
"loss": 0.0958, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 5.134991645812988, |
|
"learning_rate": 1.0555555555555557e-06, |
|
"loss": 0.0958, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 6.101174354553223, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 0.0771, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 8.107765197753906, |
|
"learning_rate": 9.444444444444445e-07, |
|
"loss": 0.0777, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 5.9032392501831055, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 0.0758, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 5.864885330200195, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 0.0806, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 6.331638336181641, |
|
"learning_rate": 7.777777777777779e-07, |
|
"loss": 0.0921, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 1.9154059886932373, |
|
"learning_rate": 7.222222222222222e-07, |
|
"loss": 0.0573, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 5.074680328369141, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 0.0552, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 2.974381685256958, |
|
"learning_rate": 6.111111111111112e-07, |
|
"loss": 0.0505, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 5.69284725189209, |
|
"learning_rate": 5.555555555555555e-07, |
|
"loss": 0.0503, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 2.9114811420440674, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 0.0475, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 8.516266822814941, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 0.0549, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 2.1860241889953613, |
|
"learning_rate": 3.8888888888888895e-07, |
|
"loss": 0.0499, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 2.783489942550659, |
|
"learning_rate": 3.3333333333333335e-07, |
|
"loss": 0.0419, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 4.006200790405273, |
|
"learning_rate": 2.7777777777777776e-07, |
|
"loss": 0.0635, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 5.734996795654297, |
|
"learning_rate": 2.2222222222222224e-07, |
|
"loss": 0.0449, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 2.960620880126953, |
|
"learning_rate": 1.6666666666666668e-07, |
|
"loss": 0.0553, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 2.3535099029541016, |
|
"learning_rate": 1.1111111111111112e-07, |
|
"loss": 0.0361, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 4.550661087036133, |
|
"learning_rate": 5.555555555555556e-08, |
|
"loss": 0.0448, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 2.250640630722046, |
|
"learning_rate": 0.0, |
|
"loss": 0.0559, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"eval_loss": 0.2770070731639862, |
|
"eval_runtime": 2111.9793, |
|
"eval_samples_per_second": 3.675, |
|
"eval_steps_per_second": 0.46, |
|
"eval_wer": 0.4853431786842222, |
|
"step": 5000 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 1000, |
|
"total_flos": 1.153735571939328e+19, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |