|
{
  "best_metric": 27.557846255071826,
  "best_model_checkpoint": "./whisper-small-vietnamese\\checkpoint-2000",
  "epoch": 17.24137931034483,
  "eval_steps": 1000,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14,
      "grad_norm": 118.77095794677734,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 5.0409,
      "step": 25
    },
    {
      "epoch": 0.29,
      "grad_norm": 29.974971771240234,
      "learning_rate": 9.200000000000001e-07,
      "loss": 4.0132,
      "step": 50
    },
    {
      "epoch": 0.43,
      "grad_norm": 25.654932022094727,
      "learning_rate": 1.42e-06,
      "loss": 2.9199,
      "step": 75
    },
    {
      "epoch": 0.57,
      "grad_norm": 16.862260818481445,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 1.7066,
      "step": 100
    },
    {
      "epoch": 0.72,
      "grad_norm": 13.715407371520996,
      "learning_rate": 2.42e-06,
      "loss": 1.2867,
      "step": 125
    },
    {
      "epoch": 0.86,
      "grad_norm": 16.711305618286133,
      "learning_rate": 2.92e-06,
      "loss": 1.1578,
      "step": 150
    },
    {
      "epoch": 1.01,
      "grad_norm": 14.38102912902832,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 1.0035,
      "step": 175
    },
    {
      "epoch": 1.15,
      "grad_norm": 15.285762786865234,
      "learning_rate": 3.920000000000001e-06,
      "loss": 0.8052,
      "step": 200
    },
    {
      "epoch": 1.29,
      "grad_norm": 8.91514778137207,
      "learning_rate": 4.42e-06,
      "loss": 0.286,
      "step": 225
    },
    {
      "epoch": 1.44,
      "grad_norm": 7.789880275726318,
      "learning_rate": 4.92e-06,
      "loss": 0.2305,
      "step": 250
    },
    {
      "epoch": 1.58,
      "grad_norm": 8.093725204467773,
      "learning_rate": 5.420000000000001e-06,
      "loss": 0.2184,
      "step": 275
    },
    {
      "epoch": 1.72,
      "grad_norm": 4.7622785568237305,
      "learning_rate": 5.92e-06,
      "loss": 0.2219,
      "step": 300
    },
    {
      "epoch": 1.87,
      "grad_norm": 7.469940662384033,
      "learning_rate": 6.42e-06,
      "loss": 0.2067,
      "step": 325
    },
    {
      "epoch": 2.01,
      "grad_norm": 4.024804592132568,
      "learning_rate": 6.92e-06,
      "loss": 0.2128,
      "step": 350
    },
    {
      "epoch": 2.16,
      "grad_norm": 4.569382667541504,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.1294,
      "step": 375
    },
    {
      "epoch": 2.3,
      "grad_norm": 5.233477592468262,
      "learning_rate": 7.92e-06,
      "loss": 0.1217,
      "step": 400
    },
    {
      "epoch": 2.44,
      "grad_norm": 6.401954174041748,
      "learning_rate": 8.42e-06,
      "loss": 0.1272,
      "step": 425
    },
    {
      "epoch": 2.59,
      "grad_norm": 4.058222770690918,
      "learning_rate": 8.920000000000001e-06,
      "loss": 0.1188,
      "step": 450
    },
    {
      "epoch": 2.73,
      "grad_norm": 4.228504657745361,
      "learning_rate": 9.42e-06,
      "loss": 0.1294,
      "step": 475
    },
    {
      "epoch": 2.87,
      "grad_norm": 5.160878658294678,
      "learning_rate": 9.920000000000002e-06,
      "loss": 0.1241,
      "step": 500
    },
    {
      "epoch": 3.02,
      "grad_norm": 3.9591662883758545,
      "learning_rate": 9.940000000000001e-06,
      "loss": 0.1264,
      "step": 525
    },
    {
      "epoch": 3.16,
      "grad_norm": 4.170319080352783,
      "learning_rate": 9.86857142857143e-06,
      "loss": 0.0793,
      "step": 550
    },
    {
      "epoch": 3.3,
      "grad_norm": 3.4333674907684326,
      "learning_rate": 9.797142857142858e-06,
      "loss": 0.0643,
      "step": 575
    },
    {
      "epoch": 3.45,
      "grad_norm": 4.089048862457275,
      "learning_rate": 9.725714285714287e-06,
      "loss": 0.0668,
      "step": 600
    },
    {
      "epoch": 3.59,
      "grad_norm": 3.6274781227111816,
      "learning_rate": 9.654285714285716e-06,
      "loss": 0.069,
      "step": 625
    },
    {
      "epoch": 3.74,
      "grad_norm": 3.2134461402893066,
      "learning_rate": 9.582857142857143e-06,
      "loss": 0.0759,
      "step": 650
    },
    {
      "epoch": 3.88,
      "grad_norm": 3.4892189502716064,
      "learning_rate": 9.511428571428572e-06,
      "loss": 0.0698,
      "step": 675
    },
    {
      "epoch": 4.02,
      "grad_norm": 2.3172402381896973,
      "learning_rate": 9.440000000000001e-06,
      "loss": 0.0651,
      "step": 700
    },
    {
      "epoch": 4.17,
      "grad_norm": 2.6511449813842773,
      "learning_rate": 9.368571428571428e-06,
      "loss": 0.0373,
      "step": 725
    },
    {
      "epoch": 4.31,
      "grad_norm": 3.1017539501190186,
      "learning_rate": 9.297142857142857e-06,
      "loss": 0.0415,
      "step": 750
    },
    {
      "epoch": 4.45,
      "grad_norm": 3.3337509632110596,
      "learning_rate": 9.225714285714286e-06,
      "loss": 0.0388,
      "step": 775
    },
    {
      "epoch": 4.6,
      "grad_norm": 2.3944547176361084,
      "learning_rate": 9.154285714285715e-06,
      "loss": 0.0411,
      "step": 800
    },
    {
      "epoch": 4.74,
      "grad_norm": 3.1757237911224365,
      "learning_rate": 9.082857142857143e-06,
      "loss": 0.0381,
      "step": 825
    },
    {
      "epoch": 4.89,
      "grad_norm": 4.307478427886963,
      "learning_rate": 9.011428571428572e-06,
      "loss": 0.0434,
      "step": 850
    },
    {
      "epoch": 5.03,
      "grad_norm": 1.4969714879989624,
      "learning_rate": 8.94e-06,
      "loss": 0.0384,
      "step": 875
    },
    {
      "epoch": 5.17,
      "grad_norm": 1.416281819343567,
      "learning_rate": 8.86857142857143e-06,
      "loss": 0.0207,
      "step": 900
    },
    {
      "epoch": 5.32,
      "grad_norm": 4.1161322593688965,
      "learning_rate": 8.797142857142857e-06,
      "loss": 0.0222,
      "step": 925
    },
    {
      "epoch": 5.46,
      "grad_norm": 0.9343141317367554,
      "learning_rate": 8.725714285714286e-06,
      "loss": 0.0213,
      "step": 950
    },
    {
      "epoch": 5.6,
      "grad_norm": 3.1798384189605713,
      "learning_rate": 8.654285714285715e-06,
      "loss": 0.0239,
      "step": 975
    },
    {
      "epoch": 5.75,
      "grad_norm": 1.7415159940719604,
      "learning_rate": 8.582857142857144e-06,
      "loss": 0.0243,
      "step": 1000
    },
    {
      "epoch": 5.75,
      "eval_loss": 0.6310169696807861,
      "eval_runtime": 482.8872,
      "eval_samples_per_second": 2.562,
      "eval_steps_per_second": 0.321,
      "eval_wer": 29.213729575611364,
      "step": 1000
    },
    {
      "epoch": 5.89,
      "grad_norm": 2.3477253913879395,
      "learning_rate": 8.511428571428571e-06,
      "loss": 0.0215,
      "step": 1025
    },
    {
      "epoch": 6.03,
      "grad_norm": 0.7934111952781677,
      "learning_rate": 8.44e-06,
      "loss": 0.019,
      "step": 1050
    },
    {
      "epoch": 6.18,
      "grad_norm": 1.1651448011398315,
      "learning_rate": 8.36857142857143e-06,
      "loss": 0.0096,
      "step": 1075
    },
    {
      "epoch": 6.32,
      "grad_norm": 1.3079229593276978,
      "learning_rate": 8.297142857142859e-06,
      "loss": 0.0117,
      "step": 1100
    },
    {
      "epoch": 6.47,
      "grad_norm": 2.6112120151519775,
      "learning_rate": 8.225714285714288e-06,
      "loss": 0.0097,
      "step": 1125
    },
    {
      "epoch": 6.61,
      "grad_norm": 0.8934247493743896,
      "learning_rate": 8.154285714285715e-06,
      "loss": 0.0085,
      "step": 1150
    },
    {
      "epoch": 6.75,
      "grad_norm": 1.6696081161499023,
      "learning_rate": 8.082857142857144e-06,
      "loss": 0.011,
      "step": 1175
    },
    {
      "epoch": 6.9,
      "grad_norm": 1.9043184518814087,
      "learning_rate": 8.011428571428573e-06,
      "loss": 0.0159,
      "step": 1200
    },
    {
      "epoch": 7.04,
      "grad_norm": 0.7788882255554199,
      "learning_rate": 7.94e-06,
      "loss": 0.0081,
      "step": 1225
    },
    {
      "epoch": 7.18,
      "grad_norm": 0.44921088218688965,
      "learning_rate": 7.86857142857143e-06,
      "loss": 0.0075,
      "step": 1250
    },
    {
      "epoch": 7.33,
      "grad_norm": 1.6392821073532104,
      "learning_rate": 7.797142857142858e-06,
      "loss": 0.0055,
      "step": 1275
    },
    {
      "epoch": 7.47,
      "grad_norm": 2.472712278366089,
      "learning_rate": 7.725714285714286e-06,
      "loss": 0.0059,
      "step": 1300
    },
    {
      "epoch": 7.61,
      "grad_norm": 0.16271165013313293,
      "learning_rate": 7.654285714285715e-06,
      "loss": 0.0046,
      "step": 1325
    },
    {
      "epoch": 7.76,
      "grad_norm": 0.6483273506164551,
      "learning_rate": 7.5828571428571444e-06,
      "loss": 0.0044,
      "step": 1350
    },
    {
      "epoch": 7.9,
      "grad_norm": 1.0499032735824585,
      "learning_rate": 7.511428571428572e-06,
      "loss": 0.0046,
      "step": 1375
    },
    {
      "epoch": 8.05,
      "grad_norm": 0.6188054084777832,
      "learning_rate": 7.440000000000001e-06,
      "loss": 0.0039,
      "step": 1400
    },
    {
      "epoch": 8.19,
      "grad_norm": 0.11255456507205963,
      "learning_rate": 7.36857142857143e-06,
      "loss": 0.0035,
      "step": 1425
    },
    {
      "epoch": 8.33,
      "grad_norm": 0.21570441126823425,
      "learning_rate": 7.297142857142858e-06,
      "loss": 0.0044,
      "step": 1450
    },
    {
      "epoch": 8.48,
      "grad_norm": 0.2393558919429779,
      "learning_rate": 7.225714285714286e-06,
      "loss": 0.0025,
      "step": 1475
    },
    {
      "epoch": 8.62,
      "grad_norm": 0.8370733857154846,
      "learning_rate": 7.154285714285715e-06,
      "loss": 0.0022,
      "step": 1500
    },
    {
      "epoch": 8.76,
      "grad_norm": 0.25669318437576294,
      "learning_rate": 7.082857142857143e-06,
      "loss": 0.002,
      "step": 1525
    },
    {
      "epoch": 8.91,
      "grad_norm": 0.2188798040151596,
      "learning_rate": 7.011428571428572e-06,
      "loss": 0.002,
      "step": 1550
    },
    {
      "epoch": 9.05,
      "grad_norm": 0.06543828547000885,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 0.0035,
      "step": 1575
    },
    {
      "epoch": 9.2,
      "grad_norm": 0.05953472480177879,
      "learning_rate": 6.868571428571429e-06,
      "loss": 0.0012,
      "step": 1600
    },
    {
      "epoch": 9.34,
      "grad_norm": 0.05562310665845871,
      "learning_rate": 6.797142857142858e-06,
      "loss": 0.0012,
      "step": 1625
    },
    {
      "epoch": 9.48,
      "grad_norm": 0.06505300104618073,
      "learning_rate": 6.725714285714287e-06,
      "loss": 0.0011,
      "step": 1650
    },
    {
      "epoch": 9.63,
      "grad_norm": 0.05420365184545517,
      "learning_rate": 6.654285714285716e-06,
      "loss": 0.001,
      "step": 1675
    },
    {
      "epoch": 9.77,
      "grad_norm": 0.0459221787750721,
      "learning_rate": 6.582857142857143e-06,
      "loss": 0.0009,
      "step": 1700
    },
    {
      "epoch": 9.91,
      "grad_norm": 0.05463138595223427,
      "learning_rate": 6.511428571428572e-06,
      "loss": 0.0009,
      "step": 1725
    },
    {
      "epoch": 10.06,
      "grad_norm": 0.2885764539241791,
      "learning_rate": 6.440000000000001e-06,
      "loss": 0.0009,
      "step": 1750
    },
    {
      "epoch": 10.2,
      "grad_norm": 0.03790697455406189,
      "learning_rate": 6.368571428571429e-06,
      "loss": 0.0007,
      "step": 1775
    },
    {
      "epoch": 10.34,
      "grad_norm": 0.04002631828188896,
      "learning_rate": 6.297142857142857e-06,
      "loss": 0.0007,
      "step": 1800
    },
    {
      "epoch": 10.49,
      "grad_norm": 0.04201885685324669,
      "learning_rate": 6.225714285714286e-06,
      "loss": 0.0006,
      "step": 1825
    },
    {
      "epoch": 10.63,
      "grad_norm": 0.042038705199956894,
      "learning_rate": 6.1542857142857145e-06,
      "loss": 0.0007,
      "step": 1850
    },
    {
      "epoch": 10.78,
      "grad_norm": 0.037008922547101974,
      "learning_rate": 6.0828571428571435e-06,
      "loss": 0.0006,
      "step": 1875
    },
    {
      "epoch": 10.92,
      "grad_norm": 0.03480172157287598,
      "learning_rate": 6.011428571428572e-06,
      "loss": 0.0006,
      "step": 1900
    },
    {
      "epoch": 11.06,
      "grad_norm": 0.0292174331843853,
      "learning_rate": 5.94e-06,
      "loss": 0.0006,
      "step": 1925
    },
    {
      "epoch": 11.21,
      "grad_norm": 0.03173697739839554,
      "learning_rate": 5.868571428571429e-06,
      "loss": 0.0005,
      "step": 1950
    },
    {
      "epoch": 11.35,
      "grad_norm": 0.026889758184552193,
      "learning_rate": 5.797142857142858e-06,
      "loss": 0.0005,
      "step": 1975
    },
    {
      "epoch": 11.49,
      "grad_norm": 0.02639606036245823,
      "learning_rate": 5.725714285714287e-06,
      "loss": 0.0005,
      "step": 2000
    },
    {
      "epoch": 11.49,
      "eval_loss": 0.6665691137313843,
      "eval_runtime": 480.6597,
      "eval_samples_per_second": 2.574,
      "eval_steps_per_second": 0.322,
      "eval_wer": 27.557846255071826,
      "step": 2000
    },
    {
      "epoch": 11.64,
      "grad_norm": 0.029212338849902153,
      "learning_rate": 5.654285714285714e-06,
      "loss": 0.0005,
      "step": 2025
    },
    {
      "epoch": 11.78,
      "grad_norm": 0.025965554639697075,
      "learning_rate": 5.582857142857143e-06,
      "loss": 0.0005,
      "step": 2050
    },
    {
      "epoch": 11.93,
      "grad_norm": 0.0334208607673645,
      "learning_rate": 5.511428571428572e-06,
      "loss": 0.0005,
      "step": 2075
    },
    {
      "epoch": 12.07,
      "grad_norm": 0.022900965064764023,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 0.0005,
      "step": 2100
    },
    {
      "epoch": 12.21,
      "grad_norm": 0.024590345099568367,
      "learning_rate": 5.368571428571429e-06,
      "loss": 0.0004,
      "step": 2125
    },
    {
      "epoch": 12.36,
      "grad_norm": 0.02628406509757042,
      "learning_rate": 5.297142857142858e-06,
      "loss": 0.0005,
      "step": 2150
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.037630677223205566,
      "learning_rate": 5.225714285714286e-06,
      "loss": 0.0004,
      "step": 2175
    },
    {
      "epoch": 12.64,
      "grad_norm": 0.020544828847050667,
      "learning_rate": 5.154285714285715e-06,
      "loss": 0.0004,
      "step": 2200
    },
    {
      "epoch": 12.79,
      "grad_norm": 0.02015620656311512,
      "learning_rate": 5.082857142857144e-06,
      "loss": 0.0004,
      "step": 2225
    },
    {
      "epoch": 12.93,
      "grad_norm": 0.027246667072176933,
      "learning_rate": 5.011428571428571e-06,
      "loss": 0.0005,
      "step": 2250
    },
    {
      "epoch": 13.07,
      "grad_norm": 0.023458700627088547,
      "learning_rate": 4.94e-06,
      "loss": 0.0004,
      "step": 2275
    },
    {
      "epoch": 13.22,
      "grad_norm": 0.020197845995426178,
      "learning_rate": 4.868571428571429e-06,
      "loss": 0.0004,
      "step": 2300
    },
    {
      "epoch": 13.36,
      "grad_norm": 0.020962951704859734,
      "learning_rate": 4.797142857142857e-06,
      "loss": 0.0004,
      "step": 2325
    },
    {
      "epoch": 13.51,
      "grad_norm": 0.024561889469623566,
      "learning_rate": 4.725714285714286e-06,
      "loss": 0.0004,
      "step": 2350
    },
    {
      "epoch": 13.65,
      "grad_norm": 0.021130310371518135,
      "learning_rate": 4.6542857142857145e-06,
      "loss": 0.0004,
      "step": 2375
    },
    {
      "epoch": 13.79,
      "grad_norm": 0.01879502646625042,
      "learning_rate": 4.5828571428571435e-06,
      "loss": 0.0004,
      "step": 2400
    },
    {
      "epoch": 13.94,
      "grad_norm": 0.020792394876480103,
      "learning_rate": 4.511428571428572e-06,
      "loss": 0.0004,
      "step": 2425
    },
    {
      "epoch": 14.08,
      "grad_norm": 0.015595002099871635,
      "learning_rate": 4.440000000000001e-06,
      "loss": 0.0004,
      "step": 2450
    },
    {
      "epoch": 14.22,
      "grad_norm": 0.017891034483909607,
      "learning_rate": 4.368571428571429e-06,
      "loss": 0.0003,
      "step": 2475
    },
    {
      "epoch": 14.37,
      "grad_norm": 0.02275862917304039,
      "learning_rate": 4.297142857142858e-06,
      "loss": 0.0004,
      "step": 2500
    },
    {
      "epoch": 14.51,
      "grad_norm": 0.019310912117362022,
      "learning_rate": 4.225714285714286e-06,
      "loss": 0.0004,
      "step": 2525
    },
    {
      "epoch": 14.66,
      "grad_norm": 0.018501998856663704,
      "learning_rate": 4.154285714285714e-06,
      "loss": 0.0003,
      "step": 2550
    },
    {
      "epoch": 14.8,
      "grad_norm": 0.024563327431678772,
      "learning_rate": 4.082857142857143e-06,
      "loss": 0.0004,
      "step": 2575
    },
    {
      "epoch": 14.94,
      "grad_norm": 0.015636250376701355,
      "learning_rate": 4.011428571428571e-06,
      "loss": 0.0003,
      "step": 2600
    },
    {
      "epoch": 15.09,
      "grad_norm": 0.017548007890582085,
      "learning_rate": 3.94e-06,
      "loss": 0.0003,
      "step": 2625
    },
    {
      "epoch": 15.23,
      "grad_norm": 0.016372185200452805,
      "learning_rate": 3.8685714285714286e-06,
      "loss": 0.0003,
      "step": 2650
    },
    {
      "epoch": 15.37,
      "grad_norm": 0.0156166460365057,
      "learning_rate": 3.7971428571428576e-06,
      "loss": 0.0003,
      "step": 2675
    },
    {
      "epoch": 15.52,
      "grad_norm": 0.017448540776968002,
      "learning_rate": 3.7257142857142857e-06,
      "loss": 0.0003,
      "step": 2700
    },
    {
      "epoch": 15.66,
      "grad_norm": 0.01800905540585518,
      "learning_rate": 3.6542857142857148e-06,
      "loss": 0.0003,
      "step": 2725
    },
    {
      "epoch": 15.8,
      "grad_norm": 0.01949344575405121,
      "learning_rate": 3.582857142857143e-06,
      "loss": 0.0003,
      "step": 2750
    },
    {
      "epoch": 15.95,
      "grad_norm": 0.01464366540312767,
      "learning_rate": 3.511428571428572e-06,
      "loss": 0.0003,
      "step": 2775
    },
    {
      "epoch": 16.09,
      "grad_norm": 0.017990361899137497,
      "learning_rate": 3.44e-06,
      "loss": 0.0003,
      "step": 2800
    },
    {
      "epoch": 16.24,
      "grad_norm": 0.013816111721098423,
      "learning_rate": 3.3685714285714287e-06,
      "loss": 0.0003,
      "step": 2825
    },
    {
      "epoch": 16.38,
      "grad_norm": 0.014646317809820175,
      "learning_rate": 3.2971428571428577e-06,
      "loss": 0.0003,
      "step": 2850
    },
    {
      "epoch": 16.52,
      "grad_norm": 0.014701823703944683,
      "learning_rate": 3.225714285714286e-06,
      "loss": 0.0003,
      "step": 2875
    },
    {
      "epoch": 16.67,
      "grad_norm": 0.016701674088835716,
      "learning_rate": 3.154285714285715e-06,
      "loss": 0.0003,
      "step": 2900
    },
    {
      "epoch": 16.81,
      "grad_norm": 0.015475871041417122,
      "learning_rate": 3.082857142857143e-06,
      "loss": 0.0003,
      "step": 2925
    },
    {
      "epoch": 16.95,
      "grad_norm": 0.013779123313724995,
      "learning_rate": 3.0114285714285716e-06,
      "loss": 0.0003,
      "step": 2950
    },
    {
      "epoch": 17.1,
      "grad_norm": 0.01210096850991249,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 0.0003,
      "step": 2975
    },
    {
      "epoch": 17.24,
      "grad_norm": 0.01389345247298479,
      "learning_rate": 2.868571428571429e-06,
      "loss": 0.0003,
      "step": 3000
    },
    {
      "epoch": 17.24,
      "eval_loss": 0.6930021047592163,
      "eval_runtime": 480.9102,
      "eval_samples_per_second": 2.572,
      "eval_steps_per_second": 0.322,
      "eval_wer": 27.601710713894068,
      "step": 3000
    }
  ],
  "logging_steps": 25,
  "max_steps": 4000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 23,
  "save_steps": 1000,
  "total_flos": 1.379813374550016e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
|
|