{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.9992034411343,
  "global_step": 15680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.0000000000000003e-07, "loss": 11.2012, "step": 10 },
    { "epoch": 0.03, "learning_rate": 1.3e-06, "loss": 11.0313, "step": 20 },
    { "epoch": 0.04, "learning_rate": 2.3e-06, "loss": 9.915, "step": 30 },
    { "epoch": 0.05, "learning_rate": 3.3e-06, "loss": 8.2315, "step": 40 },
    { "epoch": 0.06, "learning_rate": 4.2999999999999995e-06, "loss": 7.1969, "step": 50 },
    { "epoch": 0.08, "learning_rate": 5.3e-06, "loss": 6.5699, "step": 60 },
    { "epoch": 0.09, "learning_rate": 6.300000000000001e-06, "loss": 5.9241, "step": 70 },
    { "epoch": 0.1, "learning_rate": 7.2999999999999996e-06, "loss": 5.4646, "step": 80 },
    { "epoch": 0.11, "learning_rate": 8.3e-06, "loss": 5.1939, "step": 90 },
    { "epoch": 0.13, "learning_rate": 9.3e-06, "loss": 5.0085, "step": 100 },
    { "epoch": 0.14, "learning_rate": 1.03e-05, "loss": 5.5541, "step": 110 },
    { "epoch": 0.15, "learning_rate": 1.13e-05, "loss": 4.9172, "step": 120 },
    { "epoch": 0.17, "learning_rate": 1.23e-05, "loss": 4.6835, "step": 130 },
    { "epoch": 0.18, "learning_rate": 1.3300000000000001e-05, "loss": 4.5962, "step": 140 },
    { "epoch": 0.19, "learning_rate": 1.43e-05, "loss": 4.3211, "step": 150 },
    { "epoch": 0.2, "learning_rate": 1.53e-05, "loss": 4.8684, "step": 160 },
    { "epoch": 0.22, "learning_rate": 1.63e-05, "loss": 4.4981, "step": 170 },
    { "epoch": 0.23, "learning_rate": 1.73e-05, "loss": 4.3235, "step": 180 },
    { "epoch": 0.24, "learning_rate": 1.83e-05, "loss": 4.2043, "step": 190 },
    { "epoch": 0.25, "learning_rate": 1.93e-05, "loss": 3.9079, "step": 200 },
    { "epoch": 0.27, "learning_rate": 2.0300000000000002e-05, "loss": 4.4068, "step": 210 },
    { "epoch": 0.28, "learning_rate": 2.13e-05, "loss": 4.0767, "step": 220 },
    { "epoch": 0.29, "learning_rate": 2.23e-05, "loss": 4.0105, "step": 230 },
    { "epoch": 0.31, "learning_rate": 2.3300000000000004e-05, "loss": 3.8937, "step": 240 },
    { "epoch": 0.32, "learning_rate": 2.43e-05, "loss": 3.4475, "step": 250 },
    { "epoch": 0.33, "learning_rate": 2.5300000000000002e-05, "loss": 4.1418, "step": 260 },
    { "epoch": 0.34, "learning_rate": 2.6300000000000002e-05, "loss": 3.8227, "step": 270 },
    { "epoch": 0.36, "learning_rate": 2.7300000000000003e-05, "loss": 3.693, "step": 280 },
    { "epoch": 0.37, "learning_rate": 2.83e-05, "loss": 3.6436, "step": 290 },
    { "epoch": 0.38, "learning_rate": 2.93e-05, "loss": 3.2461, "step": 300 },
    { "epoch": 0.4, "learning_rate": 3.03e-05, "loss": 3.9707, "step": 310 },
    { "epoch": 0.41, "learning_rate": 3.13e-05, "loss": 3.6917, "step": 320 },
    { "epoch": 0.42, "learning_rate": 3.2300000000000006e-05, "loss": 3.5928, "step": 330 },
    { "epoch": 0.43, "learning_rate": 3.33e-05, "loss": 3.5301, "step": 340 },
    { "epoch": 0.45, "learning_rate": 3.430000000000001e-05, "loss": 2.9921, "step": 350 },
    { "epoch": 0.46, "learning_rate": 3.53e-05, "loss": 3.8641, "step": 360 },
    { "epoch": 0.47, "learning_rate": 3.63e-05, "loss": 3.5432, "step": 370 },
    { "epoch": 0.48, "learning_rate": 3.73e-05, "loss": 3.487, "step": 380 },
    { "epoch": 0.5, "learning_rate": 3.83e-05, "loss": 3.3875, "step": 390 },
    { "epoch": 0.51, "learning_rate": 3.9300000000000007e-05, "loss": 2.888, "step": 400 },
    { "epoch": 0.51, "eval_loss": 3.7320261001586914, "eval_runtime": 946.9905, "eval_samples_per_second": 6.582, "eval_steps_per_second": 0.824, "eval_wer": 0.9440106155491065, "step": 400 },
    { "epoch": 0.52, "learning_rate": 4.0300000000000004e-05, "loss": 3.8227, "step": 410 },
    { "epoch": 0.54, "learning_rate": 4.13e-05, "loss": 3.4862, "step": 420 },
    { "epoch": 0.55, "learning_rate": 4.23e-05, "loss": 3.3335, "step": 430 },
    { "epoch": 0.56, "learning_rate": 4.33e-05, "loss": 3.2408, "step": 440 },
    { "epoch": 0.57, "learning_rate": 4.43e-05, "loss": 2.5279, "step": 450 },
    { "epoch": 0.59, "learning_rate": 4.53e-05, "loss": 3.7253, "step": 460 },
    { "epoch": 0.6, "learning_rate": 4.630000000000001e-05, "loss": 3.3742, "step": 470 },
    { "epoch": 0.61, "learning_rate": 4.73e-05, "loss": 3.2955, "step": 480 },
    { "epoch": 0.62, "learning_rate": 4.83e-05, "loss": 3.1341, "step": 490 },
    { "epoch": 0.64, "learning_rate": 4.93e-05, "loss": 2.6154, "step": 500 },
    { "epoch": 0.65, "learning_rate": 5.03e-05, "loss": 3.5928, "step": 510 },
    { "epoch": 0.66, "learning_rate": 5.130000000000001e-05, "loss": 3.338, "step": 520 },
    { "epoch": 0.68, "learning_rate": 5.2300000000000004e-05, "loss": 3.2339, "step": 530 },
    { "epoch": 0.69, "learning_rate": 5.330000000000001e-05, "loss": 3.0817, "step": 540 },
    { "epoch": 0.7, "learning_rate": 5.4300000000000005e-05, "loss": 2.453, "step": 550 },
    { "epoch": 0.71, "learning_rate": 5.530000000000001e-05, "loss": 3.5553, "step": 560 },
    { "epoch": 0.73, "learning_rate": 5.63e-05, "loss": 3.248, "step": 570 },
    { "epoch": 0.74, "learning_rate": 5.73e-05, "loss": 3.1946, "step": 580 },
    { "epoch": 0.75, "learning_rate": 5.83e-05, "loss": 3.0223, "step": 590 },
    { "epoch": 0.76, "learning_rate": 5.93e-05, "loss": 2.3511, "step": 600 },
    { "epoch": 0.78, "learning_rate": 6.03e-05, "loss": 3.5242, "step": 610 },
    { "epoch": 0.79, "learning_rate": 6.13e-05, "loss": 3.1946, "step": 620 },
    { "epoch": 0.8, "learning_rate": 6.23e-05, "loss": 3.1166, "step": 630 },
    { "epoch": 0.82, "learning_rate": 6.330000000000001e-05, "loss": 2.9555, "step": 640 },
    { "epoch": 0.83, "learning_rate": 6.43e-05, "loss": 2.2808, "step": 650 },
    { "epoch": 0.84, "learning_rate": 6.53e-05, "loss": 3.5894, "step": 660 },
    { "epoch": 0.85, "learning_rate": 6.630000000000001e-05, "loss": 3.203, "step": 670 },
    { "epoch": 0.87, "learning_rate": 6.730000000000001e-05, "loss": 3.0484, "step": 680 },
    { "epoch": 0.88, "learning_rate": 6.83e-05, "loss": 2.8733, "step": 690 },
    { "epoch": 0.89, "learning_rate": 6.93e-05, "loss": 2.2043, "step": 700 },
    { "epoch": 0.9, "learning_rate": 7.03e-05, "loss": 3.4822, "step": 710 },
    { "epoch": 0.92, "learning_rate": 7.13e-05, "loss": 3.131, "step": 720 },
    { "epoch": 0.93, "learning_rate": 7.23e-05, "loss": 2.9721, "step": 730 },
    { "epoch": 0.94, "learning_rate": 7.33e-05, "loss": 2.8593, "step": 740 },
    { "epoch": 0.96, "learning_rate": 7.43e-05, "loss": 2.2461, "step": 750 },
    { "epoch": 0.97, "learning_rate": 7.53e-05, "loss": 3.4536, "step": 760 },
    { "epoch": 0.98, "learning_rate": 7.630000000000001e-05, "loss": 3.0393, "step": 770 },
    { "epoch": 0.99, "learning_rate": 7.730000000000001e-05, "loss": 2.7416, "step": 780 },
    { "epoch": 1.01, "learning_rate": 7.83e-05, "loss": 3.0346, "step": 790 },
    { "epoch": 1.02, "learning_rate": 7.93e-05, "loss": 3.1636, "step": 800 },
    { "epoch": 1.02, "eval_loss": 2.918823480606079, "eval_runtime": 1358.8762, "eval_samples_per_second": 4.587, "eval_steps_per_second": 0.574, "eval_wer": 1.1915762944860242, "step": 800 },
    { "epoch": 1.03, "learning_rate": 8.030000000000001e-05, "loss": 2.9569, "step": 810 },
    { "epoch": 1.05, "learning_rate": 8.13e-05, "loss": 2.7792, "step": 820 },
    { "epoch": 1.06, "learning_rate": 8.23e-05, "loss": 2.3876, "step": 830 },
    { "epoch": 1.07, "learning_rate": 8.33e-05, "loss": 2.8558, "step": 840 },
    { "epoch": 1.08, "learning_rate": 8.43e-05, "loss": 3.059, "step": 850 },
    { "epoch": 1.1, "learning_rate": 8.53e-05, "loss": 2.9143, "step": 860 },
    { "epoch": 1.11, "learning_rate": 8.63e-05, "loss": 2.7392, "step": 870 },
    { "epoch": 1.12, "learning_rate": 8.730000000000001e-05, "loss": 2.2904, "step": 880 },
    { "epoch": 1.14, "learning_rate": 8.83e-05, "loss": 2.7131, "step": 890 },
    { "epoch": 1.15, "learning_rate": 8.93e-05, "loss": 3.0171, "step": 900 },
    { "epoch": 1.16, "learning_rate": 9.030000000000001e-05, "loss": 2.8487, "step": 910 },
    { "epoch": 1.17, "learning_rate": 9.130000000000001e-05, "loss": 2.675, "step": 920 },
    { "epoch": 1.19, "learning_rate": 9.230000000000001e-05, "loss": 2.1836, "step": 930 },
    { "epoch": 1.2, "learning_rate": 9.33e-05, "loss": 2.7424, "step": 940 },
    { "epoch": 1.21, "learning_rate": 9.43e-05, "loss": 3.0442, "step": 950 },
    { "epoch": 1.22, "learning_rate": 9.53e-05, "loss": 2.8557, "step": 960 },
    { "epoch": 1.24, "learning_rate": 9.63e-05, "loss": 2.6003, "step": 970 },
    { "epoch": 1.25, "learning_rate": 9.730000000000001e-05, "loss": 2.1095, "step": 980 },
    { "epoch": 1.26, "learning_rate": 9.83e-05, "loss": 2.6137, "step": 990 },
    { "epoch": 1.28, "learning_rate": 9.93e-05, "loss": 2.9493, "step": 1000 },
    { "epoch": 1.29, "learning_rate": 9.997956403269755e-05, "loss": 2.8053, "step": 1010 },
    { "epoch": 1.3, "learning_rate": 9.991144414168938e-05, "loss": 2.5713, "step": 1020 },
    { "epoch": 1.31, "learning_rate": 9.984332425068121e-05, "loss": 2.0882, "step": 1030 },
    { "epoch": 1.33, "learning_rate": 9.977520435967302e-05, "loss": 2.6747, "step": 1040 },
    { "epoch": 1.34, "learning_rate": 9.970708446866486e-05, "loss": 2.9617, "step": 1050 },
    { "epoch": 1.35, "learning_rate": 9.963896457765668e-05, "loss": 2.7173, "step": 1060 },
    { "epoch": 1.36, "learning_rate": 9.95708446866485e-05, "loss": 2.5349, "step": 1070 },
    { "epoch": 1.38, "learning_rate": 9.950272479564033e-05, "loss": 2.1039, "step": 1080 },
    { "epoch": 1.39, "learning_rate": 9.943460490463216e-05, "loss": 2.5893, "step": 1090 },
    { "epoch": 1.4, "learning_rate": 9.936648501362397e-05, "loss": 2.8321, "step": 1100 },
    { "epoch": 1.42, "learning_rate": 9.929836512261581e-05, "loss": 2.6416, "step": 1110 },
    { "epoch": 1.43, "learning_rate": 9.923024523160763e-05, "loss": 2.4261, "step": 1120 },
    { "epoch": 1.44, "learning_rate": 9.916212534059946e-05, "loss": 1.9565, "step": 1130 },
    { "epoch": 1.45, "learning_rate": 9.909400544959128e-05, "loss": 2.5986, "step": 1140 },
    { "epoch": 1.47, "learning_rate": 9.902588555858311e-05, "loss": 2.7577, "step": 1150 },
    { "epoch": 1.48, "learning_rate": 9.895776566757494e-05, "loss": 2.5457, "step": 1160 },
    { "epoch": 1.49, "learning_rate": 9.888964577656676e-05, "loss": 2.2434, "step": 1170 },
    { "epoch": 1.5, "learning_rate": 9.882152588555859e-05, "loss": 1.6913, "step": 1180 },
    { "epoch": 1.52, "learning_rate": 9.875340599455042e-05, "loss": 2.6432, "step": 1190 },
    { "epoch": 1.53, "learning_rate": 9.868528610354225e-05, "loss": 2.773, "step": 1200 },
    { "epoch": 1.53, "eval_loss": 2.3347249031066895, "eval_runtime": 1367.3502, "eval_samples_per_second": 4.558, "eval_steps_per_second": 0.57, "eval_wer": 1.013412631739728, "step": 1200 },
    { "epoch": 1.54, "learning_rate": 9.861716621253406e-05, "loss": 2.3554, "step": 1210 },
    { "epoch": 1.56, "learning_rate": 9.85490463215259e-05, "loss": 1.9262, "step": 1220 },
    { "epoch": 1.57, "learning_rate": 9.848092643051771e-05, "loss": 1.4834, "step": 1230 },
    { "epoch": 1.58, "learning_rate": 9.841280653950954e-05, "loss": 2.4816, "step": 1240 },
    { "epoch": 1.59, "learning_rate": 9.834468664850137e-05, "loss": 2.415, "step": 1250 },
    { "epoch": 1.61, "learning_rate": 9.82765667574932e-05, "loss": 1.9016, "step": 1260 },
    { "epoch": 1.62, "learning_rate": 9.820844686648501e-05, "loss": 1.5463, "step": 1270 },
    { "epoch": 1.63, "learning_rate": 9.814032697547685e-05, "loss": 1.2685, "step": 1280 },
    { "epoch": 1.64, "learning_rate": 9.807220708446867e-05, "loss": 2.2386, "step": 1290 },
    { "epoch": 1.66, "learning_rate": 9.800408719346049e-05, "loss": 1.9767, "step": 1300 },
    { "epoch": 1.67, "learning_rate": 9.793596730245232e-05, "loss": 1.5275, "step": 1310 },
    { "epoch": 1.68, "learning_rate": 9.786784741144415e-05, "loss": 1.2746, "step": 1320 },
    { "epoch": 1.7, "learning_rate": 9.779972752043597e-05, "loss": 1.1684, "step": 1330 },
    { "epoch": 1.71, "learning_rate": 9.77316076294278e-05, "loss": 2.0181, "step": 1340 },
    { "epoch": 1.72, "learning_rate": 9.766348773841963e-05, "loss": 1.6714, "step": 1350 },
    { "epoch": 1.73, "learning_rate": 9.759536784741144e-05, "loss": 1.2632, "step": 1360 },
    { "epoch": 1.75, "learning_rate": 9.752724795640328e-05, "loss": 1.1007, "step": 1370 },
    { "epoch": 1.76, "learning_rate": 9.74591280653951e-05, "loss": 1.0076, "step": 1380 },
    { "epoch": 1.77, "learning_rate": 9.739100817438692e-05, "loss": 1.8759, "step": 1390 },
    { "epoch": 1.79, "learning_rate": 9.732288828337875e-05, "loss": 1.4086, "step": 1400 },
    { "epoch": 1.8, "learning_rate": 9.725476839237058e-05, "loss": 1.1063, "step": 1410 },
    { "epoch": 1.81, "learning_rate": 9.718664850136239e-05, "loss": 1.0004, "step": 1420 },
    { "epoch": 1.82, "learning_rate": 9.711852861035423e-05, "loss": 0.9749, "step": 1430 },
    { "epoch": 1.84, "learning_rate": 9.705040871934605e-05, "loss": 1.5007, "step": 1440 },
    { "epoch": 1.85, "learning_rate": 9.698228882833788e-05, "loss": 1.2057, "step": 1450 },
    { "epoch": 1.86, "learning_rate": 9.69141689373297e-05, "loss": 1.0082, "step": 1460 },
    { "epoch": 1.87, "learning_rate": 9.684604904632153e-05, "loss": 0.891, "step": 1470 },
    { "epoch": 1.89, "learning_rate": 9.677792915531336e-05, "loss": 0.8808, "step": 1480 },
    { "epoch": 1.9, "learning_rate": 9.670980926430518e-05, "loss": 1.4966, "step": 1490 },
    { "epoch": 1.91, "learning_rate": 9.664168937329701e-05, "loss": 1.1203, "step": 1500 },
    { "epoch": 1.93, "learning_rate": 9.657356948228883e-05, "loss": 0.914, "step": 1510 },
    { "epoch": 1.94, "learning_rate": 9.650544959128067e-05, "loss": 0.8256, "step": 1520 },
    { "epoch": 1.95, "learning_rate": 9.643732970027248e-05, "loss": 0.8674, "step": 1530 },
    { "epoch": 1.96, "learning_rate": 9.636920980926431e-05, "loss": 1.2761, "step": 1540 },
    { "epoch": 1.98, "learning_rate": 9.630108991825613e-05, "loss": 0.981, "step": 1550 },
    { "epoch": 1.99, "learning_rate": 9.623297002724796e-05, "loss": 0.8637, "step": 1560 },
    { "epoch": 2.0, "learning_rate": 9.616485013623978e-05, "loss": 1.0717, "step": 1570 },
    { "epoch": 2.02, "learning_rate": 9.609673024523162e-05, "loss": 1.0224, "step": 1580 },
    { "epoch": 2.03, "learning_rate": 9.602861035422343e-05, "loss": 0.76, "step": 1590 },
    { "epoch": 2.04, "learning_rate": 9.596049046321526e-05, "loss": 0.7198, "step": 1600 },
    { "epoch": 2.04, "eval_loss": 0.6678358316421509, "eval_runtime": 1372.0406, "eval_samples_per_second": 4.543, "eval_steps_per_second": 0.568, "eval_wer": 0.4826351764166794, "step": 1600 },
    { "epoch": 2.05, "learning_rate": 9.589237057220708e-05, "loss": 0.6698, "step": 1610 },
    { "epoch": 2.07, "learning_rate": 9.582425068119891e-05, "loss": 0.8794, "step": 1620 },
    { "epoch": 2.08, "learning_rate": 9.575613079019074e-05, "loss": 0.9707, "step": 1630 },
    { "epoch": 2.09, "learning_rate": 9.568801089918257e-05, "loss": 0.7405, "step": 1640 },
    { "epoch": 2.1, "learning_rate": 9.56198910081744e-05, "loss": 0.6886, "step": 1650 },
    { "epoch": 2.12, "learning_rate": 9.555177111716622e-05, "loss": 0.6535, "step": 1660 },
    { "epoch": 2.13, "learning_rate": 9.548365122615805e-05, "loss": 0.8923, "step": 1670 },
    { "epoch": 2.14, "learning_rate": 9.541553133514986e-05, "loss": 0.8998, "step": 1680 },
    { "epoch": 2.16, "learning_rate": 9.53474114441417e-05, "loss": 0.7139, "step": 1690 },
    { "epoch": 2.17, "learning_rate": 9.527929155313352e-05, "loss": 0.6413, "step": 1700 },
    { "epoch": 2.18, "learning_rate": 9.521117166212534e-05, "loss": 0.6463, "step": 1710 },
    { "epoch": 2.19, "learning_rate": 9.514305177111717e-05, "loss": 0.7948, "step": 1720 },
    { "epoch": 2.21, "learning_rate": 9.5074931880109e-05, "loss": 0.8649, "step": 1730 },
    { "epoch": 2.22, "learning_rate": 9.500681198910081e-05, "loss": 0.7054, "step": 1740 },
    { "epoch": 2.23, "learning_rate": 9.493869209809265e-05, "loss": 0.6181, "step": 1750 },
    { "epoch": 2.24, "learning_rate": 9.487057220708447e-05, "loss": 0.5805, "step": 1760 },
    { "epoch": 2.26, "learning_rate": 9.48024523160763e-05, "loss": 0.7917, "step": 1770 },
    { "epoch": 2.27, "learning_rate": 9.473433242506812e-05, "loss": 0.796, "step": 1780 },
    { "epoch": 2.28, "learning_rate": 9.466621253405995e-05, "loss": 0.6605, "step": 1790 },
    { "epoch": 2.3, "learning_rate": 9.459809264305178e-05, "loss": 0.5949, "step": 1800 },
    { "epoch": 2.31, "learning_rate": 9.45299727520436e-05, "loss": 0.603, "step": 1810 },
    { "epoch": 2.32, "learning_rate": 9.446185286103543e-05, "loss": 0.7413, "step": 1820 },
    { "epoch": 2.33, "learning_rate": 9.439373297002725e-05, "loss": 0.725, "step": 1830 },
    { "epoch": 2.35, "learning_rate": 9.432561307901909e-05, "loss": 0.6509, "step": 1840 },
    { "epoch": 2.36, "learning_rate": 9.42574931880109e-05, "loss": 0.5928, "step": 1850 },
    { "epoch": 2.37, "learning_rate": 9.418937329700273e-05, "loss": 0.5688, "step": 1860 },
    { "epoch": 2.38, "learning_rate": 9.412125340599455e-05, "loss": 0.7283, "step": 1870 },
    { "epoch": 2.4, "learning_rate": 9.405313351498638e-05, "loss": 0.7412, "step": 1880 },
    { "epoch": 2.41, "learning_rate": 9.39850136239782e-05, "loss": 0.5876, "step": 1890 },
    { "epoch": 2.42, "learning_rate": 9.391689373297004e-05, "loss": 0.5753, "step": 1900 },
    { "epoch": 2.44, "learning_rate": 9.384877384196185e-05, "loss": 0.5513, "step": 1910 },
    { "epoch": 2.45, "learning_rate": 9.378065395095368e-05, "loss": 0.7226, "step": 1920 },
    { "epoch": 2.46, "learning_rate": 9.37125340599455e-05, "loss": 0.7162, "step": 1930 },
    { "epoch": 2.47, "learning_rate": 9.364441416893733e-05, "loss": 0.5622, "step": 1940 },
    { "epoch": 2.49, "learning_rate": 9.357629427792916e-05, "loss": 0.5364, "step": 1950 },
    { "epoch": 2.5, "learning_rate": 9.350817438692099e-05, "loss": 0.5314, "step": 1960 },
    { "epoch": 2.51, "learning_rate": 9.344005449591281e-05, "loss": 0.7133, "step": 1970 },
    { "epoch": 2.53, "learning_rate": 9.337193460490463e-05, "loss": 0.6619, "step": 1980 },
    { "epoch": 2.54, "learning_rate": 9.330381471389647e-05, "loss": 0.5651, "step": 1990 },
    { "epoch": 2.55, "learning_rate": 9.323569482288828e-05, "loss": 0.5255, "step": 2000 },
    { "epoch": 2.55, "eval_loss": 0.4605100154876709, "eval_runtime": 1363.3153, "eval_samples_per_second": 4.572, "eval_steps_per_second": 0.572, "eval_wer": 0.4135290972964717, "step": 2000 },
    { "epoch": 2.56, "learning_rate": 9.316757493188011e-05, "loss": 0.5396, "step": 2010 },
    { "epoch": 2.58, "learning_rate": 9.309945504087194e-05, "loss": 0.6388, "step": 2020 },
    { "epoch": 2.59, "learning_rate": 9.303133514986376e-05, "loss": 0.6487, "step": 2030 },
    { "epoch": 2.6, "learning_rate": 9.296321525885558e-05, "loss": 0.5491, "step": 2040 },
    { "epoch": 2.61, "learning_rate": 9.289509536784742e-05, "loss": 0.5008, "step": 2050 },
    { "epoch": 2.63, "learning_rate": 9.282697547683923e-05, "loss": 0.5197, "step": 2060 },
    { "epoch": 2.64, "learning_rate": 9.275885558583107e-05, "loss": 0.6516, "step": 2070 },
    { "epoch": 2.65, "learning_rate": 9.26907356948229e-05, "loss": 0.6303, "step": 2080 },
    { "epoch": 2.67, "learning_rate": 9.262261580381471e-05, "loss": 0.5336, "step": 2090 },
    { "epoch": 2.68, "learning_rate": 9.255449591280656e-05, "loss": 0.4828, "step": 2100 },
    { "epoch": 2.69, "learning_rate": 9.248637602179837e-05, "loss": 0.5037, "step": 2110 },
    { "epoch": 2.7, "learning_rate": 9.24182561307902e-05, "loss": 0.6519, "step": 2120 },
    { "epoch": 2.72, "learning_rate": 9.235013623978202e-05, "loss": 0.6046, "step": 2130 },
    { "epoch": 2.73, "learning_rate": 9.228201634877385e-05, "loss": 0.5294, "step": 2140 },
    { "epoch": 2.74, "learning_rate": 9.221389645776567e-05, "loss": 0.4708, "step": 2150 },
    { "epoch": 2.75, "learning_rate": 9.21457765667575e-05, "loss": 0.5075, "step": 2160 },
    { "epoch": 2.77, "learning_rate": 9.207765667574932e-05, "loss": 0.5829, "step": 2170 },
    { "epoch": 2.78, "learning_rate": 9.200953678474115e-05, "loss": 0.5873, "step": 2180 },
    { "epoch": 2.79, "learning_rate": 9.194141689373297e-05, "loss": 0.4926, "step": 2190 },
    { "epoch": 2.81, "learning_rate": 9.18732970027248e-05, "loss": 0.4843, "step": 2200 },
    { "epoch": 2.82, "learning_rate": 9.180517711171663e-05, "loss": 0.4875, "step": 2210 },
    { "epoch": 2.83, "learning_rate": 9.173705722070846e-05, "loss": 0.6328, "step": 2220 },
    { "epoch": 2.84, "learning_rate": 9.166893732970028e-05, "loss": 0.5735, "step": 2230 },
    { "epoch": 2.86, "learning_rate": 9.16008174386921e-05, "loss": 0.4783, "step": 2240 },
    { "epoch": 2.87, "learning_rate": 9.153269754768394e-05, "loss": 0.4412, "step": 2250 },
    { "epoch": 2.88, "learning_rate": 9.146457765667575e-05, "loss": 0.5008, "step": 2260 },
    { "epoch": 2.89, "learning_rate": 9.139645776566758e-05, "loss": 0.5338, "step": 2270 },
    { "epoch": 2.91, "learning_rate": 9.132833787465941e-05, "loss": 0.5799, "step": 2280 },
    { "epoch": 2.92, "learning_rate": 9.126021798365123e-05, "loss": 0.4821, "step": 2290 },
    { "epoch": 2.93, "learning_rate": 9.119209809264305e-05, "loss": 0.4763, "step": 2300 },
    { "epoch": 2.95, "learning_rate": 9.112397820163489e-05, "loss": 0.4834, "step": 2310 },
    { "epoch": 2.96, "learning_rate": 9.10558583106267e-05, "loss": 0.5971, "step": 2320 },
    { "epoch": 2.97, "learning_rate": 9.098773841961853e-05, "loss": 0.531, "step": 2330 },
    { "epoch": 2.98, "learning_rate": 9.091961852861036e-05, "loss": 0.4768, "step": 2340 },
    { "epoch": 3.0, "learning_rate": 9.085149863760218e-05, "loss": 0.4849, "step": 2350 },
    { "epoch": 3.01, "learning_rate": 9.078337874659401e-05, "loss": 0.6024, "step": 2360 },
    { "epoch": 3.02, "learning_rate": 9.071525885558584e-05, "loss": 0.4439, "step": 2370 },
    { "epoch": 3.04, "learning_rate": 9.064713896457767e-05, "loss": 0.3636, "step": 2380 },
    { "epoch": 3.05, "learning_rate": 9.057901907356948e-05, "loss": 0.3485, "step": 2390 },
    { "epoch": 3.06, "learning_rate": 9.051089918256132e-05, "loss": 0.3961, "step": 2400 },
    { "epoch": 3.06, "eval_loss": 0.4266389012336731, "eval_runtime": 1426.8916, "eval_samples_per_second": 4.368, "eval_steps_per_second": 0.547, "eval_wer": 0.3954769360012219, "step": 2400 },
    { "epoch": 3.07, "learning_rate": 9.044277929155313e-05, "loss": 0.5748, "step": 2410 },
    { "epoch": 3.09, "learning_rate": 9.037465940054496e-05, "loss": 0.4057, "step": 2420 },
    { "epoch": 3.1, "learning_rate": 9.030653950953679e-05, "loss": 0.3898, "step": 2430 },
    { "epoch": 3.11, "learning_rate": 9.023841961852862e-05, "loss": 0.3239, "step": 2440 },
    { "epoch": 3.12, "learning_rate": 9.017029972752043e-05, "loss": 0.3935, "step": 2450 },
    { "epoch": 3.14, "learning_rate": 9.010217983651227e-05, "loss": 0.5595, "step": 2460 },
    { "epoch": 3.15, "learning_rate": 9.003405994550409e-05, "loss": 0.4205, "step": 2470 },
    { "epoch": 3.16, "learning_rate": 8.996594005449591e-05, "loss": 0.355, "step": 2480 },
    { "epoch": 3.18, "learning_rate": 8.989782016348774e-05, "loss": 0.3479, "step": 2490 },
    { "epoch": 3.19, "learning_rate": 8.982970027247957e-05, "loss": 0.4093, "step": 2500 },
    { "epoch": 3.2, "learning_rate": 8.97615803814714e-05, "loss": 0.542, "step": 2510 },
    { "epoch": 3.21, "learning_rate": 8.969346049046322e-05, "loss": 0.3946, "step": 2520 },
    { "epoch": 3.23, "learning_rate": 8.962534059945505e-05, "loss": 0.3697, "step": 2530 },
    { "epoch": 3.24, "learning_rate": 8.955722070844688e-05, "loss": 0.3516, "step": 2540 },
    { "epoch": 3.25, "learning_rate": 8.94891008174387e-05, "loss": 0.3849, "step": 2550 },
    { "epoch": 3.27, "learning_rate": 8.942098092643052e-05, "loss": 0.5427, "step": 2560 },
    { "epoch": 3.28, "learning_rate": 8.935286103542236e-05, "loss": 0.4053, "step": 2570 },
    { "epoch": 3.29, "learning_rate": 8.928474114441417e-05, "loss": 0.3635, "step": 2580 },
    { "epoch": 3.3, "learning_rate": 8.9216621253406e-05, "loss": 0.3638, "step": 2590 },
    { "epoch": 3.32, "learning_rate": 8.914850136239783e-05, "loss": 0.4078, "step": 2600 },
    { "epoch": 3.33, "learning_rate": 8.908038147138965e-05, "loss": 0.5507, "step": 2610 },
    { "epoch": 3.34, "learning_rate": 8.901226158038147e-05, "loss": 0.3924, "step": 2620 },
    { "epoch": 3.35, "learning_rate": 8.894414168937331e-05, "loss": 0.3673, "step": 2630 },
    { "epoch": 3.37, "learning_rate": 8.887602179836512e-05, "loss": 0.3392, "step": 2640 },
    { "epoch": 3.38, "learning_rate": 8.880790190735695e-05, "loss": 0.3742, "step": 2650 },
    { "epoch": 3.39, "learning_rate": 8.873978201634878e-05, "loss": 0.5251, "step": 2660 },
    { "epoch": 3.41, "learning_rate": 8.86716621253406e-05, "loss": 0.3981, "step": 2670 },
    { "epoch": 3.42, "learning_rate": 8.860354223433243e-05, "loss": 0.3673, "step": 2680 },
    { "epoch": 3.43, "learning_rate": 8.853542234332426e-05, "loss": 0.3505, "step": 2690 },
    { "epoch": 3.44, "learning_rate": 8.846730245231609e-05, "loss": 0.3614, "step": 2700 },
    { "epoch": 3.46, "learning_rate": 8.83991825613079e-05, "loss": 0.5592, "step": 2710 },
    { "epoch": 3.47, "learning_rate": 8.833106267029974e-05, "loss": 0.3909, "step": 2720 },
    { "epoch": 3.48, "learning_rate": 8.826294277929155e-05, "loss": 0.3616, "step": 2730 },
    { "epoch": 3.49, "learning_rate": 8.819482288828338e-05, "loss": 0.3378, "step": 2740 },
    { "epoch": 3.51, "learning_rate": 8.812670299727521e-05, "loss": 0.4051, "step": 2750 },
    { "epoch": 3.52, "learning_rate": 8.805858310626704e-05, "loss": 0.5289, "step": 2760 },
    { "epoch": 3.53, "learning_rate": 8.799046321525885e-05, "loss": 0.3972, "step": 2770 },
    { "epoch": 3.55, "learning_rate": 8.792234332425069e-05, "loss": 0.3723, "step": 2780 },
    { "epoch": 3.56, "learning_rate": 8.78542234332425e-05, "loss": 0.3565, "step": 2790 },
    { "epoch": 3.57, "learning_rate": 8.778610354223433e-05, "loss": 0.3424, "step": 2800 },
    { "epoch": 3.57, "eval_loss": 0.3786042034626007, "eval_runtime": 1357.3296, "eval_samples_per_second": 4.592, "eval_steps_per_second": 0.575, "eval_wer": 0.3741026424316481, "step": 2800 },
    { "epoch": 3.58, "learning_rate": 8.771798365122616e-05, "loss": 0.5377, "step": 2810 },
    { "epoch": 3.6, "learning_rate": 8.764986376021799e-05, "loss": 0.3955, "step": 2820 },
    { "epoch": 3.61, "learning_rate": 8.758174386920981e-05, "loss": 0.3742, "step": 2830 },
    { "epoch": 3.62, "learning_rate": 8.751362397820164e-05, "loss": 0.3342, "step": 2840 },
    { "epoch": 3.63, "learning_rate": 8.744550408719347e-05, "loss": 0.4129, "step": 2850 },
    { "epoch": 3.65, "learning_rate": 8.737738419618528e-05, "loss": 0.5368, "step": 2860 },
    { "epoch": 3.66, "learning_rate": 8.730926430517712e-05, "loss": 0.3735, "step": 2870 },
    { "epoch": 3.67, "learning_rate": 8.724114441416894e-05, "loss": 0.3438, "step": 2880 },
    { "epoch": 3.69, "learning_rate": 8.717302452316076e-05, "loss": 0.3248, "step": 2890 },
    { "epoch": 3.7, "learning_rate": 8.710490463215259e-05, "loss": 0.3426, "step": 2900 },
    { "epoch": 3.71, "learning_rate": 8.703678474114442e-05, "loss": 0.4977, "step": 2910 },
    { "epoch": 3.72, "learning_rate": 8.696866485013623e-05, "loss": 0.3857, "step": 2920 },
    { "epoch": 3.74, "learning_rate": 8.690054495912807e-05, "loss": 0.3688, "step": 2930 },
    { "epoch": 3.75, "learning_rate": 8.683242506811989e-05, "loss": 0.3203, "step": 2940 },
    { "epoch": 3.76, "learning_rate": 8.676430517711172e-05, "loss": 0.3877, "step": 2950 },
    { "epoch": 3.77, "learning_rate": 8.669618528610354e-05, "loss": 0.4694, "step": 2960 },
    { "epoch": 3.79, "learning_rate": 8.662806539509537e-05, "loss": 0.3847, "step": 2970 },
    { "epoch": 3.8, "learning_rate": 8.65599455040872e-05, "loss": 0.3363, "step": 2980 },
    { "epoch": 3.81, "learning_rate": 8.649182561307902e-05, "loss": 0.3188, "step": 2990 },
    { "epoch": 3.83, "learning_rate": 8.642370572207085e-05, "loss": 0.3828, "step": 3000 },
    { "epoch": 3.84, "learning_rate": 8.635558583106268e-05, "loss": 0.4805, "step": 3010 },
    { "epoch": 3.85, "learning_rate": 8.62874659400545e-05, "loss": 0.3729, "step": 3020 },
    { "epoch": 3.86, "learning_rate": 8.621934604904632e-05, "loss": 0.3452, "step": 3030 },
    { "epoch": 3.88, "learning_rate": 8.615122615803816e-05, "loss": 0.3196, "step": 3040 },
    { "epoch": 3.89, "learning_rate": 8.608310626702997e-05, "loss": 0.3589, "step": 3050 },
    { "epoch": 3.9, "learning_rate": 8.60149863760218e-05, "loss": 0.4738, "step": 3060 },
    { "epoch": 3.92, "learning_rate": 8.594686648501363e-05, "loss": 0.3593, "step": 3070 },
    { "epoch": 3.93, "learning_rate": 8.587874659400546e-05, "loss": 0.3296, "step": 3080 },
    { "epoch": 3.94, "learning_rate": 8.581062670299727e-05, "loss": 0.326, "step": 3090 },
    { "epoch": 3.95, "learning_rate": 8.574250681198911e-05, "loss": 0.3493, "step": 3100 },
    { "epoch": 3.97, "learning_rate": 8.567438692098093e-05, "loss": 0.422, "step": 3110 },
    { "epoch": 3.98, "learning_rate": 8.560626702997275e-05, "loss": 0.362, "step": 3120 },
    { "epoch": 3.99, "learning_rate": 8.553814713896458e-05, "loss": 0.329, "step": 3130 },
    { "epoch": 4.01, "learning_rate": 8.547002724795641e-05, "loss": 0.4632, "step": 3140 },
    { "epoch": 4.02, "learning_rate": 8.540190735694823e-05, "loss": 0.3265, "step": 3150 },
    { "epoch": 4.03, "learning_rate": 8.533378746594006e-05, "loss": 0.2534, "step": 3160 },
    { "epoch": 4.04, "learning_rate": 8.526566757493189e-05, "loss": 0.2548, "step": 3170 },
    { "epoch": 4.06, "learning_rate": 8.51975476839237e-05, "loss": 0.2539, "step": 3180 },
    { "epoch": 4.07, "learning_rate": 8.512942779291554e-05, "loss": 0.4004, "step": 3190 },
    { "epoch": 4.08, "learning_rate": 8.506130790190736e-05, "loss": 0.3858, "step": 3200 },
    { "epoch": 4.08, "eval_loss": 0.31610095500946045, "eval_runtime": 1426.1412, "eval_samples_per_second": 4.371, "eval_steps_per_second": 0.547, "eval_wer": 0.35519130899648693, "step": 3200 },
    { "epoch": 4.09, "learning_rate": 8.499318801089918e-05, "loss": 0.2862, "step": 3210 },
    { "epoch": 4.11, "learning_rate": 8.492506811989101e-05, "loss": 0.2404, "step": 3220 },
    { "epoch": 4.12, "learning_rate": 8.485694822888284e-05, "loss": 0.2461, "step": 3230 },
    { "epoch": 4.13, "learning_rate": 8.478882833787465e-05, "loss": 0.3903, "step": 3240 },
    { "epoch": 4.15, "learning_rate": 8.47207084468665e-05, "loss": 0.3463, "step": 3250 },
    { "epoch": 4.16, "learning_rate": 8.465258855585831e-05, "loss": 0.2812, "step": 3260 },
    { "epoch": 4.17, "learning_rate": 8.458446866485014e-05, "loss": 0.2613, "step": 3270 },
    { "epoch": 4.18, "learning_rate": 8.451634877384196e-05, "loss": 0.2712, "step": 3280 },
    { "epoch": 4.2, "learning_rate": 8.444822888283379e-05, "loss": 0.4027, "step": 3290 },
    { "epoch": 4.21, "learning_rate": 8.438010899182562e-05, "loss": 0.3261, "step": 3300 },
    { "epoch": 4.22, "learning_rate": 8.431198910081744e-05, "loss": 0.2902, "step": 3310 },
    { "epoch": 4.23, "learning_rate": 8.424386920980927e-05, "loss": 0.2529, "step": 3320 },
    { "epoch": 4.25, "learning_rate": 8.417574931880109e-05, "loss": 0.2809, "step": 3330 },
    { "epoch": 4.26, "learning_rate": 8.410762942779293e-05, "loss": 0.3984, "step": 3340 },
    { "epoch": 4.27, "learning_rate": 8.403950953678474e-05, "loss": 0.3262, "step": 3350 },
    { "epoch": 4.29, "learning_rate": 8.397138964577657e-05, "loss": 0.2792, "step": 3360 },
    { "epoch": 4.3, "learning_rate": 8.39032697547684e-05, "loss": 0.2571, "step": 3370 },
    { "epoch": 4.31, "learning_rate": 8.383514986376022e-05, "loss": 0.2702, "step": 3380 },
    { "epoch": 4.32, "learning_rate": 8.376702997275204e-05, "loss": 0.4027, "step": 3390 },
    { "epoch": 4.34, "learning_rate": 8.369891008174388e-05, "loss": 0.3302, "step": 3400 },
    { "epoch": 4.35, "learning_rate": 8.363079019073569e-05, "loss": 0.2765, "step": 3410 },
    { "epoch": 4.36, "learning_rate": 8.356267029972752e-05, "loss": 0.2596, "step": 3420 },
    { "epoch": 4.37, "learning_rate": 8.349455040871936e-05, "loss": 0.2913, "step": 3430 },
    { "epoch": 4.39, "learning_rate": 8.342643051771117e-05, "loss": 0.3909, "step": 3440 },
    { "epoch": 4.4, "learning_rate": 8.335831062670301e-05, "loss": 0.3138, "step": 3450 },
    { "epoch": 4.41, "learning_rate": 8.329019073569483e-05, "loss": 0.2773, "step": 3460 },
    { "epoch": 4.43, "learning_rate": 8.322207084468665e-05, "loss": 0.2594, "step": 3470 },
    { "epoch": 4.44, "learning_rate": 8.315395095367848e-05, "loss": 0.2463, "step": 3480 },
    { "epoch": 4.45, "learning_rate": 8.308583106267031e-05, "loss": 0.3788, "step": 3490 },
    { "epoch": 4.46, "learning_rate": 8.301771117166212e-05, "loss": 0.3399, "step": 3500 },
    { "epoch": 4.48, "learning_rate": 8.294959128065396e-05, "loss": 0.2706, "step": 3510 },
    { "epoch": 4.49, "learning_rate": 8.288147138964578e-05, "loss": 0.2421, "step": 3520 },
    { "epoch": 4.5, "learning_rate": 8.28133514986376e-05, "loss": 0.2637, "step": 3530 },
    { "epoch": 4.51, "learning_rate": 8.274523160762943e-05, "loss": 0.381, "step": 3540 },
    { "epoch": 4.53, "learning_rate": 8.267711171662126e-05, "loss": 0.3187, "step": 3550 },
    { "epoch": 4.54, "learning_rate": 8.260899182561309e-05, "loss": 0.2779, "step": 3560 },
    { "epoch": 4.55, "learning_rate": 8.254087193460491e-05, "loss": 0.2455, "step": 3570 },
    { "epoch": 4.57, "learning_rate": 8.247275204359674e-05, "loss": 0.2843, "step": 3580 },
    { "epoch": 4.58, "learning_rate": 8.240463215258856e-05, "loss": 0.4318, "step": 3590 },
    { "epoch": 4.59, "learning_rate": 8.23365122615804e-05, "loss": 0.3218, "step": 3600 },
    { "epoch": 4.59, "eval_loss": 0.3028896749019623, "eval_runtime": 1356.3055, "eval_samples_per_second": 4.596, "eval_steps_per_second": 0.575, "eval_wer": 0.35104819001069193, "step": 3600 },
    { "epoch": 4.6, "learning_rate": 8.226839237057221e-05, "loss": 0.2763, "step": 3610 },
    { "epoch": 4.62, "learning_rate": 8.220027247956404e-05, "loss": 0.2707, "step": 3620 },
    { "epoch": 4.63, "learning_rate": 8.213215258855586e-05, "loss": 0.2201, "step": 3630 },
    { "epoch": 4.64, "learning_rate": 8.206403269754769e-05, "loss": 0.3985, "step": 3640 },
    { "epoch": 4.66, "learning_rate": 8.19959128065395e-05, "loss": 0.3283, "step": 3650 },
    { "epoch": 4.67, "learning_rate": 8.192779291553135e-05, "loss": 0.2688, "step": 3660 },
    { "epoch": 4.68, "learning_rate": 8.185967302452316e-05, "loss": 0.2559, "step": 3670 },
    { "epoch": 4.69, "learning_rate": 8.179155313351499e-05, "loss": 0.2632, "step": 3680 },
    { "epoch": 4.71, "learning_rate": 8.172343324250681e-05, "loss": 0.381, "step": 3690 },
    { "epoch": 4.72, "learning_rate": 8.165531335149864e-05, "loss": 0.3152, "step": 3700 },
    { "epoch": 4.73, "learning_rate": 8.158719346049047e-05, "loss": 0.2753, "step": 3710 },
    { "epoch": 4.74, "learning_rate": 8.15190735694823e-05, "loss": 0.2612, "step": 3720 },
    { "epoch": 4.76, "learning_rate": 8.145095367847412e-05, "loss": 0.2563, "step": 3730 },
    { "epoch": 4.77, "learning_rate": 8.138283378746594e-05, "loss": 0.4019, "step": 3740 },
    { "epoch": 4.78, "learning_rate": 8.131471389645778e-05, "loss": 0.3191, "step": 3750 },
    { "epoch": 4.8, "learning_rate": 8.124659400544959e-05, "loss": 0.2542, "step": 3760 },
    { "epoch": 4.81, "learning_rate": 8.117847411444142e-05, "loss": 0.2379, "step": 3770 },
    { "epoch": 4.82, "learning_rate": 8.111035422343325e-05, "loss": 0.2729, "step": 3780 },
    { "epoch": 4.83, "learning_rate": 8.104223433242507e-05, "loss": 0.4223, "step": 3790 },
    { "epoch": 4.85, "learning_rate": 8.097411444141689e-05, "loss": 0.3398, "step": 3800 },
    { "epoch": 4.86, "learning_rate": 8.090599455040873e-05, "loss": 0.275, "step": 3810 },
    { "epoch": 4.87, "learning_rate": 8.083787465940054e-05, "loss": 0.2567, "step": 3820 },
    { "epoch": 4.88, "learning_rate": 8.076975476839237e-05, "loss": 0.2844, "step": 3830 },
    { "epoch": 4.9, "learning_rate": 8.07016348773842e-05, "loss": 0.3447, "step": 3840 },
    { "epoch": 4.91, "learning_rate": 8.063351498637602e-05, "loss": 0.3174, "step": 3850 },
    { "epoch": 4.92, "learning_rate": 8.056539509536785e-05, "loss": 0.2743, "step": 3860 },
    { "epoch": 4.94, "learning_rate": 8.049727520435968e-05, "loss": 0.2494, "step": 3870 },
    { "epoch": 4.95, "learning_rate": 8.04291553133515e-05, "loss": 0.2413, "step": 3880 },
    { "epoch": 4.96, "learning_rate": 8.036103542234332e-05, "loss": 0.3719, "step": 3890 },
    { "epoch": 4.97, "learning_rate": 8.029291553133516e-05, "loss": 0.3094, "step": 3900 },
    { "epoch": 4.99, "learning_rate": 8.022479564032698e-05, "loss": 0.2559, "step": 3910 },
    { "epoch": 5.0, "learning_rate": 8.015667574931882e-05, "loss": 0.292, "step": 3920 },
    { "epoch": 5.01, "learning_rate": 8.008855585831063e-05, "loss": 0.3344, "step": 3930 },
    { "epoch": 5.03, "learning_rate": 8.002043596730246e-05, "loss": 0.2311, "step": 3940 },
    { "epoch": 5.04, "learning_rate": 7.995231607629428e-05, "loss": 0.1952, "step": 3950 },
    { "epoch": 5.05, "learning_rate": 7.988419618528611e-05, "loss": 0.1826, "step": 3960 },
    { "epoch": 5.06, "learning_rate": 7.981607629427793e-05, "loss": 0.2663, "step": 3970 },
    { "epoch": 5.08, "learning_rate": 7.974795640326977e-05, "loss": 0.3244, "step": 3980 },
    { "epoch": 5.09, "learning_rate": 7.967983651226158e-05, "loss": 0.2331, "step": 3990 },
    { "epoch": 5.1, "learning_rate": 7.961171662125341e-05, "loss": 0.199, "step": 4000 },
    { "epoch": 5.1, "eval_loss": 0.29879283905029297, "eval_runtime": 1354.5596, "eval_samples_per_second": 4.601, "eval_steps_per_second": 0.576, "eval_wer": 0.34178822361386896, "step": 4000 },
    { "epoch": 5.11, "learning_rate": 7.954359673024523e-05, "loss": 0.1886, "step": 4010 },
    { "epoch": 5.13, "learning_rate": 7.947547683923706e-05, "loss": 0.2599, "step": 4020 },
    { "epoch": 5.14, "learning_rate": 7.940735694822889e-05, "loss": 0.3075, "step": 4030 },
    { "epoch": 5.15, "learning_rate": 7.933923705722072e-05, "loss": 0.2248, "step": 4040 },
    { "epoch": 5.17, "learning_rate": 7.927111716621254e-05, "loss": 0.2008, "step": 4050 },
    { "epoch": 5.18, "learning_rate": 7.920299727520436e-05, "loss": 0.1997, "step": 4060 },
    { "epoch": 5.19, "learning_rate": 7.91348773841962e-05, "loss": 0.2697, "step": 4070 },
    { "epoch": 5.2, "learning_rate": 7.906675749318801e-05, "loss": 0.3081, "step": 4080 },
    { "epoch": 5.22, "learning_rate": 7.899863760217984e-05, "loss": 0.2232, "step": 4090 },
    { "epoch": 5.23, "learning_rate": 7.893051771117167e-05, "loss": 0.2146, "step": 4100 },
    { "epoch": 5.24, "learning_rate": 7.88623978201635e-05, "loss": 0.1739, "step": 4110 },
    { "epoch": 5.25, "learning_rate": 7.879427792915531e-05, "loss": 0.2945, "step": 4120 },
    { "epoch": 5.27, "learning_rate": 7.872615803814715e-05, "loss": 0.3158, "step": 4130 },
    { "epoch": 5.28, "learning_rate": 7.865803814713896e-05, "loss": 0.243, "step": 4140 },
    { "epoch": 5.29, "learning_rate": 7.858991825613079e-05, "loss": 0.2088, "step": 4150 },
    { "epoch": 5.31, "learning_rate": 7.852179836512262e-05, "loss": 0.1899, "step": 4160 },
    { "epoch": 5.32, "learning_rate": 7.845367847411444e-05, "loss": 0.2968, "step": 4170 },
    { "epoch": 5.33, "learning_rate": 7.838555858310627e-05, "loss": 0.3202, "step": 4180 },
    { "epoch": 5.34, "learning_rate": 7.83174386920981e-05, "loss": 0.2636, "step": 4190 },
    { "epoch": 5.36, "learning_rate": 7.824931880108993e-05, "loss": 0.2132, "step": 4200 },
    { "epoch": 5.37, "learning_rate": 7.818119891008174e-05, "loss": 0.1976, "step": 4210 },
    { "epoch": 5.38, "learning_rate": 7.811307901907358e-05, "loss": 0.2709, "step": 4220 },
    { "epoch": 5.4, "learning_rate": 7.80449591280654e-05, "loss": 0.3094, "step": 4230 },
    { "epoch": 5.41, "learning_rate": 7.797683923705722e-05, "loss": 0.2282, "step": 4240 },
    { "epoch": 5.42, "learning_rate": 7.790871934604905e-05, "loss": 0.2148, "step": 4250 },
    { "epoch": 5.43, "learning_rate": 7.784059945504088e-05, "loss": 0.2079, "step": 4260 },
    { "epoch": 5.45, "learning_rate": 7.777247956403269e-05, "loss": 0.2455, "step": 4270 },
    { "epoch": 5.46, "learning_rate": 7.770435967302453e-05, "loss": 0.3046, "step": 4280 },
    { "epoch": 5.47, "learning_rate": 7.763623978201635e-05, "loss": 0.2257, "step": 4290 },
    { "epoch": 5.48, "learning_rate": 7.756811989100817e-05, "loss": 0.2027, "step": 4300 },
    { "epoch": 5.5, "learning_rate": 7.75e-05, "loss": 0.2004, "step": 4310 },
    { "epoch": 5.51, "learning_rate": 7.743188010899183e-05, "loss": 0.2498, "step": 4320 },
    { "epoch": 5.52, "learning_rate": 7.736376021798365e-05, "loss": 0.3068, "step": 4330 },
    { "epoch": 5.54, "learning_rate": 7.729564032697548e-05, "loss": 0.2259, "step": 4340 },
    { "epoch": 5.55, "learning_rate": 7.722752043596731e-05, "loss": 0.1961, "step": 4350 },
    { "epoch": 5.56, "learning_rate": 7.715940054495914e-05, "loss": 0.2078, "step": 4360 },
    { "epoch": 5.57, "learning_rate": 7.709128065395096e-05, "loss": 0.256, "step": 4370 },
    { "epoch": 5.59, "learning_rate": 7.702316076294278e-05, "loss": 0.3219, "step": 4380 },
    { "epoch": 5.6, "learning_rate": 7.695504087193462e-05, "loss": 0.234, "step": 4390 },
    { "epoch": 5.61, "learning_rate": 7.688692098092643e-05, "loss": 0.2054, "step": 4400 },
    { "epoch": 5.61, "eval_loss": 0.2873386740684509, "eval_runtime": 1362.5508, "eval_samples_per_second": 4.575, "eval_steps_per_second": 0.572, "eval_wer": 0.34335382617992977, "step": 4400 },
    { "epoch": 5.62, "learning_rate": 7.681880108991826e-05, "loss": 0.1902, "step": 4410 },
    { "epoch": 5.64, "learning_rate": 7.675068119891009e-05, "loss": 0.2802, "step": 4420 },
    { "epoch": 5.65, "learning_rate": 7.668256130790191e-05, "loss": 0.307, "step": 4430 },
    { "epoch": 5.66, "learning_rate": 7.661444141689373e-05, "loss": 0.2302, "step": 4440 },
    { "epoch": 5.68, "learning_rate": 7.654632152588557e-05, "loss": 0.2121, "step": 4450 },
    { "epoch": 5.69, "learning_rate": 7.647820163487738e-05, "loss": 0.1782, "step": 4460 },
    { "epoch": 5.7, "learning_rate": 7.641008174386921e-05, "loss": 0.2857, "step": 4470 },
    { "epoch": 5.71, "learning_rate": 7.634196185286104e-05, "loss": 0.305, "step": 4480 },
    { "epoch": 5.73, "learning_rate": 7.627384196185286e-05, "loss": 0.2318, "step": 4490 },
    { "epoch": 5.74, "learning_rate": 7.620572207084469e-05, "loss": 0.2035, "step": 4500 },
    { "epoch": 5.75, "learning_rate": 7.613760217983652e-05, "loss": 0.1914, "step": 4510 },
    { "epoch": 5.76, "learning_rate": 7.606948228882835e-05, "loss": 0.2506, "step": 4520 },
    { "epoch": 5.78, "learning_rate": 7.600136239782016e-05, "loss": 0.2988, "step": 4530 },
    { "epoch": 5.79, "learning_rate": 7.5933242506812e-05, "loss": 0.2363, "step": 4540 },
    { "epoch": 5.8, "learning_rate": 7.586512261580381e-05, "loss": 0.2106, "step": 4550 },
    { "epoch": 5.82, "learning_rate": 7.579700272479564e-05, "loss": 0.1792, "step": 4560 },
    { "epoch": 5.83, "learning_rate": 7.572888283378747e-05, "loss": 0.2635, "step": 4570 },
    { "epoch": 5.84, "learning_rate": 7.56607629427793e-05, "loss": 0.3131, "step": 4580 },
    { "epoch": 5.85, "learning_rate": 7.559264305177111e-05, "loss": 0.2375, "step": 4590 },
    { "epoch": 5.87, "learning_rate": 7.552452316076295e-05, "loss": 0.1996, "step": 4600 },
    { "epoch": 5.88, "learning_rate": 7.545640326975477e-05, "loss": 0.1848, "step": 4610 },
    { "epoch": 5.89, "learning_rate": 7.538828337874659e-05, "loss": 0.2375, "step": 4620 },
    { "epoch": 5.9, "learning_rate": 7.532016348773842e-05, "loss": 0.3027, "step": 4630 },
    { "epoch": 5.92, "learning_rate": 7.525204359673025e-05, "loss": 0.2376, "step": 4640 },
    { "epoch": 5.93, "learning_rate": 7.518392370572207e-05, "loss": 0.2072, "step": 4650 },
    { "epoch": 5.94, "learning_rate": 7.51158038147139e-05, "loss": 0.2128, "step": 4660 },
    { "epoch": 5.96, "learning_rate": 7.504768392370573e-05, "loss": 0.3047, "step": 4670 },
    { "epoch": 5.97, "learning_rate": 7.497956403269754e-05, "loss": 0.2724, "step": 4680 },
    { "epoch": 5.98, "learning_rate": 7.491144414168938e-05, "loss": 0.2071, "step": 4690 },
    { "epoch": 5.99, "learning_rate": 7.48433242506812e-05, "loss": 0.2074, "step": 4700 },
    { "epoch": 6.01, "learning_rate": 7.477520435967302e-05, "loss": 0.3043, "step": 4710 },
    { "epoch": 6.02, "learning_rate": 7.470708446866485e-05, "loss": 0.2099, "step": 4720 },
    { "epoch": 6.03, "learning_rate": 7.463896457765668e-05, "loss": 0.1647, "step": 4730 },
    { "epoch": 6.05, "learning_rate": 7.45708446866485e-05, "loss": 0.1382, "step": 4740 },
    { "epoch": 6.06, "learning_rate": 7.450272479564033e-05, "loss": 0.1726, "step": 4750 },
    { "epoch": 6.07, "learning_rate": 7.443460490463216e-05, "loss": 0.2836, "step": 4760 },
    { "epoch": 6.08, "learning_rate": 7.436648501362398e-05, "loss": 0.2129, "step": 4770 },
    { "epoch": 6.1, "learning_rate": 7.429836512261582e-05, "loss": 0.1673, "step": 4780 },
    { "epoch": 6.11, "learning_rate": 7.423024523160763e-05, "loss": 0.1534, "step": 4790 },
    { "epoch": 6.12, "learning_rate": 7.416212534059946e-05, "loss": 0.1704, "step": 4800 },
    { "epoch": 6.12, "eval_loss": 0.31291627883911133, "eval_runtime": 1357.9405, "eval_samples_per_second": 4.59, "eval_steps_per_second": 0.574, "eval_wer": 0.34319153810905756, "step": 4800 },
    { "epoch": 6.14, "learning_rate": 7.409400544959128e-05, "loss": 0.2631, "step": 4810 },
    { "epoch": 6.15, "learning_rate": 7.402588555858311e-05, "loss": 0.2144, "step": 4820 },
    { "epoch": 6.16, "learning_rate": 7.395776566757494e-05, "loss": 0.1788, "step": 4830 },
    { "epoch": 6.17, "learning_rate": 7.388964577656677e-05, "loss": 0.148, "step": 4840 },
    { "epoch": 6.19, "learning_rate": 7.382152588555858e-05, "loss": 0.2017, "step": 4850 },
    { "epoch": 6.2, "learning_rate": 7.375340599455042e-05, "loss": 0.302, "step": 4860 },
    { "epoch": 6.21, "learning_rate": 7.368528610354223e-05, "loss": 0.2176, "step": 4870 },
    { "epoch": 6.22, "learning_rate": 7.361716621253406e-05, "loss": 0.1862, "step": 4880 },
    { "epoch": 6.24, "learning_rate": 7.354904632152589e-05, "loss": 0.156, "step": 4890 },
    { "epoch": 6.25, "learning_rate": 7.348092643051772e-05, "loss": 0.1761, "step": 4900 },
    { "epoch": 6.26, "learning_rate": 7.341280653950954e-05, "loss": 0.2865, "step": 4910 },
    { "epoch": 6.28, "learning_rate": 7.334468664850137e-05, "loss": 0.215, "step": 4920 },
    { "epoch": 6.29, "learning_rate": 7.32765667574932e-05, "loss": 0.1806, "step": 4930 },
    { "epoch": 6.3, "learning_rate": 7.320844686648501e-05, "loss": 0.1613, "step": 4940 },
    { "epoch": 6.31, "learning_rate": 7.314032697547685e-05, "loss": 0.1898, "step": 4950 },
    { "epoch": 6.33, "learning_rate": 7.307220708446867e-05, "loss": 0.2798, "step": 4960 },
    { "epoch": 6.34, "learning_rate": 7.30040871934605e-05, "loss": 0.2264, "step": 4970 },
    { "epoch": 6.35, "learning_rate": 7.293596730245232e-05, "loss": 0.1772, "step": 4980 },
    { "epoch": 6.36, "learning_rate": 7.286784741144415e-05, "loss": 0.1632, "step": 4990 },
    { "epoch": 6.38, "learning_rate": 7.279972752043596e-05, "loss": 0.1935, "step": 5000 },
    { "epoch": 6.39, "learning_rate": 7.27316076294278e-05, "loss": 0.2846, "step": 5010 },
    { "epoch": 6.4, "learning_rate": 7.266348773841962e-05, "loss": 0.2205, "step": 5020 },
    { "epoch": 6.42, "learning_rate": 7.259536784741144e-05, "loss": 0.1869, "step": 5030 },
    { "epoch": 6.43, "learning_rate": 7.252724795640327e-05, "loss": 0.1546, "step": 5040 },
    { "epoch": 6.44, "learning_rate": 7.24591280653951e-05, "loss": 0.1831, "step": 5050 },
    { "epoch": 6.45, "learning_rate": 7.239100817438693e-05, "loss": 0.2718, "step": 5060 },
    { "epoch": 6.47, "learning_rate": 7.232288828337875e-05, "loss": 0.2188, "step": 5070 },
    { "epoch": 6.48, "learning_rate": 7.225476839237058e-05, "loss": 0.1769, "step": 5080 },
    { "epoch": 6.49, "learning_rate": 7.21866485013624e-05, "loss": 0.1684, "step": 5090 },
    { "epoch": 6.5, "learning_rate": 7.211852861035424e-05, "loss": 0.1683, "step": 5100 },
    { "epoch": 6.52, "learning_rate": 7.205040871934605e-05, "loss": 0.3011, "step": 5110 },
    { "epoch": 6.53, "learning_rate": 7.198228882833788e-05, "loss": 0.2114, "step": 5120 },
    { "epoch": 6.54, "learning_rate": 7.19141689373297e-05, "loss": 0.1741, "step": 5130 },
    { "epoch": 6.56, "learning_rate": 7.184604904632153e-05, "loss": 0.1467, "step": 5140 },
    { "epoch": 6.57, "learning_rate": 7.177792915531335e-05, "loss": 0.1705, "step": 5150 },
    { "epoch": 6.58, "learning_rate": 7.170980926430519e-05, "loss": 0.296, "step": 5160 },
    { "epoch": 6.59, "learning_rate": 7.1641689373297e-05, "loss": 0.2137, "step": 5170 },
    { "epoch": 6.61, "learning_rate": 7.157356948228883e-05, "loss": 0.1733, "step": 5180 },
    { "epoch": 6.62, "learning_rate": 7.150544959128065e-05, "loss": 0.1585, "step": 5190 },
    { "epoch": 6.63,
|
"learning_rate": 7.143732970027248e-05, |
|
"loss": 0.1805, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"eval_loss": 0.296330988407135, |
|
"eval_runtime": 1357.6829, |
|
"eval_samples_per_second": 4.591, |
|
"eval_steps_per_second": 0.575, |
|
"eval_wer": 0.3412727203299221, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 7.136920980926431e-05, |
|
"loss": 0.2849, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 7.130108991825614e-05, |
|
"loss": 0.2099, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 7.123297002724796e-05, |
|
"loss": 0.1777, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 7.116485013623978e-05, |
|
"loss": 0.1496, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 7.109673024523162e-05, |
|
"loss": 0.1906, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 7.102861035422343e-05, |
|
"loss": 0.2884, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 7.096049046321526e-05, |
|
"loss": 0.2049, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 7.089237057220709e-05, |
|
"loss": 0.1815, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 7.082425068119891e-05, |
|
"loss": 0.156, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 7.075613079019074e-05, |
|
"loss": 0.1857, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 7.068801089918257e-05, |
|
"loss": 0.277, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 7.061989100817438e-05, |
|
"loss": 0.2278, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 7.055177111716622e-05, |
|
"loss": 0.1667, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 7.048365122615804e-05, |
|
"loss": 0.1474, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 7.041553133514986e-05, |
|
"loss": 0.164, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 7.034741144414169e-05, |
|
"loss": 0.2885, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 7.027929155313352e-05, |
|
"loss": 0.2117, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 7.021117166212535e-05, |
|
"loss": 0.1828, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 7.014305177111717e-05, |
|
"loss": 0.1589, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 7.0074931880109e-05, |
|
"loss": 0.1649, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 7.000681198910082e-05, |
|
"loss": 0.282, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 6.993869209809266e-05, |
|
"loss": 0.211, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 6.987057220708447e-05, |
|
"loss": 0.1849, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 6.98024523160763e-05, |
|
"loss": 0.1615, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 6.973433242506812e-05, |
|
"loss": 0.1879, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 6.966621253405995e-05, |
|
"loss": 0.2815, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 6.959809264305177e-05, |
|
"loss": 0.2039, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 6.95299727520436e-05, |
|
"loss": 0.1596, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 6.946185286103542e-05, |
|
"loss": 0.2583, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 6.939373297002725e-05, |
|
"loss": 0.1986, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 6.932561307901907e-05, |
|
"loss": 0.1591, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 6.92574931880109e-05, |
|
"loss": 0.1369, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 6.918937329700273e-05, |
|
"loss": 0.123, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 6.912125340599456e-05, |
|
"loss": 0.2453, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 6.905313351498638e-05, |
|
"loss": 0.1955, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 6.89850136239782e-05, |
|
"loss": 0.1656, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 6.891689373297004e-05, |
|
"loss": 0.1371, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 6.884877384196185e-05, |
|
"loss": 0.1227, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 6.878065395095368e-05, |
|
"loss": 0.2073, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 6.871253405994551e-05, |
|
"loss": 0.2091, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"eval_loss": 0.27548345923423767, |
|
"eval_runtime": 1422.2095, |
|
"eval_samples_per_second": 4.383, |
|
"eval_steps_per_second": 0.548, |
|
"eval_wer": 0.3328528333587903, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 6.864441416893733e-05, |
|
"loss": 0.1649, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 6.857629427792915e-05, |
|
"loss": 0.1315, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 6.850817438692099e-05, |
|
"loss": 0.1456, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 6.84400544959128e-05, |
|
"loss": 0.1979, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 6.837193460490463e-05, |
|
"loss": 0.2183, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 6.830381471389646e-05, |
|
"loss": 0.1624, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 6.823569482288828e-05, |
|
"loss": 0.1402, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 6.816757493188011e-05, |
|
"loss": 0.1308, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 6.809945504087194e-05, |
|
"loss": 0.2221, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 6.803133514986377e-05, |
|
"loss": 0.2085, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 6.796321525885558e-05, |
|
"loss": 0.1621, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 6.789509536784742e-05, |
|
"loss": 0.1396, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 6.782697547683924e-05, |
|
"loss": 0.134, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 6.775885558583108e-05, |
|
"loss": 0.2114, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 6.769073569482289e-05, |
|
"loss": 0.2041, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 6.762261580381472e-05, |
|
"loss": 0.1704, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 6.755449591280654e-05, |
|
"loss": 0.1387, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 6.748637602179837e-05, |
|
"loss": 0.1347, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 6.741825613079019e-05, |
|
"loss": 0.2059, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 6.735013623978203e-05, |
|
"loss": 0.2099, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 6.728201634877384e-05, |
|
"loss": 0.1647, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 6.721389645776567e-05, |
|
"loss": 0.1292, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 6.71457765667575e-05, |
|
"loss": 0.1363, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 6.707765667574932e-05, |
|
"loss": 0.243, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 6.700953678474115e-05, |
|
"loss": 0.2221, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 6.694141689373298e-05, |
|
"loss": 0.1653, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 6.68732970027248e-05, |
|
"loss": 0.1463, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 6.680517711171662e-05, |
|
"loss": 0.128, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 6.673705722070846e-05, |
|
"loss": 0.228, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 6.666893732970027e-05, |
|
"loss": 0.2107, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 6.66008174386921e-05, |
|
"loss": 0.1571, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 6.653269754768393e-05, |
|
"loss": 0.1351, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 6.646457765667575e-05, |
|
"loss": 0.1337, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 6.639645776566757e-05, |
|
"loss": 0.2023, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 6.632833787465941e-05, |
|
"loss": 0.2083, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 6.626021798365122e-05, |
|
"loss": 0.1521, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 6.619209809264305e-05, |
|
"loss": 0.1219, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 6.612397820163488e-05, |
|
"loss": 0.1414, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 6.60558583106267e-05, |
|
"loss": 0.2388, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 6.598773841961853e-05, |
|
"loss": 0.1971, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"eval_loss": 0.2706403434276581, |
|
"eval_runtime": 1357.2506, |
|
"eval_samples_per_second": 4.592, |
|
"eval_steps_per_second": 0.575, |
|
"eval_wer": 0.33087673743699403, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 6.591961852861036e-05, |
|
"loss": 0.1618, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 6.585149863760219e-05, |
|
"loss": 0.1335, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 6.5783378746594e-05, |
|
"loss": 0.1397, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 6.571525885558584e-05, |
|
"loss": 0.2312, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 6.564713896457766e-05, |
|
"loss": 0.2257, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 6.557901907356948e-05, |
|
"loss": 0.1647, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 6.551089918256131e-05, |
|
"loss": 0.1399, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 6.544277929155314e-05, |
|
"loss": 0.1251, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 6.537465940054495e-05, |
|
"loss": 0.2068, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 6.530653950953679e-05, |
|
"loss": 0.1983, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 6.523841961852862e-05, |
|
"loss": 0.1569, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 6.517029972752043e-05, |
|
"loss": 0.1373, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 6.510217983651227e-05, |
|
"loss": 0.126, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 6.503405994550409e-05, |
|
"loss": 0.1909, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 6.496594005449591e-05, |
|
"loss": 0.2118, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 6.489782016348774e-05, |
|
"loss": 0.1755, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 6.482970027247957e-05, |
|
"loss": 0.1371, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 6.476158038147138e-05, |
|
"loss": 0.1302, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 6.469346049046322e-05, |
|
"loss": 0.2641, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 6.462534059945504e-05, |
|
"loss": 0.2094, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 6.455722070844688e-05, |
|
"loss": 0.1446, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 6.448910081743869e-05, |
|
"loss": 0.1363, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 6.442098092643052e-05, |
|
"loss": 0.1349, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 6.435286103542235e-05, |
|
"loss": 0.2243, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 6.428474114441417e-05, |
|
"loss": 0.2035, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 6.4216621253406e-05, |
|
"loss": 0.1409, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 6.414850136239783e-05, |
|
"loss": 0.1474, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 6.408038147138966e-05, |
|
"loss": 0.2361, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 6.401226158038147e-05, |
|
"loss": 0.1616, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 6.394414168937331e-05, |
|
"loss": 0.1255, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 6.387602179836512e-05, |
|
"loss": 0.1027, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 6.380790190735695e-05, |
|
"loss": 0.1261, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 6.373978201634878e-05, |
|
"loss": 0.2168, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 6.36716621253406e-05, |
|
"loss": 0.1548, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 6.360354223433242e-05, |
|
"loss": 0.1257, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 6.353542234332426e-05, |
|
"loss": 0.1043, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 6.346730245231608e-05, |
|
"loss": 0.1502, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 6.33991825613079e-05, |
|
"loss": 0.2203, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 6.333106267029973e-05, |
|
"loss": 0.1471, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 6.326294277929156e-05, |
|
"loss": 0.1237, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"eval_loss": 0.28225475549697876, |
|
"eval_runtime": 1353.8722, |
|
"eval_samples_per_second": 4.604, |
|
"eval_steps_per_second": 0.576, |
|
"eval_wer": 0.3269627310218421, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 6.319482288828338e-05, |
|
"loss": 0.0982, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 6.312670299727521e-05, |
|
"loss": 0.1334, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 6.305858310626704e-05, |
|
"loss": 0.2213, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 6.299046321525885e-05, |
|
"loss": 0.1592, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 6.29223433242507e-05, |
|
"loss": 0.1368, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 6.285422343324251e-05, |
|
"loss": 0.1116, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 6.278610354223433e-05, |
|
"loss": 0.1464, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 6.271798365122616e-05, |
|
"loss": 0.2219, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 6.264986376021799e-05, |
|
"loss": 0.1525, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 6.25817438692098e-05, |
|
"loss": 0.1158, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 6.251362397820164e-05, |
|
"loss": 0.1072, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 6.244550408719346e-05, |
|
"loss": 0.1393, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 6.237738419618529e-05, |
|
"loss": 0.2376, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 6.230926430517711e-05, |
|
"loss": 0.1616, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 6.224114441416894e-05, |
|
"loss": 0.1236, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 6.217302452316077e-05, |
|
"loss": 0.0995, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 6.21049046321526e-05, |
|
"loss": 0.1579, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 6.203678474114442e-05, |
|
"loss": 0.2111, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 6.196866485013624e-05, |
|
"loss": 0.1493, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 6.190054495912808e-05, |
|
"loss": 0.126, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 6.183242506811989e-05, |
|
"loss": 0.1424, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 6.176430517711172e-05, |
|
"loss": 0.1535, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 6.169618528610354e-05, |
|
"loss": 0.2261, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 6.162806539509537e-05, |
|
"loss": 0.1551, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 6.15599455040872e-05, |
|
"loss": 0.1835, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 6.149182561307903e-05, |
|
"loss": 0.1058, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 6.142370572207084e-05, |
|
"loss": 0.1685, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 6.135558583106268e-05, |
|
"loss": 0.2288, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 6.12874659400545e-05, |
|
"loss": 0.1564, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 6.121934604904632e-05, |
|
"loss": 0.1162, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 6.115122615803815e-05, |
|
"loss": 0.1167, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 6.108310626702998e-05, |
|
"loss": 0.145, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 6.10149863760218e-05, |
|
"loss": 0.231, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 6.094686648501363e-05, |
|
"loss": 0.1575, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 6.087874659400545e-05, |
|
"loss": 0.1249, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 6.081062670299727e-05, |
|
"loss": 0.1176, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 6.074250681198911e-05, |
|
"loss": 0.1393, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 6.067438692098093e-05, |
|
"loss": 0.2434, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 6.0606267029972755e-05, |
|
"loss": 0.1537, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 6.053814713896458e-05, |
|
"loss": 0.123, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"eval_loss": 0.27536651492118835, |
|
"eval_runtime": 1356.6363, |
|
"eval_samples_per_second": 4.594, |
|
"eval_steps_per_second": 0.575, |
|
"eval_wer": 0.3246143271727509, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 6.047002724795641e-05, |
|
"loss": 0.1009, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 6.040190735694823e-05, |
|
"loss": 0.1458, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 6.0333787465940064e-05, |
|
"loss": 0.2283, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 6.0265667574931884e-05, |
|
"loss": 0.1539, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 6.0197547683923705e-05, |
|
"loss": 0.1197, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 6.012942779291554e-05, |
|
"loss": 0.1078, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 6.006130790190736e-05, |
|
"loss": 0.1415, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 5.999318801089918e-05, |
|
"loss": 0.226, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 5.9925068119891014e-05, |
|
"loss": 0.146, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 5.9856948228882835e-05, |
|
"loss": 0.1146, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 5.9788828337874655e-05, |
|
"loss": 0.1093, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 5.972070844686649e-05, |
|
"loss": 0.1492, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 5.965258855585831e-05, |
|
"loss": 0.2254, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 5.958446866485014e-05, |
|
"loss": 0.1561, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 5.9516348773841965e-05, |
|
"loss": 0.1232, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 5.944822888283379e-05, |
|
"loss": 0.1036, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 5.938010899182561e-05, |
|
"loss": 0.1354, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 5.9311989100817447e-05, |
|
"loss": 0.2194, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 5.924386920980927e-05, |
|
"loss": 0.1511, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 5.917574931880109e-05, |
|
"loss": 0.1192, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 5.910762942779292e-05, |
|
"loss": 0.1051, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 5.903950953678474e-05, |
|
"loss": 0.1295, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 5.897138964577656e-05, |
|
"loss": 0.2105, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 5.89032697547684e-05, |
|
"loss": 0.1368, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 5.883514986376022e-05, |
|
"loss": 0.1107, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 5.876702997275204e-05, |
|
"loss": 0.2054, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 5.869891008174387e-05, |
|
"loss": 0.141, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 5.863079019073569e-05, |
|
"loss": 0.1179, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 5.856267029972752e-05, |
|
"loss": 0.0953, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 5.8494550408719354e-05, |
|
"loss": 0.114, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 5.8426430517711175e-05, |
|
"loss": 0.1698, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 5.835831062670301e-05, |
|
"loss": 0.1508, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 5.829019073569483e-05, |
|
"loss": 0.1086, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 5.822207084468665e-05, |
|
"loss": 0.1121, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 5.8153950953678484e-05, |
|
"loss": 0.1243, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 5.8085831062670304e-05, |
|
"loss": 0.1963, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 5.8017711171662125e-05, |
|
"loss": 0.1519, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 5.794959128065396e-05, |
|
"loss": 0.1153, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 5.788147138964578e-05, |
|
"loss": 0.1292, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 5.78133514986376e-05, |
|
"loss": 0.103, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"eval_loss": 0.2917274832725525, |
|
"eval_runtime": 1363.2635, |
|
"eval_samples_per_second": 4.572, |
|
"eval_steps_per_second": 0.572, |
|
"eval_wer": 0.3271536581640446, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 5.7745231607629434e-05, |
|
"loss": 0.2107, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 5.7677111716621255e-05, |
|
"loss": 0.1528, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 5.760899182561308e-05, |
|
"loss": 0.1125, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 5.754087193460491e-05, |
|
"loss": 0.0891, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 5.747275204359674e-05, |
|
"loss": 0.1036, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 5.740463215258856e-05, |
|
"loss": 0.1822, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 5.733651226158039e-05, |
|
"loss": 0.1684, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 5.726839237057221e-05, |
|
"loss": 0.1207, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 5.720027247956403e-05, |
|
"loss": 0.0943, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 5.7132152588555867e-05, |
|
"loss": 0.1049, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 5.706403269754769e-05, |
|
"loss": 0.179, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 5.699591280653951e-05, |
|
"loss": 0.1544, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 5.692779291553134e-05, |
|
"loss": 0.1168, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 5.685967302452316e-05, |
|
"loss": 0.091, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 5.679155313351498e-05, |
|
"loss": 0.0994, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 5.672343324250682e-05, |
|
"loss": 0.1756, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 5.665531335149864e-05, |
|
"loss": 0.1602, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 5.6587193460490465e-05, |
|
"loss": 0.1205, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 5.651907356948229e-05, |
|
"loss": 0.0882, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 5.645095367847412e-05, |
|
"loss": 0.1134, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 5.638283378746594e-05, |
|
"loss": 0.1802, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 5.6314713896457774e-05, |
|
"loss": 0.1509, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 5.6246594005449595e-05, |
|
"loss": 0.1117, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 5.6178474114441415e-05, |
|
"loss": 0.1064, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 5.611035422343325e-05, |
|
"loss": 0.0984, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 5.604223433242507e-05, |
|
"loss": 0.1623, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 5.597411444141689e-05, |
|
"loss": 0.1516, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 5.5905994550408724e-05, |
|
"loss": 0.1151, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 5.5837874659400545e-05, |
|
"loss": 0.1012, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 5.5769754768392365e-05, |
|
"loss": 0.0917, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 5.57016348773842e-05, |
|
"loss": 0.2109, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 5.563351498637602e-05, |
|
"loss": 0.1456, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 5.556539509536785e-05, |
|
"loss": 0.1244, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 5.5497275204359675e-05, |
|
"loss": 0.0984, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 5.54291553133515e-05, |
|
"loss": 0.0984, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 5.536103542234332e-05, |
|
"loss": 0.1904, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 5.529291553133516e-05, |
|
"loss": 0.1523, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 5.522479564032698e-05, |
|
"loss": 0.1162, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 5.515667574931881e-05, |
|
"loss": 0.095, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 5.508855585831063e-05, |
|
"loss": 0.1143, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"eval_loss": 0.28853729367256165, |
|
"eval_runtime": 1355.1162, |
|
"eval_samples_per_second": 4.6, |
|
"eval_steps_per_second": 0.576, |
|
"eval_wer": 0.33048533679547887, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 5.502043596730245e-05, |
|
"loss": 0.1862, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 5.4952316076294287e-05, |
|
"loss": 0.164, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 5.488419618528611e-05, |
|
"loss": 0.1247, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 5.481607629427793e-05, |
|
"loss": 0.0893, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 5.474795640326976e-05, |
|
"loss": 0.1028, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 5.467983651226158e-05, |
|
"loss": 0.2013, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 5.46117166212534e-05, |
|
"loss": 0.1607, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 5.454359673024524e-05, |
|
"loss": 0.1262, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 5.447547683923706e-05, |
|
"loss": 0.103, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 5.4407356948228885e-05, |
|
"loss": 0.098, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 5.433923705722072e-05, |
|
"loss": 0.2012, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 5.427111716621254e-05, |
|
"loss": 0.1486, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 5.420299727520436e-05, |
|
"loss": 0.11, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 5.4134877384196194e-05, |
|
"loss": 0.0958, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 5.4066757493188015e-05, |
|
"loss": 0.1152, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 5.3998637602179835e-05, |
|
"loss": 0.1947, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 5.393051771117167e-05, |
|
"loss": 0.1528, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 5.386239782016349e-05, |
|
"loss": 0.1232, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 5.379427792915531e-05, |
|
"loss": 0.0953, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 5.3726158038147144e-05, |
|
"loss": 0.1072, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 5.3658038147138965e-05, |
|
"loss": 0.1809, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 5.358991825613079e-05, |
|
"loss": 0.1366, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 5.352179836512262e-05, |
|
"loss": 0.1063, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 5.345367847411445e-05, |
|
"loss": 0.1365, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 5.338555858310627e-05, |
|
"loss": 0.1612, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 5.33174386920981e-05, |
|
"loss": 0.1143, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 5.324931880108992e-05, |
|
"loss": 0.0963, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 5.318119891008174e-05, |
|
"loss": 0.085, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 5.311307901907358e-05, |
|
"loss": 0.1373, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 5.30449591280654e-05, |
|
"loss": 0.168, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 5.297683923705722e-05, |
|
"loss": 0.1151, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 5.290871934604905e-05, |
|
"loss": 0.0903, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 5.284059945504087e-05, |
|
"loss": 0.0753, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 5.277247956403269e-05, |
|
"loss": 0.1385, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 5.270435967302453e-05, |
|
"loss": 0.1558, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 5.263623978201635e-05, |
|
"loss": 0.1149, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 5.2568119891008175e-05, |
|
"loss": 0.0961, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 5.25e-05, |
|
"loss": 0.086, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 5.243188010899183e-05, |
|
"loss": 0.1316, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 5.236376021798365e-05, |
|
"loss": 0.156, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"eval_loss": 0.281043142080307, |
|
"eval_runtime": 1358.69, |
|
"eval_samples_per_second": 4.588, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.3288147243012067, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 5.2295640326975484e-05, |
|
"loss": 0.1093, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 5.2227520435967305e-05, |
|
"loss": 0.098, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 5.215940054495914e-05, |
|
"loss": 0.071, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 5.209128065395096e-05, |
|
"loss": 0.1449, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 5.202316076294278e-05, |
|
"loss": 0.1516, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 5.1955040871934614e-05, |
|
"loss": 0.1174, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 5.1886920980926434e-05, |
|
"loss": 0.0923, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 5.1818801089918255e-05, |
|
"loss": 0.0845, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 5.175068119891009e-05, |
|
"loss": 0.1245, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 5.168256130790191e-05, |
|
"loss": 0.1581, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 5.161444141689373e-05, |
|
"loss": 0.1186, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 5.1546321525885564e-05, |
|
"loss": 0.0911, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 5.1478201634877385e-05, |
|
"loss": 0.0854, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 5.141008174386921e-05, |
|
"loss": 0.1656, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 5.134196185286104e-05, |
|
"loss": 0.1625, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 5.127384196185287e-05, |
|
"loss": 0.1175, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 5.120572207084469e-05, |
|
"loss": 0.0917, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 5.113760217983652e-05, |
|
"loss": 0.0878, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 5.106948228882834e-05, |
|
"loss": 0.1276, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 5.100136239782016e-05, |
|
"loss": 0.1524, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 5.0933242506811997e-05, |
|
"loss": 0.1095, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 5.086512261580382e-05, |
|
"loss": 0.0993, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 5.079700272479564e-05, |
|
"loss": 0.0883, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 5.072888283378747e-05, |
|
"loss": 0.1267, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 5.066076294277929e-05, |
|
"loss": 0.1568, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 5.059264305177111e-05, |
|
"loss": 0.1185, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 5.052452316076295e-05, |
|
"loss": 0.0867, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 5.045640326975477e-05, |
|
"loss": 0.0829, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 5.0388283378746595e-05, |
|
"loss": 0.1458, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 5.032016348773842e-05, |
|
"loss": 0.1596, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 5.025204359673025e-05, |
|
"loss": 0.1166, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 5.018392370572207e-05, |
|
"loss": 0.092, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 5.0115803814713904e-05, |
|
"loss": 0.0755, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 5.0047683923705725e-05, |
|
"loss": 0.1201, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 4.997956403269755e-05, |
|
"loss": 0.1542, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 4.991144414168937e-05, |
|
"loss": 0.11, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 4.98433242506812e-05, |
|
"loss": 0.0914, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 4.977520435967303e-05, |
|
"loss": 0.0864, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 4.9707084468664854e-05, |
|
"loss": 0.1088, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 4.9638964577656675e-05, |
|
"loss": 0.167, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"eval_loss": 0.2688770592212677, |
|
"eval_runtime": 1426.2094, |
|
"eval_samples_per_second": 4.37, |
|
"eval_steps_per_second": 0.547, |
|
"eval_wer": 0.3232014663204521, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 4.95708446866485e-05, |
|
"loss": 0.1123, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 4.950272479564033e-05, |
|
"loss": 0.0967, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 4.943460490463216e-05, |
|
"loss": 0.0923, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 4.9366485013623984e-05, |
|
"loss": 0.1188, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 4.929836512261581e-05, |
|
"loss": 0.162, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 4.923024523160763e-05, |
|
"loss": 0.1141, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 4.916212534059946e-05, |
|
"loss": 0.0887, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 4.909400544959129e-05, |
|
"loss": 0.0782, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 4.902588555858311e-05, |
|
"loss": 0.1172, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 4.8957765667574935e-05, |
|
"loss": 0.1609, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 4.888964577656676e-05, |
|
"loss": 0.1158, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 4.882152588555858e-05, |
|
"loss": 0.0916, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 4.875340599455041e-05, |
|
"loss": 0.0741, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 4.868528610354224e-05, |
|
"loss": 0.1279, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 4.861716621253406e-05, |
|
"loss": 0.1497, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 4.8549046321525885e-05, |
|
"loss": 0.1137, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 4.848092643051771e-05, |
|
"loss": 0.0854, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 4.841280653950954e-05, |
|
"loss": 0.0817, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 4.834468664850137e-05, |
|
"loss": 0.1257, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 4.8276566757493194e-05, |
|
"loss": 0.1407, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 4.820844686648502e-05, |
|
"loss": 0.094, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 4.814032697547684e-05, |
|
"loss": 0.086, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 4.807220708446867e-05, |
|
"loss": 0.1743, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"learning_rate": 4.80040871934605e-05, |
|
"loss": 0.1049, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 4.793596730245232e-05, |
|
"loss": 0.0851, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 4.7867847411444145e-05, |
|
"loss": 0.0711, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 4.779972752043597e-05, |
|
"loss": 0.0907, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 4.773160762942779e-05, |
|
"loss": 0.1484, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 4.766348773841962e-05, |
|
"loss": 0.1114, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 4.759536784741145e-05, |
|
"loss": 0.0901, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 4.752724795640327e-05, |
|
"loss": 0.0707, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 4.7459128065395095e-05, |
|
"loss": 0.0971, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 4.739100817438692e-05, |
|
"loss": 0.1691, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 4.732288828337875e-05, |
|
"loss": 0.1155, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 4.725476839237058e-05, |
|
"loss": 0.0854, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 4.7186648501362404e-05, |
|
"loss": 0.0676, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 4.7118528610354225e-05, |
|
"loss": 0.0866, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 4.705040871934605e-05, |
|
"loss": 0.1466, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 4.698228882833788e-05, |
|
"loss": 0.1183, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"learning_rate": 4.69141689373297e-05, |
|
"loss": 0.0815, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"eval_loss": 0.28985264897346497, |
|
"eval_runtime": 1359.0731, |
|
"eval_samples_per_second": 4.586, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.32356422789063694, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 4.684604904632153e-05, |
|
"loss": 0.0679, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 4.6777929155313355e-05, |
|
"loss": 0.0858, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 4.670980926430518e-05, |
|
"loss": 0.14, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 4.6641689373297e-05, |
|
"loss": 0.1108, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 4.657356948228883e-05, |
|
"loss": 0.0885, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 4.650544959128066e-05, |
|
"loss": 0.0676, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 4.643732970027248e-05, |
|
"loss": 0.0892, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 4.6369209809264305e-05, |
|
"loss": 0.1677, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 4.630108991825613e-05, |
|
"loss": 0.1307, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 4.623297002724796e-05, |
|
"loss": 0.1091, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 4.616485013623979e-05, |
|
"loss": 0.0746, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 11.38, |
|
"learning_rate": 4.6096730245231614e-05, |
|
"loss": 0.0818, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 4.6028610354223435e-05, |
|
"loss": 0.147, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 4.596049046321526e-05, |
|
"loss": 0.1256, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 4.589237057220709e-05, |
|
"loss": 0.0888, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 4.582425068119891e-05, |
|
"loss": 0.071, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"learning_rate": 4.575613079019074e-05, |
|
"loss": 0.0862, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 4.5688010899182565e-05, |
|
"loss": 0.1401, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 4.5619891008174385e-05, |
|
"loss": 0.1185, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 4.555177111716621e-05, |
|
"loss": 0.0934, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 4.548365122615804e-05, |
|
"loss": 0.0785, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 4.541553133514986e-05, |
|
"loss": 0.0778, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 4.534741144414169e-05, |
|
"loss": 0.1465, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 4.5279291553133515e-05, |
|
"loss": 0.1161, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 11.54, |
|
"learning_rate": 4.521117166212534e-05, |
|
"loss": 0.0896, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 4.514305177111717e-05, |
|
"loss": 0.0755, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 4.5074931880109e-05, |
|
"loss": 0.0906, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 4.5006811989100824e-05, |
|
"loss": 0.1451, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"learning_rate": 4.4938692098092645e-05, |
|
"loss": 0.1147, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 4.487057220708447e-05, |
|
"loss": 0.0807, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"learning_rate": 4.48024523160763e-05, |
|
"loss": 0.0787, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 4.473433242506812e-05, |
|
"loss": 0.0931, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 4.466621253405995e-05, |
|
"loss": 0.1479, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 4.4598092643051775e-05, |
|
"loss": 0.1033, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 4.4529972752043595e-05, |
|
"loss": 0.0866, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 4.446185286103542e-05, |
|
"loss": 0.0759, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 4.439373297002725e-05, |
|
"loss": 0.1024, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 4.432561307901908e-05, |
|
"loss": 0.1796, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"learning_rate": 4.4257493188010904e-05, |
|
"loss": 0.1174, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 4.418937329700273e-05, |
|
"loss": 0.0844, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"eval_loss": 0.2798416316509247, |
|
"eval_runtime": 1354.4726, |
|
"eval_samples_per_second": 4.602, |
|
"eval_steps_per_second": 0.576, |
|
"eval_wer": 0.3224854895371926, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 11.75, |
|
"learning_rate": 4.412125340599455e-05, |
|
"loss": 0.0712, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 4.405313351498638e-05, |
|
"loss": 0.0797, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 4.398501362397821e-05, |
|
"loss": 0.1652, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 4.391689373297003e-05, |
|
"loss": 0.1123, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 4.3848773841961855e-05, |
|
"loss": 0.0905, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 4.378065395095368e-05, |
|
"loss": 0.0705, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 4.37125340599455e-05, |
|
"loss": 0.083, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 4.364441416893733e-05, |
|
"loss": 0.159, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 4.357629427792916e-05, |
|
"loss": 0.114, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 4.3508174386920985e-05, |
|
"loss": 0.0832, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 4.3440054495912805e-05, |
|
"loss": 0.0679, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 4.337193460490463e-05, |
|
"loss": 0.0839, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 4.330381471389646e-05, |
|
"loss": 0.1515, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 4.323569482288829e-05, |
|
"loss": 0.1111, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 4.3167574931880114e-05, |
|
"loss": 0.0872, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 4.309945504087194e-05, |
|
"loss": 0.0759, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 4.303133514986376e-05, |
|
"loss": 0.087, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 4.296321525885559e-05, |
|
"loss": 0.1595, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 4.289509536784742e-05, |
|
"loss": 0.0986, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 4.282697547683924e-05, |
|
"loss": 0.0768, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 4.2758855585831065e-05, |
|
"loss": 0.1419, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 12.02, |
|
"learning_rate": 4.269073569482289e-05, |
|
"loss": 0.1068, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 4.262261580381471e-05, |
|
"loss": 0.0839, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 4.255449591280654e-05, |
|
"loss": 0.0717, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 4.248637602179837e-05, |
|
"loss": 0.0673, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 4.241825613079019e-05, |
|
"loss": 0.1271, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 4.2350136239782015e-05, |
|
"loss": 0.116, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 4.228201634877384e-05, |
|
"loss": 0.0861, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 4.221389645776567e-05, |
|
"loss": 0.0669, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 4.21457765667575e-05, |
|
"loss": 0.0641, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 4.2077656675749324e-05, |
|
"loss": 0.1306, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 4.200953678474115e-05, |
|
"loss": 0.1095, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 4.194141689373297e-05, |
|
"loss": 0.0887, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 4.18732970027248e-05, |
|
"loss": 0.0654, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 4.180517711171663e-05, |
|
"loss": 0.078, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 4.173705722070845e-05, |
|
"loss": 0.1252, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 4.1668937329700275e-05, |
|
"loss": 0.1113, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"learning_rate": 4.16008174386921e-05, |
|
"loss": 0.0859, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"learning_rate": 4.153269754768392e-05, |
|
"loss": 0.0796, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 4.146457765667575e-05, |
|
"loss": 0.0775, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"eval_loss": 0.2894273102283478, |
|
"eval_runtime": 1358.164, |
|
"eval_samples_per_second": 4.589, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.3224186650374217, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 4.139645776566758e-05, |
|
"loss": 0.1213, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 4.13283378746594e-05, |
|
"loss": 0.1165, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 4.1260217983651225e-05, |
|
"loss": 0.0892, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 4.119209809264305e-05, |
|
"loss": 0.069, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 4.112397820163488e-05, |
|
"loss": 0.0692, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 12.32, |
|
"learning_rate": 4.105585831062671e-05, |
|
"loss": 0.121, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 4.0987738419618534e-05, |
|
"loss": 0.1186, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 12.35, |
|
"learning_rate": 4.0919618528610355e-05, |
|
"loss": 0.082, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 4.085149863760218e-05, |
|
"loss": 0.0721, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 4.078337874659401e-05, |
|
"loss": 0.0555, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"learning_rate": 4.071525885558583e-05, |
|
"loss": 0.1086, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 4.064713896457766e-05, |
|
"loss": 0.1175, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 4.0579019073569485e-05, |
|
"loss": 0.0856, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 4.0510899182561305e-05, |
|
"loss": 0.0777, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"learning_rate": 4.044277929155313e-05, |
|
"loss": 0.0705, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 4.037465940054496e-05, |
|
"loss": 0.1141, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 4.030653950953679e-05, |
|
"loss": 0.1149, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 4.0238419618528614e-05, |
|
"loss": 0.0973, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 4.017029972752044e-05, |
|
"loss": 0.071, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 4.010217983651227e-05, |
|
"loss": 0.0653, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 4.003405994550409e-05, |
|
"loss": 0.1146, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 3.996594005449592e-05, |
|
"loss": 0.1125, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 3.9897820163487744e-05, |
|
"loss": 0.0917, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 3.9829700272479565e-05, |
|
"loss": 0.0703, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 3.976158038147139e-05, |
|
"loss": 0.0643, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 3.969346049046322e-05, |
|
"loss": 0.1285, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 3.962534059945504e-05, |
|
"loss": 0.1159, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 3.955722070844687e-05, |
|
"loss": 0.0922, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 3.9489100817438695e-05, |
|
"loss": 0.0686, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 3.9420980926430515e-05, |
|
"loss": 0.0609, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 3.935286103542234e-05, |
|
"loss": 0.126, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 3.928474114441417e-05, |
|
"loss": 0.1126, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 3.9216621253406e-05, |
|
"loss": 0.0827, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"learning_rate": 3.9148501362397824e-05, |
|
"loss": 0.0679, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 3.908038147138965e-05, |
|
"loss": 0.0673, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 3.901226158038147e-05, |
|
"loss": 0.1171, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 3.89441416893733e-05, |
|
"loss": 0.1119, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 3.887602179836513e-05, |
|
"loss": 0.0927, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 3.8807901907356954e-05, |
|
"loss": 0.0805, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 3.8739782016348775e-05, |
|
"loss": 0.0677, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"eval_loss": 0.28383344411849976, |
|
"eval_runtime": 1361.9523, |
|
"eval_samples_per_second": 4.577, |
|
"eval_steps_per_second": 0.573, |
|
"eval_wer": 0.32036619825874446, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 3.86716621253406e-05, |
|
"loss": 0.1114, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 3.860354223433243e-05, |
|
"loss": 0.1127, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 3.853542234332425e-05, |
|
"loss": 0.08, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 3.846730245231608e-05, |
|
"loss": 0.0703, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 3.8399182561307905e-05, |
|
"loss": 0.0738, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 3.8331062670299725e-05, |
|
"loss": 0.115, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 3.826294277929155e-05, |
|
"loss": 0.1193, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 3.819482288828338e-05, |
|
"loss": 0.0817, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 3.812670299727521e-05, |
|
"loss": 0.061, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"learning_rate": 3.8058583106267034e-05, |
|
"loss": 0.0629, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 3.799046321525886e-05, |
|
"loss": 0.113, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 3.792234332425068e-05, |
|
"loss": 0.1108, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"learning_rate": 3.785422343324251e-05, |
|
"loss": 0.0959, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 3.778610354223434e-05, |
|
"loss": 0.0624, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"learning_rate": 3.771798365122616e-05, |
|
"loss": 0.0729, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 3.7649863760217985e-05, |
|
"loss": 0.118, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 3.758174386920981e-05, |
|
"loss": 0.1069, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 12.98, |
|
"learning_rate": 3.751362397820163e-05, |
|
"loss": 0.0815, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 3.744550408719346e-05, |
|
"loss": 0.0708, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 3.737738419618529e-05, |
|
"loss": 0.1372, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 3.7309264305177115e-05, |
|
"loss": 0.0833, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 3.7241144414168935e-05, |
|
"loss": 0.0669, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 3.717302452316076e-05, |
|
"loss": 0.0548, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 3.710490463215259e-05, |
|
"loss": 0.0677, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 3.703678474114442e-05, |
|
"loss": 0.1136, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 3.6968664850136244e-05, |
|
"loss": 0.087, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 3.690054495912807e-05, |
|
"loss": 0.0735, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"learning_rate": 3.683242506811989e-05, |
|
"loss": 0.0486, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 3.676430517711172e-05, |
|
"loss": 0.0615, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"learning_rate": 3.669618528610355e-05, |
|
"loss": 0.1125, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"learning_rate": 3.662806539509537e-05, |
|
"loss": 0.0852, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 3.6559945504087195e-05, |
|
"loss": 0.0758, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 3.649182561307902e-05, |
|
"loss": 0.0538, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 3.642370572207084e-05, |
|
"loss": 0.0628, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 3.635558583106267e-05, |
|
"loss": 0.1344, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 3.62874659400545e-05, |
|
"loss": 0.0847, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"learning_rate": 3.621934604904632e-05, |
|
"loss": 0.0728, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 3.6151226158038145e-05, |
|
"loss": 0.058, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 13.25, |
|
"learning_rate": 3.608310626702997e-05, |
|
"loss": 0.066, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 3.60149863760218e-05, |
|
"loss": 0.1383, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"eval_loss": 0.2958827614784241, |
|
"eval_runtime": 1362.145, |
|
"eval_samples_per_second": 4.576, |
|
"eval_steps_per_second": 0.573, |
|
"eval_wer": 0.3211299068275546, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"learning_rate": 3.594686648501363e-05, |
|
"loss": 0.0874, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 3.5878746594005454e-05, |
|
"loss": 0.0677, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 3.5810626702997275e-05, |
|
"loss": 0.0481, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"learning_rate": 3.57425068119891e-05, |
|
"loss": 0.0682, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 3.567438692098093e-05, |
|
"loss": 0.1395, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 3.560626702997276e-05, |
|
"loss": 0.0879, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 13.35, |
|
"learning_rate": 3.553814713896458e-05, |
|
"loss": 0.0668, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 3.5470027247956405e-05, |
|
"loss": 0.0565, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 13.38, |
|
"learning_rate": 3.540190735694823e-05, |
|
"loss": 0.076, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 3.533378746594005e-05, |
|
"loss": 0.1275, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 13.41, |
|
"learning_rate": 3.526566757493188e-05, |
|
"loss": 0.0807, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 3.519754768392371e-05, |
|
"loss": 0.0743, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 3.5129427792915535e-05, |
|
"loss": 0.0607, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 3.506130790190736e-05, |
|
"loss": 0.0709, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"learning_rate": 3.499318801089919e-05, |
|
"loss": 0.1291, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 3.492506811989101e-05, |
|
"loss": 0.0856, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"learning_rate": 3.485694822888284e-05, |
|
"loss": 0.068, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 3.4788828337874664e-05, |
|
"loss": 0.0611, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"learning_rate": 3.4720708446866485e-05, |
|
"loss": 0.0812, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 3.465258855585831e-05, |
|
"loss": 0.1167, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 13.53, |
|
"learning_rate": 3.458446866485014e-05, |
|
"loss": 0.0918, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"learning_rate": 3.451634877384196e-05, |
|
"loss": 0.0617, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"learning_rate": 3.444822888283379e-05, |
|
"loss": 0.0641, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 3.4380108991825615e-05, |
|
"loss": 0.0783, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 3.4311989100817435e-05, |
|
"loss": 0.1344, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 3.424386920980926e-05, |
|
"loss": 0.0866, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 3.417574931880109e-05, |
|
"loss": 0.0709, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 13.62, |
|
"learning_rate": 3.410762942779292e-05, |
|
"loss": 0.0547, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 3.4039509536784744e-05, |
|
"loss": 0.0879, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"learning_rate": 3.397138964577657e-05, |
|
"loss": 0.124, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 3.39032697547684e-05, |
|
"loss": 0.0929, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 3.383514986376022e-05, |
|
"loss": 0.0844, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 3.376702997275205e-05, |
|
"loss": 0.0602, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 3.3698910081743874e-05, |
|
"loss": 0.0663, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 13.71, |
|
"learning_rate": 3.3630790190735695e-05, |
|
"loss": 0.1273, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 3.356267029972752e-05, |
|
"loss": 0.0865, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 3.349455040871935e-05, |
|
"loss": 0.0716, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 3.342643051771117e-05, |
|
"loss": 0.0654, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 3.3358310626703e-05, |
|
"loss": 0.0767, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 3.3290190735694825e-05, |
|
"loss": 0.1233, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"eval_loss": 0.29217278957366943, |
|
"eval_runtime": 1363.9978, |
|
"eval_samples_per_second": 4.57, |
|
"eval_steps_per_second": 0.572, |
|
"eval_wer": 0.3212540094699863, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 3.3222070844686645e-05, |
|
"loss": 0.0851, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"learning_rate": 3.315395095367847e-05, |
|
"loss": 0.0695, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 3.30858310626703e-05, |
|
"loss": 0.0589, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 13.83, |
|
"learning_rate": 3.301771117166213e-05, |
|
"loss": 0.0692, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 3.2949591280653954e-05, |
|
"loss": 0.1202, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"learning_rate": 3.288147138964578e-05, |
|
"loss": 0.0794, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 13.86, |
|
"learning_rate": 3.28133514986376e-05, |
|
"loss": 0.0652, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"learning_rate": 3.274523160762943e-05, |
|
"loss": 0.055, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"learning_rate": 3.267711171662126e-05, |
|
"loss": 0.0576, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 3.2608991825613084e-05, |
|
"loss": 0.1079, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 13.92, |
|
"learning_rate": 3.2540871934604905e-05, |
|
"loss": 0.0861, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 3.247275204359673e-05, |
|
"loss": 0.0703, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 3.240463215258856e-05, |
|
"loss": 0.052, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 13.95, |
|
"learning_rate": 3.233651226158038e-05, |
|
"loss": 0.0741, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 3.226839237057221e-05, |
|
"loss": 0.1237, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"learning_rate": 3.2200272479564035e-05, |
|
"loss": 0.0753, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"learning_rate": 3.2132152588555855e-05, |
|
"loss": 0.0648, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 14.01, |
|
"learning_rate": 3.206403269754768e-05, |
|
"loss": 0.1136, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"learning_rate": 3.199591280653951e-05, |
|
"loss": 0.0857, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 3.192779291553134e-05, |
|
"loss": 0.0645, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"learning_rate": 3.1859673024523164e-05, |
|
"loss": 0.0549, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 3.179155313351499e-05, |
|
"loss": 0.0598, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"learning_rate": 3.172343324250681e-05, |
|
"loss": 0.1101, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"learning_rate": 3.165531335149864e-05, |
|
"loss": 0.0832, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"learning_rate": 3.158719346049047e-05, |
|
"loss": 0.066, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 3.151907356948229e-05, |
|
"loss": 0.0602, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"learning_rate": 3.1450953678474115e-05, |
|
"loss": 0.0611, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 3.138283378746594e-05, |
|
"loss": 0.0953, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"learning_rate": 3.131471389645776e-05, |
|
"loss": 0.0902, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 3.124659400544959e-05, |
|
"loss": 0.0605, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 3.117847411444142e-05, |
|
"loss": 0.0536, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 3.1110354223433245e-05, |
|
"loss": 0.0545, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"learning_rate": 3.104223433242507e-05, |
|
"loss": 0.0958, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 3.09741144414169e-05, |
|
"loss": 0.0857, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 14.22, |
|
"learning_rate": 3.0905994550408727e-05, |
|
"loss": 0.0664, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"learning_rate": 3.083787465940055e-05, |
|
"loss": 0.0511, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 14.25, |
|
"learning_rate": 3.0769754768392374e-05, |
|
"loss": 0.0616, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 3.07016348773842e-05, |
|
"loss": 0.1027, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 3.063351498637602e-05, |
|
"loss": 0.0851, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 3.056539509536785e-05, |
|
"loss": 0.0688, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"eval_loss": 0.2903039753437042, |
|
"eval_runtime": 1371.1539, |
|
"eval_samples_per_second": 4.546, |
|
"eval_steps_per_second": 0.569, |
|
"eval_wer": 0.32086260882847106, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 3.0497275204359677e-05, |
|
"loss": 0.0593, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"learning_rate": 3.0429155313351497e-05, |
|
"loss": 0.0553, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"learning_rate": 3.0361035422343325e-05, |
|
"loss": 0.0969, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 14.34, |
|
"learning_rate": 3.0292915531335152e-05, |
|
"loss": 0.0858, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"learning_rate": 3.0224795640326976e-05, |
|
"loss": 0.0708, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 14.36, |
|
"learning_rate": 3.0156675749318803e-05, |
|
"loss": 0.0539, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"learning_rate": 3.008855585831063e-05, |
|
"loss": 0.0664, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 3.002043596730245e-05, |
|
"loss": 0.0867, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 2.995231607629428e-05, |
|
"loss": 0.0821, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"learning_rate": 2.9884196185286106e-05, |
|
"loss": 0.069, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 14.43, |
|
"learning_rate": 2.981607629427793e-05, |
|
"loss": 0.0555, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"learning_rate": 2.9747956403269757e-05, |
|
"loss": 0.0501, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 2.9679836512261584e-05, |
|
"loss": 0.0969, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 14.46, |
|
"learning_rate": 2.9611716621253405e-05, |
|
"loss": 0.0821, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"learning_rate": 2.9543596730245232e-05, |
|
"loss": 0.0683, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 14.49, |
|
"learning_rate": 2.947547683923706e-05, |
|
"loss": 0.0515, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 2.9407356948228887e-05, |
|
"loss": 0.0518, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"learning_rate": 2.9339237057220707e-05, |
|
"loss": 0.0908, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 2.9271117166212535e-05, |
|
"loss": 0.0841, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 2.9202997275204362e-05, |
|
"loss": 0.0628, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 14.55, |
|
"learning_rate": 2.9134877384196186e-05, |
|
"loss": 0.0517, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"learning_rate": 2.9066757493188013e-05, |
|
"loss": 0.055, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 14.58, |
|
"learning_rate": 2.899863760217984e-05, |
|
"loss": 0.0904, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"learning_rate": 2.893051771117166e-05, |
|
"loss": 0.0848, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 2.886239782016349e-05, |
|
"loss": 0.0694, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 14.62, |
|
"learning_rate": 2.8794277929155316e-05, |
|
"loss": 0.0515, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 2.872615803814714e-05, |
|
"loss": 0.0622, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 2.8658038147138967e-05, |
|
"loss": 0.1064, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 2.8589918256130794e-05, |
|
"loss": 0.0824, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"learning_rate": 2.8521798365122615e-05, |
|
"loss": 0.0665, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 2.8453678474114442e-05, |
|
"loss": 0.0541, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 2.838555858310627e-05, |
|
"loss": 0.0654, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"learning_rate": 2.831743869209809e-05, |
|
"loss": 0.1008, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"learning_rate": 2.8249318801089917e-05, |
|
"loss": 0.0819, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"learning_rate": 2.8181198910081745e-05, |
|
"loss": 0.0713, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"learning_rate": 2.811307901907357e-05, |
|
"loss": 0.057, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 14.76, |
|
"learning_rate": 2.8044959128065396e-05, |
|
"loss": 0.0629, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"learning_rate": 2.7976839237057223e-05, |
|
"loss": 0.1028, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"learning_rate": 2.790871934604905e-05, |
|
"loss": 0.0902, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 2.784059945504087e-05, |
|
"loss": 0.0655, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"eval_loss": 0.2868470549583435, |
|
"eval_runtime": 1425.5906, |
|
"eval_samples_per_second": 4.372, |
|
"eval_steps_per_second": 0.547, |
|
"eval_wer": 0.3182087215518558, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 2.77724795640327e-05, |
|
"loss": 0.0506, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"learning_rate": 2.7704359673024526e-05, |
|
"loss": 0.0544, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 2.763623978201635e-05, |
|
"loss": 0.0956, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 14.85, |
|
"learning_rate": 2.7568119891008177e-05, |
|
"loss": 0.0852, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 0.0614, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"learning_rate": 2.7431880108991825e-05, |
|
"loss": 0.0484, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 14.88, |
|
"learning_rate": 2.7363760217983652e-05, |
|
"loss": 0.0523, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 2.729564032697548e-05, |
|
"loss": 0.1058, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 14.91, |
|
"learning_rate": 2.7227520435967303e-05, |
|
"loss": 0.0824, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"learning_rate": 2.715940054495913e-05, |
|
"loss": 0.0649, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"learning_rate": 2.7091280653950958e-05, |
|
"loss": 0.054, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 2.702316076294278e-05, |
|
"loss": 0.0536, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"learning_rate": 2.6955040871934606e-05, |
|
"loss": 0.1042, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 2.6886920980926433e-05, |
|
"loss": 0.0756, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"learning_rate": 2.6818801089918254e-05, |
|
"loss": 0.0554, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 2.675068119891008e-05, |
|
"loss": 0.0682, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 15.01, |
|
"learning_rate": 2.668256130790191e-05, |
|
"loss": 0.093, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 2.6614441416893732e-05, |
|
"loss": 0.0687, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"learning_rate": 2.654632152588556e-05, |
|
"loss": 0.0511, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"learning_rate": 2.6478201634877387e-05, |
|
"loss": 0.0451, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 15.06, |
|
"learning_rate": 2.6410081743869208e-05, |
|
"loss": 0.0571, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"learning_rate": 2.6341961852861035e-05, |
|
"loss": 0.0889, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 15.09, |
|
"learning_rate": 2.6273841961852862e-05, |
|
"loss": 0.0667, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"learning_rate": 2.620572207084469e-05, |
|
"loss": 0.0586, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 2.6137602179836513e-05, |
|
"loss": 0.0461, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 15.13, |
|
"learning_rate": 2.606948228882834e-05, |
|
"loss": 0.0781, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"learning_rate": 2.6001362397820168e-05, |
|
"loss": 0.089, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 15.15, |
|
"learning_rate": 2.593324250681199e-05, |
|
"loss": 0.0619, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"learning_rate": 2.5865122615803816e-05, |
|
"loss": 0.0508, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 15.18, |
|
"learning_rate": 2.5797002724795643e-05, |
|
"loss": 0.0482, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"learning_rate": 2.5728882833787464e-05, |
|
"loss": 0.079, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 2.5660762942779295e-05, |
|
"loss": 0.0918, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 15.22, |
|
"learning_rate": 2.5592643051771122e-05, |
|
"loss": 0.0626, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"learning_rate": 2.5524523160762942e-05, |
|
"loss": 0.0569, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 15.24, |
|
"learning_rate": 2.545640326975477e-05, |
|
"loss": 0.0482, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 15.25, |
|
"learning_rate": 2.5388283378746597e-05, |
|
"loss": 0.0682, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 15.27, |
|
"learning_rate": 2.5320163487738418e-05, |
|
"loss": 0.0965, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"learning_rate": 2.5252043596730245e-05, |
|
"loss": 0.0629, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"learning_rate": 2.5183923705722072e-05, |
|
"loss": 0.0557, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 2.5115803814713896e-05, |
|
"loss": 0.0449, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"eval_loss": 0.29593759775161743, |
|
"eval_runtime": 1364.8447, |
|
"eval_samples_per_second": 4.567, |
|
"eval_steps_per_second": 0.571, |
|
"eval_wer": 0.31715862226974184, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"learning_rate": 2.5047683923705723e-05, |
|
"loss": 0.0664, |
|
"step": 12010 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"learning_rate": 2.497956403269755e-05, |
|
"loss": 0.088, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"learning_rate": 2.4911444141689375e-05, |
|
"loss": 0.0641, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 15.36, |
|
"learning_rate": 2.48433242506812e-05, |
|
"loss": 0.0518, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"learning_rate": 2.4775204359673026e-05, |
|
"loss": 0.0465, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 15.38, |
|
"learning_rate": 2.470708446866485e-05, |
|
"loss": 0.0814, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"learning_rate": 2.4638964577656677e-05, |
|
"loss": 0.098, |
|
"step": 12070 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"learning_rate": 2.4570844686648504e-05, |
|
"loss": 0.0637, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 2.450272479564033e-05, |
|
"loss": 0.0536, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 15.43, |
|
"learning_rate": 2.4434604904632156e-05, |
|
"loss": 0.046, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"learning_rate": 2.436648501362398e-05, |
|
"loss": 0.0679, |
|
"step": 12110 |
|
}, |
|
{ |
|
"epoch": 15.46, |
|
"learning_rate": 2.4298365122615804e-05, |
|
"loss": 0.0886, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 2.423024523160763e-05, |
|
"loss": 0.061, |
|
"step": 12130 |
|
}, |
|
{ |
|
"epoch": 15.48, |
|
"learning_rate": 2.4162125340599455e-05, |
|
"loss": 0.0512, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 2.4094005449591282e-05, |
|
"loss": 0.0457, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 15.51, |
|
"learning_rate": 2.402588555858311e-05, |
|
"loss": 0.0706, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 15.52, |
|
"learning_rate": 2.3957765667574933e-05, |
|
"loss": 0.0955, |
|
"step": 12170 |
|
}, |
|
{ |
|
"epoch": 15.54, |
|
"learning_rate": 2.3889645776566757e-05, |
|
"loss": 0.0637, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 15.55, |
|
"learning_rate": 2.3821525885558585e-05, |
|
"loss": 0.05, |
|
"step": 12190 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"learning_rate": 2.375340599455041e-05, |
|
"loss": 0.0466, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 2.3685286103542233e-05, |
|
"loss": 0.0742, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"learning_rate": 2.361716621253406e-05, |
|
"loss": 0.0933, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 2.3549046321525887e-05, |
|
"loss": 0.0702, |
|
"step": 12230 |
|
}, |
|
{ |
|
"epoch": 15.61, |
|
"learning_rate": 2.3480926430517714e-05, |
|
"loss": 0.0646, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"learning_rate": 2.341280653950954e-05, |
|
"loss": 0.0508, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 15.64, |
|
"learning_rate": 2.3344686648501362e-05, |
|
"loss": 0.065, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"learning_rate": 2.327656675749319e-05, |
|
"loss": 0.0909, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"learning_rate": 2.3208446866485014e-05, |
|
"loss": 0.0707, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 2.314032697547684e-05, |
|
"loss": 0.0536, |
|
"step": 12290 |
|
}, |
|
{ |
|
"epoch": 15.69, |
|
"learning_rate": 2.3072207084468668e-05, |
|
"loss": 0.0485, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"learning_rate": 2.3004087193460492e-05, |
|
"loss": 0.0644, |
|
"step": 12310 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 2.2935967302452316e-05, |
|
"loss": 0.0896, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"learning_rate": 2.2867847411444143e-05, |
|
"loss": 0.0724, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 2.2799727520435967e-05, |
|
"loss": 0.0558, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 15.75, |
|
"learning_rate": 2.2731607629427795e-05, |
|
"loss": 0.0443, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 2.266348773841962e-05, |
|
"loss": 0.0647, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 15.78, |
|
"learning_rate": 2.2595367847411446e-05, |
|
"loss": 0.0854, |
|
"step": 12370 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 2.2527247956403273e-05, |
|
"loss": 0.07, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"learning_rate": 2.2459128065395097e-05, |
|
"loss": 0.0533, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 2.239100817438692e-05, |
|
"loss": 0.0421, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"eval_loss": 0.29661157727241516, |
|
"eval_runtime": 1360.7234, |
|
"eval_samples_per_second": 4.581, |
|
"eval_steps_per_second": 0.573, |
|
"eval_wer": 0.31797006262410266, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 2.232288828337875e-05, |
|
"loss": 0.0668, |
|
"step": 12410 |
|
}, |
|
{ |
|
"epoch": 15.84, |
|
"learning_rate": 2.2254768392370572e-05, |
|
"loss": 0.0895, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"learning_rate": 2.2186648501362396e-05, |
|
"loss": 0.0717, |
|
"step": 12430 |
|
}, |
|
{ |
|
"epoch": 15.87, |
|
"learning_rate": 2.2118528610354224e-05, |
|
"loss": 0.0525, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 15.88, |
|
"learning_rate": 2.205040871934605e-05, |
|
"loss": 0.0492, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 2.1982288828337878e-05, |
|
"loss": 0.0814, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 15.9, |
|
"learning_rate": 2.1914168937329702e-05, |
|
"loss": 0.0914, |
|
"step": 12470 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"learning_rate": 2.1846049046321526e-05, |
|
"loss": 0.0625, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 15.93, |
|
"learning_rate": 2.1777929155313353e-05, |
|
"loss": 0.0505, |
|
"step": 12490 |
|
}, |
|
{ |
|
"epoch": 15.94, |
|
"learning_rate": 2.1709809264305177e-05, |
|
"loss": 0.0409, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"learning_rate": 2.1641689373297e-05, |
|
"loss": 0.0759, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 15.97, |
|
"learning_rate": 2.157356948228883e-05, |
|
"loss": 0.0873, |
|
"step": 12520 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"learning_rate": 2.1505449591280656e-05, |
|
"loss": 0.0574, |
|
"step": 12530 |
|
}, |
|
{ |
|
"epoch": 15.99, |
|
"learning_rate": 2.143732970027248e-05, |
|
"loss": 0.0468, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 16.01, |
|
"learning_rate": 2.1369209809264307e-05, |
|
"loss": 0.0902, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"learning_rate": 2.130108991825613e-05, |
|
"loss": 0.0667, |
|
"step": 12560 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 2.123297002724796e-05, |
|
"loss": 0.0488, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"learning_rate": 2.1164850136239782e-05, |
|
"loss": 0.0498, |
|
"step": 12580 |
|
}, |
|
{ |
|
"epoch": 16.06, |
|
"learning_rate": 2.1096730245231606e-05, |
|
"loss": 0.0525, |
|
"step": 12590 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"learning_rate": 2.1028610354223437e-05, |
|
"loss": 0.0809, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"learning_rate": 2.096049046321526e-05, |
|
"loss": 0.0687, |
|
"step": 12610 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 2.0892370572207085e-05, |
|
"loss": 0.0535, |
|
"step": 12620 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"learning_rate": 2.0824250681198912e-05, |
|
"loss": 0.0404, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"learning_rate": 2.0756130790190736e-05, |
|
"loss": 0.0473, |
|
"step": 12640 |
|
}, |
|
{ |
|
"epoch": 16.14, |
|
"learning_rate": 2.068801089918256e-05, |
|
"loss": 0.0861, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"learning_rate": 2.0619891008174387e-05, |
|
"loss": 0.0626, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"learning_rate": 2.0551771117166215e-05, |
|
"loss": 0.0545, |
|
"step": 12670 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 2.0483651226158042e-05, |
|
"loss": 0.0408, |
|
"step": 12680 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"learning_rate": 2.0415531335149866e-05, |
|
"loss": 0.0507, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 2.034741144414169e-05, |
|
"loss": 0.0905, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 2.0279291553133517e-05, |
|
"loss": 0.0666, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 16.22, |
|
"learning_rate": 2.021117166212534e-05, |
|
"loss": 0.0541, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 2.0143051771117165e-05, |
|
"loss": 0.0542, |
|
"step": 12730 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"learning_rate": 2.0074931880108992e-05, |
|
"loss": 0.053, |
|
"step": 12740 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"learning_rate": 2.000681198910082e-05, |
|
"loss": 0.0833, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 16.28, |
|
"learning_rate": 1.9938692098092644e-05, |
|
"loss": 0.0669, |
|
"step": 12760 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"learning_rate": 1.987057220708447e-05, |
|
"loss": 0.0485, |
|
"step": 12770 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 1.9802452316076295e-05, |
|
"loss": 0.0424, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"learning_rate": 1.9734332425068122e-05, |
|
"loss": 0.0491, |
|
"step": 12790 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 1.9666212534059946e-05, |
|
"loss": 0.0858, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"eval_loss": 0.29408928751945496, |
|
"eval_runtime": 1425.5229, |
|
"eval_samples_per_second": 4.372, |
|
"eval_steps_per_second": 0.547, |
|
"eval_wer": 0.3163853673438216, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"learning_rate": 1.959809264305177e-05, |
|
"loss": 0.0637, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 16.35, |
|
"learning_rate": 1.9529972752043597e-05, |
|
"loss": 0.0578, |
|
"step": 12820 |
|
}, |
|
{ |
|
"epoch": 16.36, |
|
"learning_rate": 1.9461852861035425e-05, |
|
"loss": 0.0447, |
|
"step": 12830 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"learning_rate": 1.939373297002725e-05, |
|
"loss": 0.0543, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 16.39, |
|
"learning_rate": 1.9325613079019076e-05, |
|
"loss": 0.0896, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"learning_rate": 1.92574931880109e-05, |
|
"loss": 0.0639, |
|
"step": 12860 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"learning_rate": 1.9189373297002724e-05, |
|
"loss": 0.0601, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"learning_rate": 1.912125340599455e-05, |
|
"loss": 0.044, |
|
"step": 12880 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"learning_rate": 1.9053133514986375e-05, |
|
"loss": 0.0538, |
|
"step": 12890 |
|
}, |
|
{ |
|
"epoch": 16.45, |
|
"learning_rate": 1.8985013623978202e-05, |
|
"loss": 0.0752, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"learning_rate": 1.891689373297003e-05, |
|
"loss": 0.0674, |
|
"step": 12910 |
|
}, |
|
{ |
|
"epoch": 16.48, |
|
"learning_rate": 1.8848773841961853e-05, |
|
"loss": 0.0531, |
|
"step": 12920 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"learning_rate": 1.878065395095368e-05, |
|
"loss": 0.0451, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 1.8712534059945505e-05, |
|
"loss": 0.045, |
|
"step": 12940 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"learning_rate": 1.864441416893733e-05, |
|
"loss": 0.092, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 16.53, |
|
"learning_rate": 1.8576294277929156e-05, |
|
"loss": 0.0682, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"learning_rate": 1.8508174386920983e-05, |
|
"loss": 0.0575, |
|
"step": 12970 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"learning_rate": 1.8440054495912807e-05, |
|
"loss": 0.0483, |
|
"step": 12980 |
|
}, |
|
{ |
|
"epoch": 16.57, |
|
"learning_rate": 1.8371934604904635e-05, |
|
"loss": 0.0499, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 1.830381471389646e-05, |
|
"loss": 0.0786, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"learning_rate": 1.8235694822888282e-05, |
|
"loss": 0.0647, |
|
"step": 13010 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 1.816757493188011e-05, |
|
"loss": 0.0483, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 16.62, |
|
"learning_rate": 1.8099455040871934e-05, |
|
"loss": 0.0445, |
|
"step": 13030 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"learning_rate": 1.803133514986376e-05, |
|
"loss": 0.0482, |
|
"step": 13040 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"learning_rate": 1.7963215258855588e-05, |
|
"loss": 0.0789, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 16.66, |
|
"learning_rate": 1.7895095367847412e-05, |
|
"loss": 0.0685, |
|
"step": 13060 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 1.782697547683924e-05, |
|
"loss": 0.053, |
|
"step": 13070 |
|
}, |
|
{ |
|
"epoch": 16.68, |
|
"learning_rate": 1.7758855585831063e-05, |
|
"loss": 0.0422, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"learning_rate": 1.7690735694822887e-05, |
|
"loss": 0.0411, |
|
"step": 13090 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"learning_rate": 1.7622615803814715e-05, |
|
"loss": 0.0724, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 16.72, |
|
"learning_rate": 1.755449591280654e-05, |
|
"loss": 0.0682, |
|
"step": 13110 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"learning_rate": 1.7486376021798366e-05, |
|
"loss": 0.0536, |
|
"step": 13120 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"learning_rate": 1.7418256130790193e-05, |
|
"loss": 0.0414, |
|
"step": 13130 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"learning_rate": 1.7350136239782017e-05, |
|
"loss": 0.0471, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 16.77, |
|
"learning_rate": 1.7282016348773845e-05, |
|
"loss": 0.0891, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"learning_rate": 1.721389645776567e-05, |
|
"loss": 0.0565, |
|
"step": 13160 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"learning_rate": 1.7145776566757492e-05, |
|
"loss": 0.0498, |
|
"step": 13170 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 1.707765667574932e-05, |
|
"loss": 0.0436, |
|
"step": 13180 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"learning_rate": 1.7009536784741144e-05, |
|
"loss": 0.0496, |
|
"step": 13190 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 1.694141689373297e-05, |
|
"loss": 0.0859, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"eval_loss": 0.29795339703559875, |
|
"eval_runtime": 1359.0826, |
|
"eval_samples_per_second": 4.586, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.31654765541469376, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 16.85, |
|
"learning_rate": 1.6873297002724798e-05, |
|
"loss": 0.0628, |
|
"step": 13210 |
|
}, |
|
{ |
|
"epoch": 16.86, |
|
"learning_rate": 1.6805177111716622e-05, |
|
"loss": 0.0508, |
|
"step": 13220 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"learning_rate": 1.6737057220708446e-05, |
|
"loss": 0.0443, |
|
"step": 13230 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 1.6668937329700273e-05, |
|
"loss": 0.0518, |
|
"step": 13240 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"learning_rate": 1.6600817438692097e-05, |
|
"loss": 0.1028, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 16.91, |
|
"learning_rate": 1.6532697547683925e-05, |
|
"loss": 0.0613, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"learning_rate": 1.6464577656675752e-05, |
|
"loss": 0.0516, |
|
"step": 13270 |
|
}, |
|
{ |
|
"epoch": 16.94, |
|
"learning_rate": 1.6396457765667576e-05, |
|
"loss": 0.0388, |
|
"step": 13280 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 1.6328337874659403e-05, |
|
"loss": 0.0495, |
|
"step": 13290 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"learning_rate": 1.6260217983651227e-05, |
|
"loss": 0.092, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 16.98, |
|
"learning_rate": 1.619209809264305e-05, |
|
"loss": 0.0585, |
|
"step": 13310 |
|
}, |
|
{ |
|
"epoch": 16.99, |
|
"learning_rate": 1.612397820163488e-05, |
|
"loss": 0.0389, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 1.6055858310626702e-05, |
|
"loss": 0.0595, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"learning_rate": 1.598773841961853e-05, |
|
"loss": 0.0681, |
|
"step": 13340 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 1.5919618528610357e-05, |
|
"loss": 0.0566, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 17.04, |
|
"learning_rate": 1.585149863760218e-05, |
|
"loss": 0.0452, |
|
"step": 13360 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"learning_rate": 1.5783378746594008e-05, |
|
"loss": 0.0345, |
|
"step": 13370 |
|
}, |
|
{ |
|
"epoch": 17.07, |
|
"learning_rate": 1.5715258855585832e-05, |
|
"loss": 0.0683, |
|
"step": 13380 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 1.5647138964577656e-05, |
|
"loss": 0.0654, |
|
"step": 13390 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 1.5579019073569483e-05, |
|
"loss": 0.0554, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 17.1, |
|
"learning_rate": 1.5510899182561307e-05, |
|
"loss": 0.0459, |
|
"step": 13410 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"learning_rate": 1.5442779291553135e-05, |
|
"loss": 0.0431, |
|
"step": 13420 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"learning_rate": 1.5374659400544962e-05, |
|
"loss": 0.0706, |
|
"step": 13430 |
|
}, |
|
{ |
|
"epoch": 17.14, |
|
"learning_rate": 1.5306539509536786e-05, |
|
"loss": 0.0669, |
|
"step": 13440 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 1.523841961852861e-05, |
|
"loss": 0.0511, |
|
"step": 13450 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"learning_rate": 1.5170299727520437e-05, |
|
"loss": 0.0458, |
|
"step": 13460 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"learning_rate": 1.5102179836512261e-05, |
|
"loss": 0.0412, |
|
"step": 13470 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 1.5034059945504088e-05, |
|
"loss": 0.0718, |
|
"step": 13480 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"learning_rate": 1.4965940054495914e-05, |
|
"loss": 0.0681, |
|
"step": 13490 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"learning_rate": 1.4897820163487738e-05, |
|
"loss": 0.0563, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 17.23, |
|
"learning_rate": 1.4829700272479565e-05, |
|
"loss": 0.0419, |
|
"step": 13510 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 1.4761580381471391e-05, |
|
"loss": 0.037, |
|
"step": 13520 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"learning_rate": 1.4693460490463215e-05, |
|
"loss": 0.0771, |
|
"step": 13530 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"learning_rate": 1.4625340599455042e-05, |
|
"loss": 0.066, |
|
"step": 13540 |
|
}, |
|
{ |
|
"epoch": 17.28, |
|
"learning_rate": 1.4557220708446868e-05, |
|
"loss": 0.054, |
|
"step": 13550 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 1.4489100817438692e-05, |
|
"loss": 0.0411, |
|
"step": 13560 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"learning_rate": 1.4420980926430519e-05, |
|
"loss": 0.0395, |
|
"step": 13570 |
|
}, |
|
{ |
|
"epoch": 17.32, |
|
"learning_rate": 1.4352861035422343e-05, |
|
"loss": 0.0583, |
|
"step": 13580 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 1.428474114441417e-05, |
|
"loss": 0.0715, |
|
"step": 13590 |
|
}, |
|
{ |
|
"epoch": 17.35, |
|
"learning_rate": 1.4216621253405996e-05, |
|
"loss": 0.0561, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 17.35, |
|
"eval_loss": 0.2964591383934021, |
|
"eval_runtime": 1356.6797, |
|
"eval_samples_per_second": 4.594, |
|
"eval_steps_per_second": 0.575, |
|
"eval_wer": 0.316490377272033, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"learning_rate": 1.414850136239782e-05, |
|
"loss": 0.0422, |
|
"step": 13610 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 1.4080381471389647e-05, |
|
"loss": 0.0408, |
|
"step": 13620 |
|
}, |
|
{ |
|
"epoch": 17.38, |
|
"learning_rate": 1.4012261580381473e-05, |
|
"loss": 0.0636, |
|
"step": 13630 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 1.3944141689373297e-05, |
|
"loss": 0.071, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 17.41, |
|
"learning_rate": 1.3876021798365124e-05, |
|
"loss": 0.0537, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 17.42, |
|
"learning_rate": 1.3807901907356948e-05, |
|
"loss": 0.0467, |
|
"step": 13660 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"learning_rate": 1.3739782016348774e-05, |
|
"loss": 0.0389, |
|
"step": 13670 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"learning_rate": 1.3671662125340601e-05, |
|
"loss": 0.0678, |
|
"step": 13680 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"learning_rate": 1.3603542234332425e-05, |
|
"loss": 0.0687, |
|
"step": 13690 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 1.353542234332425e-05, |
|
"loss": 0.0534, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 17.49, |
|
"learning_rate": 1.3467302452316078e-05, |
|
"loss": 0.0449, |
|
"step": 13710 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 1.3399182561307902e-05, |
|
"loss": 0.0421, |
|
"step": 13720 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"learning_rate": 1.3331062670299729e-05, |
|
"loss": 0.0783, |
|
"step": 13730 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 1.3262942779291555e-05, |
|
"loss": 0.0682, |
|
"step": 13740 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"learning_rate": 1.3194822888283379e-05, |
|
"loss": 0.0581, |
|
"step": 13750 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"learning_rate": 1.3126702997275206e-05, |
|
"loss": 0.0446, |
|
"step": 13760 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"learning_rate": 1.305858310626703e-05, |
|
"loss": 0.0407, |
|
"step": 13770 |
|
}, |
|
{ |
|
"epoch": 17.58, |
|
"learning_rate": 1.2990463215258855e-05, |
|
"loss": 0.0706, |
|
"step": 13780 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 1.2922343324250683e-05, |
|
"loss": 0.069, |
|
"step": 13790 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 1.2854223433242507e-05, |
|
"loss": 0.0551, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 17.61, |
|
"learning_rate": 1.2786103542234332e-05, |
|
"loss": 0.0436, |
|
"step": 13810 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"learning_rate": 1.271798365122616e-05, |
|
"loss": 0.0416, |
|
"step": 13820 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"learning_rate": 1.2649863760217984e-05, |
|
"loss": 0.0661, |
|
"step": 13830 |
|
}, |
|
{ |
|
"epoch": 17.65, |
|
"learning_rate": 1.2581743869209811e-05, |
|
"loss": 0.0648, |
|
"step": 13840 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"learning_rate": 1.2513623978201636e-05, |
|
"loss": 0.055, |
|
"step": 13850 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"learning_rate": 1.2445504087193462e-05, |
|
"loss": 0.0475, |
|
"step": 13860 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"learning_rate": 1.2377384196185286e-05, |
|
"loss": 0.0417, |
|
"step": 13870 |
|
}, |
|
{ |
|
"epoch": 17.7, |
|
"learning_rate": 1.2309264305177112e-05, |
|
"loss": 0.0553, |
|
"step": 13880 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"learning_rate": 1.2241144414168939e-05, |
|
"loss": 0.0669, |
|
"step": 13890 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"learning_rate": 1.2173024523160763e-05, |
|
"loss": 0.0533, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 1.2104904632152589e-05, |
|
"loss": 0.0457, |
|
"step": 13910 |
|
}, |
|
{ |
|
"epoch": 17.75, |
|
"learning_rate": 1.2036784741144414e-05, |
|
"loss": 0.0401, |
|
"step": 13920 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"learning_rate": 1.1968664850136241e-05, |
|
"loss": 0.0702, |
|
"step": 13930 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"learning_rate": 1.1900544959128065e-05, |
|
"loss": 0.0667, |
|
"step": 13940 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"learning_rate": 1.1832425068119891e-05, |
|
"loss": 0.0543, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"learning_rate": 1.1764305177111717e-05, |
|
"loss": 0.0413, |
|
"step": 13960 |
|
}, |
|
{ |
|
"epoch": 17.82, |
|
"learning_rate": 1.1696185286103544e-05, |
|
"loss": 0.0411, |
|
"step": 13970 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"learning_rate": 1.1628065395095368e-05, |
|
"loss": 0.0578, |
|
"step": 13980 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"learning_rate": 1.1559945504087194e-05, |
|
"loss": 0.0687, |
|
"step": 13990 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"learning_rate": 1.149182561307902e-05, |
|
"loss": 0.0506, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"eval_loss": 0.29349198937416077, |
|
"eval_runtime": 1425.365, |
|
"eval_samples_per_second": 4.373, |
|
"eval_steps_per_second": 0.547, |
|
"eval_wer": 0.3147720329922102, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"learning_rate": 1.1423705722070845e-05, |
|
"loss": 0.046, |
|
"step": 14010 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"learning_rate": 1.135558583106267e-05, |
|
"loss": 0.0379, |
|
"step": 14020 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"learning_rate": 1.1287465940054496e-05, |
|
"loss": 0.063, |
|
"step": 14030 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 1.1219346049046323e-05, |
|
"loss": 0.064, |
|
"step": 14040 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"learning_rate": 1.1151226158038147e-05, |
|
"loss": 0.0539, |
|
"step": 14050 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"learning_rate": 1.1083106267029973e-05, |
|
"loss": 0.0415, |
|
"step": 14060 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"learning_rate": 1.1014986376021799e-05, |
|
"loss": 0.0387, |
|
"step": 14070 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"learning_rate": 1.0946866485013626e-05, |
|
"loss": 0.0647, |
|
"step": 14080 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 1.0885558583106268e-05, |
|
"loss": 0.0569, |
|
"step": 14090 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"learning_rate": 1.0817438692098093e-05, |
|
"loss": 0.0442, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 1.0749318801089919e-05, |
|
"loss": 0.0373, |
|
"step": 14110 |
|
}, |
|
{ |
|
"epoch": 18.01, |
|
"learning_rate": 1.0681198910081745e-05, |
|
"loss": 0.076, |
|
"step": 14120 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 1.061307901907357e-05, |
|
"loss": 0.0564, |
|
"step": 14130 |
|
}, |
|
{ |
|
"epoch": 18.04, |
|
"learning_rate": 1.0544959128065396e-05, |
|
"loss": 0.0406, |
|
"step": 14140 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"learning_rate": 1.0476839237057221e-05, |
|
"loss": 0.034, |
|
"step": 14150 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"learning_rate": 1.0408719346049047e-05, |
|
"loss": 0.0487, |
|
"step": 14160 |
|
}, |
|
{ |
|
"epoch": 18.07, |
|
"learning_rate": 1.0340599455040873e-05, |
|
"loss": 0.0756, |
|
"step": 14170 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 1.0272479564032698e-05, |
|
"loss": 0.0532, |
|
"step": 14180 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 1.0204359673024522e-05, |
|
"loss": 0.0439, |
|
"step": 14190 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 1.013623978201635e-05, |
|
"loss": 0.0335, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 18.12, |
|
"learning_rate": 1.0068119891008175e-05, |
|
"loss": 0.0385, |
|
"step": 14210 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"learning_rate": 1e-05, |
|
"loss": 0.073, |
|
"step": 14220 |
|
}, |
|
{ |
|
"epoch": 18.15, |
|
"learning_rate": 9.931880108991826e-06, |
|
"loss": 0.0474, |
|
"step": 14230 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 9.863760217983652e-06, |
|
"loss": 0.0493, |
|
"step": 14240 |
|
}, |
|
{ |
|
"epoch": 18.18, |
|
"learning_rate": 9.795640326975478e-06, |
|
"loss": 0.0343, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"learning_rate": 9.727520435967302e-06, |
|
"loss": 0.0383, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"learning_rate": 9.659400544959129e-06, |
|
"loss": 0.0704, |
|
"step": 14270 |
|
}, |
|
{ |
|
"epoch": 18.21, |
|
"learning_rate": 9.591280653950955e-06, |
|
"loss": 0.0555, |
|
"step": 14280 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 9.52316076294278e-06, |
|
"loss": 0.0432, |
|
"step": 14290 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 9.455040871934604e-06, |
|
"loss": 0.034, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 18.25, |
|
"learning_rate": 9.386920980926431e-06, |
|
"loss": 0.0527, |
|
"step": 14310 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"learning_rate": 9.318801089918257e-06, |
|
"loss": 0.0846, |
|
"step": 14320 |
|
}, |
|
{ |
|
"epoch": 18.28, |
|
"learning_rate": 9.250681198910083e-06, |
|
"loss": 0.0562, |
|
"step": 14330 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 9.182561307901907e-06, |
|
"loss": 0.0465, |
|
"step": 14340 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 9.114441416893734e-06, |
|
"loss": 0.0361, |
|
"step": 14350 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"learning_rate": 9.04632152588556e-06, |
|
"loss": 0.038, |
|
"step": 14360 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 8.978201634877383e-06, |
|
"loss": 0.0687, |
|
"step": 14370 |
|
}, |
|
{ |
|
"epoch": 18.34, |
|
"learning_rate": 8.91008174386921e-06, |
|
"loss": 0.0545, |
|
"step": 14380 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"learning_rate": 8.841961852861036e-06, |
|
"loss": 0.0424, |
|
"step": 14390 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 8.773841961852862e-06, |
|
"loss": 0.0312, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"eval_loss": 0.2964278757572174, |
|
"eval_runtime": 1359.3539, |
|
"eval_samples_per_second": 4.585, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.3153925462043684, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 18.38, |
|
"learning_rate": 8.705722070844686e-06, |
|
"loss": 0.036, |
|
"step": 14410 |
|
}, |
|
{ |
|
"epoch": 18.39, |
|
"learning_rate": 8.637602179836513e-06, |
|
"loss": 0.0695, |
|
"step": 14420 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"learning_rate": 8.569482288828339e-06, |
|
"loss": 0.0559, |
|
"step": 14430 |
|
}, |
|
{ |
|
"epoch": 18.42, |
|
"learning_rate": 8.501362397820165e-06, |
|
"loss": 0.0442, |
|
"step": 14440 |
|
}, |
|
{ |
|
"epoch": 18.43, |
|
"learning_rate": 8.433242506811988e-06, |
|
"loss": 0.0388, |
|
"step": 14450 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 8.365122615803816e-06, |
|
"loss": 0.039, |
|
"step": 14460 |
|
}, |
|
{ |
|
"epoch": 18.46, |
|
"learning_rate": 8.297002724795641e-06, |
|
"loss": 0.0635, |
|
"step": 14470 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 8.228882833787465e-06, |
|
"loss": 0.0522, |
|
"step": 14480 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"learning_rate": 8.160762942779291e-06, |
|
"loss": 0.0475, |
|
"step": 14490 |
|
}, |
|
{ |
|
"epoch": 18.49, |
|
"learning_rate": 8.092643051771118e-06, |
|
"loss": 0.0347, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 18.51, |
|
"learning_rate": 8.024523160762944e-06, |
|
"loss": 0.0385, |
|
"step": 14510 |
|
}, |
|
{ |
|
"epoch": 18.52, |
|
"learning_rate": 7.956403269754768e-06, |
|
"loss": 0.0648, |
|
"step": 14520 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 7.888283378746593e-06, |
|
"loss": 0.0499, |
|
"step": 14530 |
|
}, |
|
{ |
|
"epoch": 18.55, |
|
"learning_rate": 7.82016348773842e-06, |
|
"loss": 0.0446, |
|
"step": 14540 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"learning_rate": 7.752043596730246e-06, |
|
"loss": 0.0395, |
|
"step": 14550 |
|
}, |
|
{ |
|
"epoch": 18.57, |
|
"learning_rate": 7.68392370572207e-06, |
|
"loss": 0.0346, |
|
"step": 14560 |
|
}, |
|
{ |
|
"epoch": 18.58, |
|
"learning_rate": 7.615803814713897e-06, |
|
"loss": 0.077, |
|
"step": 14570 |
|
}, |
|
{ |
|
"epoch": 18.6, |
|
"learning_rate": 7.547683923705723e-06, |
|
"loss": 0.0541, |
|
"step": 14580 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 7.479564032697547e-06, |
|
"loss": 0.0488, |
|
"step": 14590 |
|
}, |
|
{ |
|
"epoch": 18.62, |
|
"learning_rate": 7.411444141689374e-06, |
|
"loss": 0.0365, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"learning_rate": 7.343324250681199e-06, |
|
"loss": 0.049, |
|
"step": 14610 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"learning_rate": 7.275204359673026e-06, |
|
"loss": 0.0664, |
|
"step": 14620 |
|
}, |
|
{ |
|
"epoch": 18.66, |
|
"learning_rate": 7.20708446866485e-06, |
|
"loss": 0.0492, |
|
"step": 14630 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 7.138964577656676e-06, |
|
"loss": 0.0475, |
|
"step": 14640 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"learning_rate": 7.070844686648502e-06, |
|
"loss": 0.0396, |
|
"step": 14650 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 7.0027247956403266e-06, |
|
"loss": 0.0372, |
|
"step": 14660 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"learning_rate": 6.934604904632153e-06, |
|
"loss": 0.0682, |
|
"step": 14670 |
|
}, |
|
{ |
|
"epoch": 18.72, |
|
"learning_rate": 6.866485013623979e-06, |
|
"loss": 0.0486, |
|
"step": 14680 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"learning_rate": 6.798365122615804e-06, |
|
"loss": 0.0407, |
|
"step": 14690 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 6.730245231607629e-06, |
|
"loss": 0.0349, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 18.76, |
|
"learning_rate": 6.6621253405994555e-06, |
|
"loss": 0.0409, |
|
"step": 14710 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"learning_rate": 6.594005449591281e-06, |
|
"loss": 0.0711, |
|
"step": 14720 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 6.525885558583107e-06, |
|
"loss": 0.0534, |
|
"step": 14730 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"learning_rate": 6.4577656675749316e-06, |
|
"loss": 0.0403, |
|
"step": 14740 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"learning_rate": 6.389645776566758e-06, |
|
"loss": 0.0348, |
|
"step": 14750 |
|
}, |
|
{ |
|
"epoch": 18.83, |
|
"learning_rate": 6.321525885558584e-06, |
|
"loss": 0.0467, |
|
"step": 14760 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 6.2534059945504084e-06, |
|
"loss": 0.0744, |
|
"step": 14770 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"learning_rate": 6.185286103542234e-06, |
|
"loss": 0.051, |
|
"step": 14780 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"learning_rate": 6.1171662125340605e-06, |
|
"loss": 0.0448, |
|
"step": 14790 |
|
}, |
|
{ |
|
"epoch": 18.88, |
|
"learning_rate": 6.049046321525885e-06, |
|
"loss": 0.0403, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 18.88, |
|
"eval_loss": 0.29674461483955383, |
|
"eval_runtime": 1357.9469, |
|
"eval_samples_per_second": 4.59, |
|
"eval_steps_per_second": 0.574, |
|
"eval_wer": 0.3159939667023064, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 5.980926430517712e-06, |
|
"loss": 0.039, |
|
"step": 14810 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"learning_rate": 5.9128065395095365e-06, |
|
"loss": 0.0735, |
|
"step": 14820 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 5.844686648501363e-06, |
|
"loss": 0.0514, |
|
"step": 14830 |
|
}, |
|
{ |
|
"epoch": 18.93, |
|
"learning_rate": 5.776566757493189e-06, |
|
"loss": 0.0386, |
|
"step": 14840 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"learning_rate": 5.708446866485014e-06, |
|
"loss": 0.0378, |
|
"step": 14850 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"learning_rate": 5.64032697547684e-06, |
|
"loss": 0.0385, |
|
"step": 14860 |
|
}, |
|
{ |
|
"epoch": 18.97, |
|
"learning_rate": 5.572207084468665e-06, |
|
"loss": 0.0654, |
|
"step": 14870 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"learning_rate": 5.504087193460491e-06, |
|
"loss": 0.05, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"learning_rate": 5.435967302452316e-06, |
|
"loss": 0.0317, |
|
"step": 14890 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 5.367847411444142e-06, |
|
"loss": 0.0565, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 19.02, |
|
"learning_rate": 5.299727520435967e-06, |
|
"loss": 0.053, |
|
"step": 14910 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 5.231607629427794e-06, |
|
"loss": 0.0399, |
|
"step": 14920 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"learning_rate": 5.163487738419618e-06, |
|
"loss": 0.033, |
|
"step": 14930 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 5.095367847411445e-06, |
|
"loss": 0.0298, |
|
"step": 14940 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"learning_rate": 5.02724795640327e-06, |
|
"loss": 0.0633, |
|
"step": 14950 |
|
}, |
|
{ |
|
"epoch": 19.08, |
|
"learning_rate": 4.959128065395095e-06, |
|
"loss": 0.0576, |
|
"step": 14960 |
|
}, |
|
{ |
|
"epoch": 19.09, |
|
"learning_rate": 4.891008174386921e-06, |
|
"loss": 0.0463, |
|
"step": 14970 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 4.8228882833787465e-06, |
|
"loss": 0.0389, |
|
"step": 14980 |
|
}, |
|
{ |
|
"epoch": 19.12, |
|
"learning_rate": 4.754768392370572e-06, |
|
"loss": 0.039, |
|
"step": 14990 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 4.686648501362398e-06, |
|
"loss": 0.0538, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"learning_rate": 4.618528610354224e-06, |
|
"loss": 0.0542, |
|
"step": 15010 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"learning_rate": 4.550408719346049e-06, |
|
"loss": 0.0464, |
|
"step": 15020 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 4.4822888283378755e-06, |
|
"loss": 0.032, |
|
"step": 15030 |
|
}, |
|
{ |
|
"epoch": 19.18, |
|
"learning_rate": 4.4141689373297e-06, |
|
"loss": 0.0372, |
|
"step": 15040 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 4.346049046321527e-06, |
|
"loss": 0.0591, |
|
"step": 15050 |
|
}, |
|
{ |
|
"epoch": 19.21, |
|
"learning_rate": 4.2779291553133515e-06, |
|
"loss": 0.0531, |
|
"step": 15060 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"learning_rate": 4.209809264305177e-06, |
|
"loss": 0.045, |
|
"step": 15070 |
|
}, |
|
{ |
|
"epoch": 19.23, |
|
"learning_rate": 4.141689373297003e-06, |
|
"loss": 0.0371, |
|
"step": 15080 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"learning_rate": 4.073569482288828e-06, |
|
"loss": 0.0329, |
|
"step": 15090 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"learning_rate": 4.005449591280654e-06, |
|
"loss": 0.0565, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 19.27, |
|
"learning_rate": 3.93732970027248e-06, |
|
"loss": 0.0526, |
|
"step": 15110 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 3.869209809264305e-06, |
|
"loss": 0.045, |
|
"step": 15120 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"learning_rate": 3.801089918256131e-06, |
|
"loss": 0.0344, |
|
"step": 15130 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"learning_rate": 3.732970027247957e-06, |
|
"loss": 0.0337, |
|
"step": 15140 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"learning_rate": 3.664850136239782e-06, |
|
"loss": 0.0644, |
|
"step": 15150 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 3.5967302452316073e-06, |
|
"loss": 0.0561, |
|
"step": 15160 |
|
}, |
|
{ |
|
"epoch": 19.35, |
|
"learning_rate": 3.5286103542234334e-06, |
|
"loss": 0.0415, |
|
"step": 15170 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"learning_rate": 3.4604904632152586e-06, |
|
"loss": 0.0395, |
|
"step": 15180 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 3.3923705722070846e-06, |
|
"loss": 0.0317, |
|
"step": 15190 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 3.3242506811989102e-06, |
|
"loss": 0.0924, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"eval_loss": 0.29548323154449463, |
|
"eval_runtime": 1361.5115, |
|
"eval_samples_per_second": 4.578, |
|
"eval_steps_per_second": 0.573, |
|
"eval_wer": 0.31472430120665956, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"learning_rate": 3.256130790190736e-06, |
|
"loss": 0.0548, |
|
"step": 15210 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"learning_rate": 3.1880108991825615e-06, |
|
"loss": 0.0441, |
|
"step": 15220 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 3.119891008174387e-06, |
|
"loss": 0.037, |
|
"step": 15230 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"learning_rate": 3.0517711171662127e-06, |
|
"loss": 0.0346, |
|
"step": 15240 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"learning_rate": 2.9836512261580384e-06, |
|
"loss": 0.0635, |
|
"step": 15250 |
|
}, |
|
{ |
|
"epoch": 19.46, |
|
"learning_rate": 2.915531335149864e-06, |
|
"loss": 0.0529, |
|
"step": 15260 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"learning_rate": 2.8474114441416896e-06, |
|
"loss": 0.0452, |
|
"step": 15270 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"learning_rate": 2.7792915531335152e-06, |
|
"loss": 0.0334, |
|
"step": 15280 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 2.711171662125341e-06, |
|
"loss": 0.0355, |
|
"step": 15290 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 2.6430517711171665e-06, |
|
"loss": 0.0614, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 2.5749318801089917e-06, |
|
"loss": 0.0512, |
|
"step": 15310 |
|
}, |
|
{ |
|
"epoch": 19.54, |
|
"learning_rate": 2.5068119891008173e-06, |
|
"loss": 0.0467, |
|
"step": 15320 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"learning_rate": 2.438692098092643e-06, |
|
"loss": 0.0321, |
|
"step": 15330 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 2.3705722070844686e-06, |
|
"loss": 0.0336, |
|
"step": 15340 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"learning_rate": 2.302452316076294e-06, |
|
"loss": 0.0552, |
|
"step": 15350 |
|
}, |
|
{ |
|
"epoch": 19.59, |
|
"learning_rate": 2.2343324250681202e-06, |
|
"loss": 0.0549, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 2.166212534059946e-06, |
|
"loss": 0.0471, |
|
"step": 15370 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 2.0980926430517715e-06, |
|
"loss": 0.0332, |
|
"step": 15380 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 2.029972752043597e-06, |
|
"loss": 0.036, |
|
"step": 15390 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"learning_rate": 1.9618528610354227e-06, |
|
"loss": 0.0588, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"learning_rate": 1.893732970027248e-06, |
|
"loss": 0.0578, |
|
"step": 15410 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"learning_rate": 1.8256130790190735e-06, |
|
"loss": 0.043, |
|
"step": 15420 |
|
}, |
|
{ |
|
"epoch": 19.68, |
|
"learning_rate": 1.7574931880108992e-06, |
|
"loss": 0.0444, |
|
"step": 15430 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 1.6893732970027248e-06, |
|
"loss": 0.0358, |
|
"step": 15440 |
|
}, |
|
{ |
|
"epoch": 19.71, |
|
"learning_rate": 1.6212534059945504e-06, |
|
"loss": 0.0561, |
|
"step": 15450 |
|
}, |
|
{ |
|
"epoch": 19.72, |
|
"learning_rate": 1.553133514986376e-06, |
|
"loss": 0.0544, |
|
"step": 15460 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"learning_rate": 1.4850136239782019e-06, |
|
"loss": 0.0402, |
|
"step": 15470 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 1.4168937329700273e-06, |
|
"loss": 0.0356, |
|
"step": 15480 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"learning_rate": 1.348773841961853e-06, |
|
"loss": 0.0353, |
|
"step": 15490 |
|
}, |
|
{ |
|
"epoch": 19.77, |
|
"learning_rate": 1.2806539509536785e-06, |
|
"loss": 0.0638, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 19.78, |
|
"learning_rate": 1.2125340599455042e-06, |
|
"loss": 0.0524, |
|
"step": 15510 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 1.1444141689373298e-06, |
|
"loss": 0.0416, |
|
"step": 15520 |
|
}, |
|
{ |
|
"epoch": 19.81, |
|
"learning_rate": 1.0762942779291554e-06, |
|
"loss": 0.037, |
|
"step": 15530 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 1.008174386920981e-06, |
|
"loss": 0.0317, |
|
"step": 15540 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"learning_rate": 9.400544959128065e-07, |
|
"loss": 0.0599, |
|
"step": 15550 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"learning_rate": 8.719346049046323e-07, |
|
"loss": 0.0561, |
|
"step": 15560 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 8.038147138964579e-07, |
|
"loss": 0.0411, |
|
"step": 15570 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"learning_rate": 7.356948228882834e-07, |
|
"loss": 0.0349, |
|
"step": 15580 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"learning_rate": 6.67574931880109e-07, |
|
"loss": 0.0375, |
|
"step": 15590 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"learning_rate": 5.994550408719347e-07, |
|
"loss": 0.0585, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"eval_loss": 0.2964596450328827, |
|
"eval_runtime": 1427.8876, |
|
"eval_samples_per_second": 4.365, |
|
"eval_steps_per_second": 0.546, |
|
"eval_wer": 0.3143997250649152, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"learning_rate": 5.313351498637603e-07, |
|
"loss": 0.0515, |
|
"step": 15610 |
|
}, |
|
{ |
|
"epoch": 19.92, |
|
"learning_rate": 4.6321525885558585e-07, |
|
"loss": 0.0488, |
|
"step": 15620 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"learning_rate": 3.950953678474115e-07, |
|
"loss": 0.0363, |
|
"step": 15630 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"learning_rate": 3.269754768392371e-07, |
|
"loss": 0.0416, |
|
"step": 15640 |
|
}, |
|
{ |
|
"epoch": 19.96, |
|
"learning_rate": 2.5885558583106267e-07, |
|
"loss": 0.0606, |
|
"step": 15650 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"learning_rate": 1.907356948228883e-07, |
|
"loss": 0.0508, |
|
"step": 15660 |
|
}, |
|
{ |
|
"epoch": 19.99, |
|
"learning_rate": 1.2261580381471392e-07, |
|
"loss": 0.0384, |
|
"step": 15670 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 5.449591280653952e-08, |
|
"loss": 0.0452, |
|
"step": 15680 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 15680, |
|
"total_flos": 0.0, |
|
"train_loss": 0.45149919644986486, |
|
"train_runtime": 150700.5368, |
|
"train_samples_per_second": 6.664, |
|
"train_steps_per_second": 0.104 |
|
} |
|
], |
|
"max_steps": 15680, |
|
"num_train_epochs": 20, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|