{
  "best_metric": 0.5690802335739136,
  "best_model_checkpoint": "ai-light-dance_singing3_ft_wav2vec2-large-xlsr-53-v1/checkpoint-3168",
  "epoch": 19.997402597402598,
  "global_step": 5760,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 1.0000000000000001e-07, "loss": 0.5283, "step": 10 },
    { "epoch": 0.07, "learning_rate": 2.0000000000000002e-07, "loss": 0.528, "step": 20 },
    { "epoch": 0.1, "learning_rate": 2.9000000000000003e-07, "loss": 0.5047, "step": 30 },
    { "epoch": 0.14, "learning_rate": 3.9e-07, "loss": 0.5226, "step": 40 },
    { "epoch": 0.17, "learning_rate": 4.900000000000001e-07, "loss": 0.5815, "step": 50 },
    { "epoch": 0.21, "learning_rate": 5.900000000000001e-07, "loss": 0.4874, "step": 60 },
    { "epoch": 0.24, "learning_rate": 6.900000000000001e-07, "loss": 0.5212, "step": 70 },
    { "epoch": 0.28, "learning_rate": 7.900000000000001e-07, "loss": 0.5171, "step": 80 },
    { "epoch": 0.31, "learning_rate": 8.900000000000001e-07, "loss": 0.5332, "step": 90 },
    { "epoch": 0.35, "learning_rate": 9.9e-07, "loss": 0.563, "step": 100 },
    { "epoch": 0.38, "learning_rate": 1.0900000000000002e-06, "loss": 0.5127, "step": 110 },
    { "epoch": 0.42, "learning_rate": 1.19e-06, "loss": 0.5071, "step": 120 },
    { "epoch": 0.45, "learning_rate": 1.2900000000000001e-06, "loss": 0.5518, "step": 130 },
    { "epoch": 0.48, "learning_rate": 1.3900000000000002e-06, "loss": 0.5234, "step": 140 },
    { "epoch": 0.52, "learning_rate": 1.4900000000000001e-06, "loss": 0.5274, "step": 150 },
    { "epoch": 0.55, "learning_rate": 1.5900000000000002e-06, "loss": 0.529, "step": 160 },
    { "epoch": 0.59, "learning_rate": 1.6900000000000003e-06, "loss": 0.5659, "step": 170 },
    { "epoch": 0.62, "learning_rate": 1.79e-06, "loss": 0.4647, "step": 180 },
    { "epoch": 0.66, "learning_rate": 1.8900000000000001e-06, "loss": 0.4986, "step": 190 },
    { "epoch": 0.69, "learning_rate": 1.9900000000000004e-06, "loss": 0.5279, "step": 200 },
    { "epoch": 0.73, "learning_rate": 2.09e-06, "loss": 0.5265, "step": 210 },
    { "epoch": 0.76, "learning_rate": 2.19e-06, "loss": 0.5067, "step": 220 },
    { "epoch": 0.8, "learning_rate": 2.29e-06, "loss": 0.4832, "step": 230 },
    { "epoch": 0.83, "learning_rate": 2.39e-06, "loss": 0.5363, "step": 240 },
    { "epoch": 0.87, "learning_rate": 2.4900000000000003e-06, "loss": 0.5589, "step": 250 },
    { "epoch": 0.9, "learning_rate": 2.59e-06, "loss": 0.5353, "step": 260 },
    { "epoch": 0.94, "learning_rate": 2.6900000000000005e-06, "loss": 0.5202, "step": 270 },
    { "epoch": 0.97, "learning_rate": 2.7900000000000004e-06, "loss": 0.5352, "step": 280 },
    { "epoch": 1.0, "eval_loss": 0.602483868598938, "eval_runtime": 129.9404, "eval_samples_per_second": 4.094, "eval_steps_per_second": 1.024, "eval_wer": 0.3327119624606287, "step": 288 },
    { "epoch": 1.01, "learning_rate": 2.89e-06, "loss": 0.5971, "step": 290 },
    { "epoch": 1.04, "learning_rate": 2.99e-06, "loss": 0.5174, "step": 300 },
    { "epoch": 1.08, "learning_rate": 3.09e-06, "loss": 0.5195, "step": 310 },
    { "epoch": 1.11, "learning_rate": 3.1900000000000004e-06, "loss": 0.5103, "step": 320 },
    { "epoch": 1.15, "learning_rate": 3.2900000000000003e-06, "loss": 0.546, "step": 330 },
    { "epoch": 1.18, "learning_rate": 3.3900000000000006e-06, "loss": 0.5034, "step": 340 },
    { "epoch": 1.21, "learning_rate": 3.49e-06, "loss": 0.4854, "step": 350 },
    { "epoch": 1.25, "learning_rate": 3.5900000000000004e-06, "loss": 0.4965, "step": 360 },
    { "epoch": 1.28, "learning_rate": 3.6900000000000002e-06, "loss": 0.4936, "step": 370 },
    { "epoch": 1.32, "learning_rate": 3.79e-06, "loss": 0.5177, "step": 380 },
    { "epoch": 1.35, "learning_rate": 3.89e-06, "loss": 0.5723, "step": 390 },
    { "epoch": 1.39, "learning_rate": 3.990000000000001e-06, "loss": 0.5412, "step": 400 },
    { "epoch": 1.42, "learning_rate": 4.09e-06, "loss": 0.5021, "step": 410 },
    { "epoch": 1.46, "learning_rate": 4.1900000000000005e-06, "loss": 0.5059, "step": 420 },
    { "epoch": 1.49, "learning_rate": 4.2900000000000004e-06, "loss": 0.5326, "step": 430 },
    { "epoch": 1.53, "learning_rate": 4.39e-06, "loss": 0.5409, "step": 440 },
    { "epoch": 1.56, "learning_rate": 4.49e-06, "loss": 0.5114, "step": 450 },
    { "epoch": 1.6, "learning_rate": 4.590000000000001e-06, "loss": 0.5381, "step": 460 },
    { "epoch": 1.63, "learning_rate": 4.69e-06, "loss": 0.5171, "step": 470 },
    { "epoch": 1.66, "learning_rate": 4.79e-06, "loss": 0.5357, "step": 480 },
    { "epoch": 1.7, "learning_rate": 4.890000000000001e-06, "loss": 0.567, "step": 490 },
    { "epoch": 1.73, "learning_rate": 4.9900000000000005e-06, "loss": 0.5421, "step": 500 },
    { "epoch": 1.77, "learning_rate": 5.09e-06, "loss": 0.5293, "step": 510 },
    { "epoch": 1.8, "learning_rate": 5.19e-06, "loss": 0.523, "step": 520 },
    { "epoch": 1.84, "learning_rate": 5.290000000000001e-06, "loss": 0.5464, "step": 530 },
    { "epoch": 1.87, "learning_rate": 5.390000000000001e-06, "loss": 0.5473, "step": 540 },
    { "epoch": 1.91, "learning_rate": 5.490000000000001e-06, "loss": 0.5302, "step": 550 },
    { "epoch": 1.94, "learning_rate": 5.590000000000001e-06, "loss": 0.506, "step": 560 },
    { "epoch": 1.98, "learning_rate": 5.69e-06, "loss": 0.5365, "step": 570 },
    { "epoch": 2.0, "eval_loss": 0.6085500717163086, "eval_runtime": 133.1913, "eval_samples_per_second": 3.994, "eval_steps_per_second": 0.999, "eval_wer": 0.34801054187825414, "step": 576 },
    { "epoch": 2.01, "learning_rate": 5.7900000000000005e-06, "loss": 0.5768, "step": 580 },
    { "epoch": 2.05, "learning_rate": 5.89e-06, "loss": 0.541, "step": 590 },
    { "epoch": 2.08, "learning_rate": 5.99e-06, "loss": 0.5115, "step": 600 },
    { "epoch": 2.12, "learning_rate": 6.09e-06, "loss": 0.5329, "step": 610 },
    { "epoch": 2.15, "learning_rate": 6.190000000000001e-06, "loss": 0.5549, "step": 620 },
    { "epoch": 2.19, "learning_rate": 6.290000000000001e-06, "loss": 0.5476, "step": 630 },
    { "epoch": 2.22, "learning_rate": 6.390000000000001e-06, "loss": 0.5519, "step": 640 },
    { "epoch": 2.26, "learning_rate": 6.4900000000000005e-06, "loss": 0.5358, "step": 650 },
    { "epoch": 2.29, "learning_rate": 6.5900000000000004e-06, "loss": 0.5354, "step": 660 },
    { "epoch": 2.33, "learning_rate": 6.690000000000001e-06, "loss": 0.5671, "step": 670 },
    { "epoch": 2.36, "learning_rate": 6.790000000000001e-06, "loss": 0.6002, "step": 680 },
    { "epoch": 2.39, "learning_rate": 6.89e-06, "loss": 0.5269, "step": 690 },
    { "epoch": 2.43, "learning_rate": 6.99e-06, "loss": 0.4979, "step": 700 },
    { "epoch": 2.46, "learning_rate": 7.09e-06, "loss": 0.5041, "step": 710 },
    { "epoch": 2.5, "learning_rate": 7.190000000000001e-06, "loss": 0.5332, "step": 720 },
    { "epoch": 2.53, "learning_rate": 7.2900000000000005e-06, "loss": 0.578, "step": 730 },
    { "epoch": 2.57, "learning_rate": 7.39e-06, "loss": 0.5307, "step": 740 },
    { "epoch": 2.6, "learning_rate": 7.48e-06, "loss": 0.5141, "step": 750 },
    { "epoch": 2.64, "learning_rate": 7.58e-06, "loss": 0.5475, "step": 760 },
    { "epoch": 2.67, "learning_rate": 7.680000000000001e-06, "loss": 0.5673, "step": 770 },
    { "epoch": 2.71, "learning_rate": 7.78e-06, "loss": 0.613, "step": 780 },
    { "epoch": 2.74, "learning_rate": 7.88e-06, "loss": 0.5118, "step": 790 },
    { "epoch": 2.78, "learning_rate": 7.980000000000002e-06, "loss": 0.5186, "step": 800 },
    { "epoch": 2.81, "learning_rate": 8.08e-06, "loss": 0.5052, "step": 810 },
    { "epoch": 2.85, "learning_rate": 8.18e-06, "loss": 0.5333, "step": 820 },
    { "epoch": 2.88, "learning_rate": 8.28e-06, "loss": 0.5374, "step": 830 },
    { "epoch": 2.91, "learning_rate": 8.380000000000001e-06, "loss": 0.5103, "step": 840 },
    { "epoch": 2.95, "learning_rate": 8.48e-06, "loss": 0.5278, "step": 850 },
    { "epoch": 2.98, "learning_rate": 8.580000000000001e-06, "loss": 0.5359, "step": 860 },
    { "epoch": 3.0, "eval_loss": 0.6110973954200745, "eval_runtime": 129.4368, "eval_samples_per_second": 4.11, "eval_steps_per_second": 1.028, "eval_wer": 0.3363116282059523, "step": 864 },
    { "epoch": 3.02, "learning_rate": 8.68e-06, "loss": 0.5897, "step": 870 },
    { "epoch": 3.06, "learning_rate": 8.78e-06, "loss": 0.5366, "step": 880 },
    { "epoch": 3.09, "learning_rate": 8.880000000000001e-06, "loss": 0.5459, "step": 890 },
    { "epoch": 3.12, "learning_rate": 8.98e-06, "loss": 0.5517, "step": 900 },
    { "epoch": 3.16, "learning_rate": 9.080000000000001e-06, "loss": 0.5796, "step": 910 },
    { "epoch": 3.19, "learning_rate": 9.180000000000002e-06, "loss": 0.5524, "step": 920 },
    { "epoch": 3.23, "learning_rate": 9.280000000000001e-06, "loss": 0.5449, "step": 930 },
    { "epoch": 3.26, "learning_rate": 9.38e-06, "loss": 0.522, "step": 940 },
    { "epoch": 3.3, "learning_rate": 9.48e-06, "loss": 0.5555, "step": 950 },
    { "epoch": 3.33, "learning_rate": 9.58e-06, "loss": 0.5739, "step": 960 },
    { "epoch": 3.37, "learning_rate": 9.68e-06, "loss": 0.583, "step": 970 },
    { "epoch": 3.4, "learning_rate": 9.780000000000001e-06, "loss": 0.562, "step": 980 },
    { "epoch": 3.44, "learning_rate": 9.88e-06, "loss": 0.5047, "step": 990 },
    { "epoch": 3.47, "learning_rate": 9.980000000000001e-06, "loss": 0.5506, "step": 1000 },
    { "epoch": 3.51, "learning_rate": 9.983193277310925e-06, "loss": 0.5454, "step": 1010 },
    { "epoch": 3.54, "learning_rate": 9.962184873949581e-06, "loss": 0.5662, "step": 1020 },
    { "epoch": 3.57, "learning_rate": 9.941176470588236e-06, "loss": 0.5528, "step": 1030 },
    { "epoch": 3.61, "learning_rate": 9.920168067226892e-06, "loss": 0.5295, "step": 1040 },
    { "epoch": 3.64, "learning_rate": 9.899159663865548e-06, "loss": 0.533, "step": 1050 },
    { "epoch": 3.68, "learning_rate": 9.878151260504203e-06, "loss": 0.5563, "step": 1060 },
    { "epoch": 3.71, "learning_rate": 9.857142857142859e-06, "loss": 0.5019, "step": 1070 },
    { "epoch": 3.75, "learning_rate": 9.836134453781513e-06, "loss": 0.5082, "step": 1080 },
    { "epoch": 3.78, "learning_rate": 9.815126050420168e-06, "loss": 0.5142, "step": 1090 },
    { "epoch": 3.82, "learning_rate": 9.794117647058824e-06, "loss": 0.5551, "step": 1100 },
    { "epoch": 3.85, "learning_rate": 9.77310924369748e-06, "loss": 0.4937, "step": 1110 },
    { "epoch": 3.89, "learning_rate": 9.752100840336135e-06, "loss": 0.5637, "step": 1120 },
    { "epoch": 3.92, "learning_rate": 9.731092436974791e-06, "loss": 0.5431, "step": 1130 },
    { "epoch": 3.96, "learning_rate": 9.710084033613445e-06, "loss": 0.5548, "step": 1140 },
    { "epoch": 3.99, "learning_rate": 9.689075630252102e-06, "loss": 0.5395, "step": 1150 },
    { "epoch": 4.0, "eval_loss": 0.6081970930099487, "eval_runtime": 131.6568, "eval_samples_per_second": 4.041, "eval_steps_per_second": 1.01, "eval_wer": 0.3415825673330334, "step": 1152 },
    { "epoch": 4.03, "learning_rate": 9.668067226890758e-06, "loss": 0.5705, "step": 1160 },
    { "epoch": 4.06, "learning_rate": 9.647058823529412e-06, "loss": 0.5435, "step": 1170 },
    { "epoch": 4.1, "learning_rate": 9.626050420168068e-06, "loss": 0.4879, "step": 1180 },
    { "epoch": 4.13, "learning_rate": 9.605042016806723e-06, "loss": 0.5412, "step": 1190 },
    { "epoch": 4.17, "learning_rate": 9.584033613445379e-06, "loss": 0.5287, "step": 1200 },
    { "epoch": 4.2, "learning_rate": 9.563025210084035e-06, "loss": 0.5543, "step": 1210 },
    { "epoch": 4.24, "learning_rate": 9.54201680672269e-06, "loss": 0.5252, "step": 1220 },
    { "epoch": 4.27, "learning_rate": 9.521008403361344e-06, "loss": 0.5034, "step": 1230 },
    { "epoch": 4.3, "learning_rate": 9.5e-06, "loss": 0.5514, "step": 1240 },
    { "epoch": 4.34, "learning_rate": 9.478991596638657e-06, "loss": 0.5893, "step": 1250 },
    { "epoch": 4.37, "learning_rate": 9.457983193277311e-06, "loss": 0.5859, "step": 1260 },
    { "epoch": 4.41, "learning_rate": 9.436974789915967e-06, "loss": 0.5404, "step": 1270 },
    { "epoch": 4.44, "learning_rate": 9.415966386554622e-06, "loss": 0.5362, "step": 1280 },
    { "epoch": 4.48, "learning_rate": 9.394957983193278e-06, "loss": 0.5252, "step": 1290 },
    { "epoch": 4.51, "learning_rate": 9.373949579831934e-06, "loss": 0.5532, "step": 1300 },
    { "epoch": 4.55, "learning_rate": 9.352941176470589e-06, "loss": 0.5605, "step": 1310 },
    { "epoch": 4.58, "learning_rate": 9.331932773109245e-06, "loss": 0.5585, "step": 1320 },
    { "epoch": 4.62, "learning_rate": 9.3109243697479e-06, "loss": 0.5177, "step": 1330 },
    { "epoch": 4.65, "learning_rate": 9.289915966386556e-06, "loss": 0.5592, "step": 1340 },
    { "epoch": 4.69, "learning_rate": 9.268907563025212e-06, "loss": 0.5556, "step": 1350 },
    { "epoch": 4.72, "learning_rate": 9.247899159663866e-06, "loss": 0.5304, "step": 1360 },
    { "epoch": 4.75, "learning_rate": 9.226890756302523e-06, "loss": 0.5362, "step": 1370 },
    { "epoch": 4.79, "learning_rate": 9.205882352941177e-06, "loss": 0.5443, "step": 1380 },
    { "epoch": 4.82, "learning_rate": 9.184873949579832e-06, "loss": 0.5354, "step": 1390 },
    { "epoch": 4.86, "learning_rate": 9.163865546218488e-06, "loss": 0.5676, "step": 1400 },
    { "epoch": 4.89, "learning_rate": 9.142857142857144e-06, "loss": 0.5385, "step": 1410 },
    { "epoch": 4.93, "learning_rate": 9.121848739495798e-06, "loss": 0.5218, "step": 1420 },
    { "epoch": 4.96, "learning_rate": 9.100840336134455e-06, "loss": 0.5123, "step": 1430 },
    { "epoch": 5.0, "learning_rate": 9.07983193277311e-06, "loss": 0.5692, "step": 1440 },
    { "epoch": 5.0, "eval_loss": 0.5948615074157715, "eval_runtime": 131.2811, "eval_samples_per_second": 4.052, "eval_steps_per_second": 1.013, "eval_wer": 0.33181204602429776, "step": 1440 },
    { "epoch": 5.03, "learning_rate": 9.058823529411765e-06, "loss": 0.5519, "step": 1450 },
    { "epoch": 5.07, "learning_rate": 9.037815126050421e-06, "loss": 0.5099, "step": 1460 },
    { "epoch": 5.1, "learning_rate": 9.016806722689076e-06, "loss": 0.4983, "step": 1470 },
    { "epoch": 5.14, "learning_rate": 8.995798319327732e-06, "loss": 0.5158, "step": 1480 },
    { "epoch": 5.17, "learning_rate": 8.974789915966388e-06, "loss": 0.5672, "step": 1490 },
    { "epoch": 5.21, "learning_rate": 8.953781512605043e-06, "loss": 0.5244, "step": 1500 },
    { "epoch": 5.24, "learning_rate": 8.932773109243699e-06, "loss": 0.4964, "step": 1510 },
    { "epoch": 5.28, "learning_rate": 8.911764705882354e-06, "loss": 0.5016, "step": 1520 },
    { "epoch": 5.31, "learning_rate": 8.890756302521008e-06, "loss": 0.532, "step": 1530 },
    { "epoch": 5.35, "learning_rate": 8.869747899159664e-06, "loss": 0.5434, "step": 1540 },
    { "epoch": 5.38, "learning_rate": 8.84873949579832e-06, "loss": 0.5334, "step": 1550 },
    { "epoch": 5.42, "learning_rate": 8.827731092436975e-06, "loss": 0.5062, "step": 1560 },
    { "epoch": 5.45, "learning_rate": 8.806722689075631e-06, "loss": 0.5314, "step": 1570 },
    { "epoch": 5.48, "learning_rate": 8.785714285714286e-06, "loss": 0.5543, "step": 1580 },
    { "epoch": 5.52, "learning_rate": 8.764705882352942e-06, "loss": 0.5478, "step": 1590 },
    { "epoch": 5.55, "learning_rate": 8.743697478991598e-06, "loss": 0.5225, "step": 1600 },
    { "epoch": 5.59, "learning_rate": 8.722689075630252e-06, "loss": 0.5084, "step": 1610 },
    { "epoch": 5.62, "learning_rate": 8.701680672268909e-06, "loss": 0.5226, "step": 1620 },
    { "epoch": 5.66, "learning_rate": 8.680672268907563e-06, "loss": 0.5181, "step": 1630 },
    { "epoch": 5.69, "learning_rate": 8.65966386554622e-06, "loss": 0.5648, "step": 1640 },
    { "epoch": 5.73, "learning_rate": 8.638655462184876e-06, "loss": 0.537, "step": 1650 },
    { "epoch": 5.76, "learning_rate": 8.61764705882353e-06, "loss": 0.5747, "step": 1660 },
    { "epoch": 5.8, "learning_rate": 8.596638655462186e-06, "loss": 0.5388, "step": 1670 },
    { "epoch": 5.83, "learning_rate": 8.57563025210084e-06, "loss": 0.5336, "step": 1680 },
    { "epoch": 5.87, "learning_rate": 8.554621848739497e-06, "loss": 0.5465, "step": 1690 },
    { "epoch": 5.9, "learning_rate": 8.533613445378151e-06, "loss": 0.5231, "step": 1700 },
    { "epoch": 5.94, "learning_rate": 8.512605042016808e-06, "loss": 0.5345, "step": 1710 },
    { "epoch": 5.97, "learning_rate": 8.491596638655462e-06, "loss": 0.5592, "step": 1720 },
    { "epoch": 6.0, "eval_loss": 0.6045897006988525, "eval_runtime": 132.0329, "eval_samples_per_second": 4.029, "eval_steps_per_second": 1.007, "eval_wer": 0.3322620042424632, "step": 1728 },
    { "epoch": 6.01, "learning_rate": 8.470588235294118e-06, "loss": 0.5959, "step": 1730 },
    { "epoch": 6.04, "learning_rate": 8.449579831932774e-06, "loss": 0.5532, "step": 1740 },
    { "epoch": 6.08, "learning_rate": 8.428571428571429e-06, "loss": 0.5406, "step": 1750 },
    { "epoch": 6.11, "learning_rate": 8.407563025210085e-06, "loss": 0.5133, "step": 1760 },
    { "epoch": 6.15, "learning_rate": 8.38655462184874e-06, "loss": 0.5282, "step": 1770 },
    { "epoch": 6.18, "learning_rate": 8.365546218487396e-06, "loss": 0.5323, "step": 1780 },
    { "epoch": 6.21, "learning_rate": 8.344537815126052e-06, "loss": 0.5416, "step": 1790 },
    { "epoch": 6.25, "learning_rate": 8.323529411764707e-06, "loss": 0.469, "step": 1800 },
    { "epoch": 6.28, "learning_rate": 8.302521008403363e-06, "loss": 0.4816, "step": 1810 },
    { "epoch": 6.32, "learning_rate": 8.281512605042017e-06, "loss": 0.5499, "step": 1820 },
    { "epoch": 6.35, "learning_rate": 8.260504201680672e-06, "loss": 0.5841, "step": 1830 },
    { "epoch": 6.39, "learning_rate": 8.239495798319328e-06, "loss": 0.5092, "step": 1840 },
    { "epoch": 6.42, "learning_rate": 8.218487394957984e-06, "loss": 0.5167, "step": 1850 },
    { "epoch": 6.46, "learning_rate": 8.197478991596639e-06, "loss": 0.5315, "step": 1860 },
    { "epoch": 6.49, "learning_rate": 8.176470588235295e-06, "loss": 0.4991, "step": 1870 },
    { "epoch": 6.53, "learning_rate": 8.155462184873951e-06, "loss": 0.5728, "step": 1880 },
    { "epoch": 6.56, "learning_rate": 8.134453781512605e-06, "loss": 0.534, "step": 1890 },
    { "epoch": 6.6, "learning_rate": 8.113445378151262e-06, "loss": 0.5377, "step": 1900 },
    { "epoch": 6.63, "learning_rate": 8.092436974789916e-06, "loss": 0.5285, "step": 1910 },
    { "epoch": 6.66, "learning_rate": 8.071428571428572e-06, "loss": 0.5373, "step": 1920 },
    { "epoch": 6.7, "learning_rate": 8.050420168067229e-06, "loss": 0.5336, "step": 1930 },
    { "epoch": 6.73, "learning_rate": 8.029411764705883e-06, "loss": 0.4982, "step": 1940 },
    { "epoch": 6.77, "learning_rate": 8.00840336134454e-06, "loss": 0.534, "step": 1950 },
    { "epoch": 6.8, "learning_rate": 7.987394957983194e-06, "loss": 0.4873, "step": 1960 },
    { "epoch": 6.84, "learning_rate": 7.966386554621848e-06, "loss": 0.5475, "step": 1970 },
    { "epoch": 6.87, "learning_rate": 7.945378151260504e-06, "loss": 0.5666, "step": 1980 },
    { "epoch": 6.91, "learning_rate": 7.92436974789916e-06, "loss": 0.5062, "step": 1990 },
    { "epoch": 6.94, "learning_rate": 7.903361344537815e-06, "loss": 0.4941, "step": 2000 },
    { "epoch": 6.98, "learning_rate": 7.882352941176471e-06, "loss": 0.5172, "step": 2010 },
    { "epoch": 7.0, "eval_loss": 0.5837918519973755, "eval_runtime": 131.1239, "eval_samples_per_second": 4.057, "eval_steps_per_second": 1.014, "eval_wer": 0.31850613871569067, "step": 2016 },
    { "epoch": 7.01, "learning_rate": 7.861344537815126e-06, "loss": 0.5684, "step": 2020 },
    { "epoch": 7.05, "learning_rate": 7.840336134453782e-06, "loss": 0.5345, "step": 2030 },
    { "epoch": 7.08, "learning_rate": 7.819327731092438e-06, "loss": 0.4978, "step": 2040 },
    { "epoch": 7.12, "learning_rate": 7.798319327731093e-06, "loss": 0.5151, "step": 2050 },
    { "epoch": 7.15, "learning_rate": 7.777310924369749e-06, "loss": 0.519, "step": 2060 },
    { "epoch": 7.19, "learning_rate": 7.756302521008405e-06, "loss": 0.5289, "step": 2070 },
    { "epoch": 7.22, "learning_rate": 7.73529411764706e-06, "loss": 0.5416, "step": 2080 },
    { "epoch": 7.26, "learning_rate": 7.714285714285716e-06, "loss": 0.5011, "step": 2090 },
    { "epoch": 7.29, "learning_rate": 7.69327731092437e-06, "loss": 0.5269, "step": 2100 },
    { "epoch": 7.33, "learning_rate": 7.672268907563026e-06, "loss": 0.5394, "step": 2110 },
    { "epoch": 7.36, "learning_rate": 7.651260504201681e-06, "loss": 0.5125, "step": 2120 },
    { "epoch": 7.39, "learning_rate": 7.630252100840337e-06, "loss": 0.5249, "step": 2130 },
    { "epoch": 7.43, "learning_rate": 7.6092436974789916e-06, "loss": 0.5097, "step": 2140 },
    { "epoch": 7.46, "learning_rate": 7.588235294117648e-06, "loss": 0.5186, "step": 2150 },
    { "epoch": 7.5, "learning_rate": 7.567226890756303e-06, "loss": 0.5011, "step": 2160 },
    { "epoch": 7.53, "learning_rate": 7.5462184873949584e-06, "loss": 0.5464, "step": 2170 },
    { "epoch": 7.57, "learning_rate": 7.525210084033614e-06, "loss": 0.5232, "step": 2180 },
    { "epoch": 7.6, "learning_rate": 7.504201680672269e-06, "loss": 0.4948, "step": 2190 },
    { "epoch": 7.64, "learning_rate": 7.483193277310925e-06, "loss": 0.5235, "step": 2200 },
    { "epoch": 7.67, "learning_rate": 7.462184873949581e-06, "loss": 0.5354, "step": 2210 },
    { "epoch": 7.71, "learning_rate": 7.441176470588236e-06, "loss": 0.544, "step": 2220 },
    { "epoch": 7.74, "learning_rate": 7.420168067226891e-06, "loss": 0.5063, "step": 2230 },
    { "epoch": 7.78, "learning_rate": 7.3991596638655475e-06, "loss": 0.5067, "step": 2240 },
    { "epoch": 7.81, "learning_rate": 7.378151260504203e-06, "loss": 0.4885, "step": 2250 },
    { "epoch": 7.85, "learning_rate": 7.357142857142858e-06, "loss": 0.5029, "step": 2260 },
    { "epoch": 7.88, "learning_rate": 7.336134453781513e-06, "loss": 0.5642, "step": 2270 },
    { "epoch": 7.91, "learning_rate": 7.315126050420168e-06, "loss": 0.5236, "step": 2280 },
    { "epoch": 7.95, "learning_rate": 7.294117647058823e-06, "loss": 0.4973, "step": 2290 },
    { "epoch": 7.98, "learning_rate": 7.27310924369748e-06, "loss": 0.5108, "step": 2300 },
    { "epoch": 8.0, "eval_loss": 0.6065585613250732, "eval_runtime": 133.6934, "eval_samples_per_second": 3.979, "eval_steps_per_second": 0.995, "eval_wer": 0.3211416082792312, "step": 2304 },
    { "epoch": 8.02, "learning_rate": 7.252100840336135e-06, "loss": 0.5786, "step": 2310 },
    { "epoch": 8.06, "learning_rate": 7.23109243697479e-06, "loss": 0.5407, "step": 2320 },
    { "epoch": 8.09, "learning_rate": 7.210084033613446e-06, "loss": 0.4974, "step": 2330 },
    { "epoch": 8.12, "learning_rate": 7.189075630252102e-06, "loss": 0.499, "step": 2340 },
    { "epoch": 8.16, "learning_rate": 7.168067226890757e-06, "loss": 0.5148, "step": 2350 },
    { "epoch": 8.19, "learning_rate": 7.1470588235294125e-06, "loss": 0.5337, "step": 2360 },
    { "epoch": 8.23, "learning_rate": 7.126050420168068e-06, "loss": 0.5337, "step": 2370 },
    { "epoch": 8.26, "learning_rate": 7.105042016806723e-06, "loss": 0.4786, "step": 2380 },
    { "epoch": 8.3, "learning_rate": 7.084033613445379e-06, "loss": 0.519, "step": 2390 },
    { "epoch": 8.33, "learning_rate": 7.063025210084035e-06, "loss": 0.535, "step": 2400 },
    { "epoch": 8.37, "learning_rate": 7.04201680672269e-06, "loss": 0.5086, "step": 2410 },
    { "epoch": 8.4, "learning_rate": 7.0210084033613446e-06, "loss": 0.5093, "step": 2420 },
    { "epoch": 8.44, "learning_rate": 7e-06, "loss": 0.475, "step": 2430 },
    { "epoch": 8.47, "learning_rate": 6.978991596638656e-06, "loss": 0.5045, "step": 2440 },
    { "epoch": 8.51, "learning_rate": 6.9579831932773114e-06, "loss": 0.5231, "step": 2450 },
    { "epoch": 8.54, "learning_rate": 6.936974789915967e-06, "loss": 0.4882, "step": 2460 },
    { "epoch": 8.57, "learning_rate": 6.915966386554622e-06, "loss": 0.5021, "step": 2470 },
    { "epoch": 8.61, "learning_rate": 6.8949579831932775e-06, "loss": 0.5154, "step": 2480 },
    { "epoch": 8.64, "learning_rate": 6.873949579831934e-06, "loss": 0.5142, "step": 2490 },
    { "epoch": 8.68, "learning_rate": 6.852941176470589e-06, "loss": 0.5319, "step": 2500 },
    { "epoch": 8.71, "learning_rate": 6.831932773109244e-06, "loss": 0.5445, "step": 2510 },
    { "epoch": 8.75, "learning_rate": 6.8109243697479e-06, "loss": 0.5134, "step": 2520 },
    { "epoch": 8.78, "learning_rate": 6.789915966386556e-06, "loss": 0.5008, "step": 2530 },
    { "epoch": 8.82, "learning_rate": 6.768907563025211e-06, "loss": 0.4979, "step": 2540 },
    { "epoch": 8.85, "learning_rate": 6.7478991596638666e-06, "loss": 0.5292, "step": 2550 },
    { "epoch": 8.89, "learning_rate": 6.726890756302522e-06, "loss": 0.5521, "step": 2560 },
    { "epoch": 8.92, "learning_rate": 6.705882352941176e-06, "loss": 0.5019, "step": 2570 },
    { "epoch": 8.96, "learning_rate": 6.684873949579832e-06, "loss": 0.5079, "step": 2580 },
    { "epoch": 8.99, "learning_rate": 6.663865546218488e-06, "loss": 0.4981, "step": 2590 },
    { "epoch": 9.0, "eval_loss": 0.5958260297775269, "eval_runtime": 131.9283, "eval_samples_per_second": 4.032, "eval_steps_per_second": 1.008, "eval_wer": 0.31638490711576783, "step": 2592 },
    { "epoch": 9.03, "learning_rate": 6.642857142857143e-06, "loss": 0.523, "step": 2600 },
    { "epoch": 9.06, "learning_rate": 6.621848739495799e-06, "loss": 0.503, "step": 2610 },
    { "epoch": 9.1, "learning_rate": 6.600840336134454e-06, "loss": 0.5157, "step": 2620 },
    { "epoch": 9.13, "learning_rate": 6.57983193277311e-06, "loss": 0.501, "step": 2630 },
    { "epoch": 9.17, "learning_rate": 6.5588235294117655e-06, "loss": 0.5101, "step": 2640 },
    { "epoch": 9.2, "learning_rate": 6.537815126050421e-06, "loss": 0.5184, "step": 2650 },
    { "epoch": 9.24, "learning_rate": 6.516806722689076e-06, "loss": 0.5178, "step": 2660 },
    { "epoch": 9.27, "learning_rate": 6.4957983193277315e-06, "loss": 0.4754, "step": 2670 },
    { "epoch": 9.3, "learning_rate": 6.474789915966388e-06, "loss": 0.5115, "step": 2680 },
    { "epoch": 9.34, "learning_rate": 6.453781512605043e-06, "loss": 0.5263, "step": 2690 },
    { "epoch": 9.37, "learning_rate": 6.432773109243698e-06, "loss": 0.5424, "step": 2700 },
    { "epoch": 9.41, "learning_rate": 6.411764705882354e-06, "loss": 0.4897, "step": 2710 },
    { "epoch": 9.44, "learning_rate": 6.390756302521008e-06, "loss": 0.4653, "step": 2720 },
    { "epoch": 9.48, "learning_rate": 6.3697478991596636e-06, "loss": 0.524, "step": 2730 },
    { "epoch": 9.51, "learning_rate": 6.34873949579832e-06, "loss": 0.4786, "step": 2740 },
    { "epoch": 9.55, "learning_rate": 6.327731092436975e-06, "loss": 0.5414, "step": 2750 },
    { "epoch": 9.58, "learning_rate": 6.3067226890756304e-06, "loss": 0.4963, "step": 2760 },
    { "epoch": 9.62, "learning_rate": 6.285714285714286e-06, "loss": 0.483, "step": 2770 },
    { "epoch": 9.65, "learning_rate": 6.264705882352942e-06, "loss": 0.5189, "step": 2780 },
    { "epoch": 9.69, "learning_rate": 6.243697478991597e-06, "loss": 0.5364, "step": 2790 },
    { "epoch": 9.72, "learning_rate": 6.222689075630253e-06, "loss": 0.5128, "step": 2800 },
    { "epoch": 9.75, "learning_rate": 6.201680672268908e-06, "loss": 0.512, "step": 2810 },
    { "epoch": 9.79, "learning_rate": 6.180672268907563e-06, "loss": 0.4781, "step": 2820 },
    { "epoch": 9.82, "learning_rate": 6.1596638655462195e-06, "loss": 0.5159, "step": 2830 },
    { "epoch": 9.86, "learning_rate": 6.138655462184875e-06, "loss": 0.5274, "step": 2840 },
    { "epoch": 9.89, "learning_rate": 6.11764705882353e-06, "loss": 0.494, "step": 2850 },
    { "epoch": 9.93, "learning_rate": 6.0966386554621856e-06, "loss": 0.5349, "step": 2860 },
    { "epoch": 9.96, "learning_rate": 6.07563025210084e-06, "loss": 0.5046, "step": 2870 },
    { "epoch": 10.0, "learning_rate": 6.054621848739496e-06, "loss": 0.5193, "step": 2880 },
    { "epoch": 10.0, "eval_loss": 0.5888818502426147, "eval_runtime": 132.9604, "eval_samples_per_second": 4.001, "eval_steps_per_second": 1.0, "eval_wer": 0.3143922350067494, "step": 2880 },
    { "epoch": 10.03, "learning_rate": 6.033613445378152e-06, "loss": 0.4943, "step": 2890 },
    { "epoch": 10.07, "learning_rate": 6.012605042016807e-06, "loss": 0.478, "step": 2900 },
    { "epoch": 10.1, "learning_rate": 5.991596638655462e-06, "loss": 0.4735, "step": 2910 },
    { "epoch": 10.14, "learning_rate": 5.970588235294118e-06, "loss": 0.5069, "step": 2920 },
    { "epoch": 10.17, "learning_rate": 5.949579831932774e-06, "loss": 0.5395, "step": 2930 },
    { "epoch": 10.21, "learning_rate": 5.928571428571429e-06, "loss": 0.47, "step": 2940 },
    { "epoch": 10.24, "learning_rate": 5.9075630252100845e-06, "loss": 0.4725, "step": 2950 },
    { "epoch": 10.28, "learning_rate": 5.88655462184874e-06, "loss": 0.4931, "step": 2960 },
    { "epoch": 10.31, "learning_rate": 5.865546218487396e-06, "loss": 0.4759, "step": 2970 },
    { "epoch": 10.35, "learning_rate": 5.844537815126051e-06, "loss": 0.529, "step": 2980 },
    { "epoch": 10.38, "learning_rate": 5.823529411764707e-06, "loss": 0.5001, "step": 2990 },
    { "epoch": 10.42, "learning_rate": 5.802521008403362e-06, "loss": 0.5389, "step": 3000 },
    { "epoch": 10.45, "learning_rate": 5.781512605042017e-06, "loss": 0.5405, "step": 3010 },
    { "epoch": 10.48, "learning_rate": 5.760504201680672e-06, "loss": 0.5206, "step": 3020 },
    { "epoch": 10.52, "learning_rate": 5.739495798319328e-06, "loss": 0.557, "step": 3030 },
    { "epoch": 10.55, "learning_rate": 5.7184873949579834e-06, "loss": 0.5228, "step": 3040 },
    { "epoch": 10.59, "learning_rate": 5.697478991596639e-06, "loss": 0.5261, "step": 3050 },
    { "epoch": 10.62, "learning_rate": 5.676470588235294e-06, "loss": 0.4701, "step": 3060 },
    { "epoch": 10.66, "learning_rate": 5.65546218487395e-06, "loss": 0.4977, "step": 3070 },
    { "epoch": 10.69, "learning_rate": 5.634453781512606e-06, "loss": 0.5027, "step": 3080 },
    { "epoch": 10.73, "learning_rate": 5.613445378151261e-06, "loss": 0.506, "step": 3090 },
    { "epoch": 10.76, "learning_rate": 5.592436974789916e-06, "loss": 0.5185, "step": 3100 },
    { "epoch": 10.8, "learning_rate": 5.571428571428572e-06, "loss": 0.5177, "step": 3110 },
    { "epoch": 10.83, "learning_rate": 5.550420168067228e-06, "loss": 0.5073, "step": 3120 },
    { "epoch": 10.87, "learning_rate": 5.529411764705883e-06, "loss": 0.525, "step": 3130 },
    { "epoch": 10.9, "learning_rate": 5.5084033613445386e-06, "loss": 0.5193, "step": 3140 },
    { "epoch": 10.94, "learning_rate": 5.487394957983194e-06, "loss": 0.4716, "step": 3150 },
    { "epoch": 10.97, "learning_rate": 5.466386554621848e-06, "loss": 0.4988, "step": 3160 },
    { "epoch": 11.0, "eval_loss": 0.5690802335739136, "eval_runtime": 134.0737, "eval_samples_per_second": 3.968, "eval_steps_per_second": 0.992, "eval_wer": 0.31066400977052133, "step": 3168 },
    { "epoch": 11.01, "learning_rate": 5.445378151260505e-06, "loss": 0.5836, "step": 3170 },
    { "epoch": 11.04, "learning_rate": 5.42436974789916e-06, "loss": 0.4776, "step": 3180 },
    { "epoch": 11.08, "learning_rate": 5.403361344537815e-06, "loss": 0.4806, "step": 3190 },
    { "epoch": 11.11, "learning_rate": 5.382352941176471e-06, "loss": 0.4893, "step": 3200 },
    { "epoch": 11.15, "learning_rate": 5.361344537815126e-06, "loss": 0.5033, "step": 3210 },
    { "epoch": 11.18, "learning_rate": 5.340336134453782e-06, "loss": 0.5383, "step": 3220 },
    { "epoch": 11.21, "learning_rate": 5.3193277310924375e-06, "loss": 0.5112, "step": 3230 },
    { "epoch": 11.25, "learning_rate": 5.298319327731093e-06, "loss": 0.5234, "step": 3240 },
    { "epoch": 11.28, "learning_rate": 5.277310924369748e-06, "loss": 0.4606, "step": 3250 },
    { "epoch": 11.32, "learning_rate": 5.256302521008404e-06, "loss": 0.4986, "step": 3260 },
    { "epoch": 11.35, "learning_rate": 5.23529411764706e-06, "loss": 0.5325, "step": 3270 },
    { "epoch": 11.39, "learning_rate": 5.214285714285715e-06, "loss": 0.5185, "step": 3280 },
    { "epoch": 11.42, "learning_rate": 5.19327731092437e-06, "loss": 0.459, "step": 3290 },
    { "epoch": 11.46, "learning_rate": 5.172268907563026e-06, "loss": 0.5038, "step": 3300 },
    { "epoch": 11.49, "learning_rate": 5.15126050420168e-06, "loss": 0.5127, "step": 3310 },
    { "epoch": 11.53, "learning_rate": 5.1302521008403364e-06, "loss": 0.5238, "step": 3320 },
    { "epoch": 11.56, "learning_rate": 5.109243697478992e-06, "loss": 0.4919, "step": 3330 },
    { "epoch": 11.6, "learning_rate": 5.088235294117647e-06, "loss": 0.4719, "step": 3340 },
    { "epoch": 11.63, "learning_rate": 5.0672268907563025e-06, "loss": 0.4991, "step": 3350 },
    { "epoch": 11.66, "learning_rate": 5.046218487394959e-06, "loss": 0.5098, "step": 3360 },
    { "epoch": 11.7, "learning_rate": 5.025210084033614e-06, "loss": 0.5066, "step": 3370 },
    { "epoch": 11.73, "learning_rate": 5.004201680672269e-06, "loss": 0.4925, "step": 3380 },
    { "epoch": 11.77, "learning_rate": 4.983193277310925e-06, "loss": 0.4996, "step": 3390 },
    { "epoch": 11.8, "learning_rate": 4.96218487394958e-06, "loss": 0.5129, "step": 3400 },
    { "epoch": 11.84, "learning_rate": 4.941176470588236e-06, "loss": 0.4944, "step": 3410 },
    { "epoch": 11.87, "learning_rate": 4.920168067226891e-06, "loss": 0.4929, "step": 3420 },
    { "epoch": 11.91, "learning_rate": 4.899159663865546e-06, "loss": 0.4904, "step": 3430 },
    { "epoch": 11.94, "learning_rate": 4.878151260504202e-06, "loss": 0.4992, "step": 3440 },
    { "epoch": 11.98, "learning_rate": 4.857142857142858e-06, "loss": 0.4966, "step": 3450 },
    { "epoch": 12.0, "eval_loss": 0.5908366441726685, "eval_runtime": 130.8263, "eval_samples_per_second": 4.066, "eval_steps_per_second": 1.017, "eval_wer": 0.31272096162499197, "step": 3456 },
    { "epoch": 12.01, "learning_rate": 4.836134453781513e-06, "loss": 0.5368, "step": 3460 },
    { "epoch": 12.05, "learning_rate": 4.815126050420168e-06, "loss": 0.509, "step": 3470 },
    { "epoch": 12.08, "learning_rate": 4.7941176470588245e-06, "loss": 0.4688, "step": 3480 },
    { "epoch": 12.12, "learning_rate": 4.77310924369748e-06, "loss": 0.4622, "step": 3490 },
    { "epoch": 12.15, "learning_rate": 4.752100840336134e-06, "loss": 0.506, "step": 3500 },
    { "epoch": 12.19, "learning_rate": 4.7310924369747905e-06, "loss": 0.4969, "step": 3510 },
    { "epoch": 12.22, "learning_rate": 4.710084033613446e-06, "loss": 0.4873, "step": 3520 },
    { "epoch": 12.26, "learning_rate": 4.689075630252101e-06, "loss": 0.489, "step": 3530 },
    { "epoch": 12.29, "learning_rate": 4.6680672268907565e-06, "loss": 0.4942, "step": 3540 },
    { "epoch": 12.33, "learning_rate": 4.647058823529412e-06, "loss": 0.4939, "step": 3550 },
    { "epoch": 12.36, "learning_rate": 4.626050420168068e-06, "loss": 0.5565, "step": 3560 },
    { "epoch": 12.39, "learning_rate": 4.6050420168067226e-06, "loss": 0.5037, "step": 3570 },
    { "epoch": 12.43, "learning_rate": 4.584033613445379e-06, "loss": 0.4923, "step": 3580 },
    { "epoch": 12.46, "learning_rate": 4.563025210084034e-06, "loss": 0.4823, "step": 3590 },
    { "epoch": 12.5, "learning_rate": 4.5420168067226894e-06, "loss": 0.5044, "step": 3600 },
    { "epoch": 12.53, "learning_rate": 4.521008403361345e-06, "loss": 0.4982, "step": 3610 },
    { "epoch": 12.57, "learning_rate": 4.5e-06, "loss": 0.4721, "step": 3620 },
    { "epoch": 12.6, "learning_rate": 4.478991596638656e-06, "loss": 0.4986, "step": 3630 },
    { "epoch": 12.64, "learning_rate": 4.457983193277312e-06, "loss": 0.4926, "step": 3640 },
    { "epoch": 12.67, "learning_rate": 4.436974789915966e-06, "loss": 0.4886, "step": 3650 },
    { "epoch": 12.71, "learning_rate": 4.415966386554622e-06, "loss": 0.492, "step": 3660 },
    { "epoch": 12.74, "learning_rate": 4.394957983193278e-06, "loss": 0.5318, "step": 3670 },
    { "epoch": 12.78, "learning_rate": 4.373949579831933e-06, "loss": 0.4788, "step": 3680 },
    { "epoch": 12.81, "learning_rate": 4.352941176470588e-06, "loss": 0.4754, "step": 3690 },
    { "epoch": 12.85, "learning_rate": 4.3319327731092446e-06, "loss": 0.5176, "step": 3700 },
    { "epoch": 12.88, "learning_rate": 4.3109243697479e-06, "loss": 0.5241, "step": 3710 },
    { "epoch": 12.91, "learning_rate": 4.289915966386554e-06, "loss": 0.4888, "step": 3720 },
    { "epoch": 12.95, "learning_rate": 4.268907563025211e-06, "loss": 0.4823, "step": 3730 },
    { "epoch": 12.98, "learning_rate": 4.247899159663866e-06, "loss": 0.4801, "step": 3740 },
    { "epoch": 13.0, "eval_loss": 0.58124178647995, "eval_runtime": 131.995, "eval_samples_per_second": 4.03, "eval_steps_per_second": 1.008, "eval_wer": 0.31098540849778233, "step": 3744 },
    { "epoch": 13.02, "learning_rate": 4.226890756302521e-06, "loss": 0.5296, "step": 3750 },
    { "epoch": 13.06, "learning_rate": 4.205882352941177e-06, "loss": 0.5093, "step": 3760 },
    { "epoch": 13.09, "learning_rate": 4.184873949579833e-06, "loss": 0.4723, "step": 3770 },
    { "epoch": 13.12, "learning_rate": 4.163865546218488e-06, "loss": 0.457, "step": 3780 },
    { "epoch": 13.16, "learning_rate": 4.1428571428571435e-06, "loss": 0.4917, "step": 3790 },
    { "epoch": 13.19, "learning_rate": 4.121848739495799e-06, "loss": 0.5177, "step": 3800 },
    { "epoch": 13.23, "learning_rate": 4.100840336134454e-06, "loss": 0.4622, "step": 3810 },
    { "epoch": 13.26, "learning_rate": 4.0798319327731095e-06, "loss": 0.5018, "step": 3820 },
    { "epoch": 13.3, "learning_rate": 4.058823529411765e-06, "loss": 0.4981, "step": 3830 },
    { "epoch": 13.33, "learning_rate": 4.03781512605042e-06, "loss": 0.4718, "step": 3840 },
    { "epoch": 13.37, "learning_rate": 4.016806722689076e-06, "loss": 0.4832, "step": 3850 },
    { "epoch": 13.4, "learning_rate": 3.995798319327732e-06, "loss": 0.5347, "step": 3860 },
    { "epoch": 13.44, "learning_rate": 3.974789915966386e-06, "loss": 0.4505, "step": 3870 },
    { "epoch": 13.47, "learning_rate": 3.953781512605042e-06, "loss": 0.4903, "step": 3880 },
    { "epoch": 13.51, "learning_rate": 3.932773109243698e-06, "loss": 0.5229, "step": 3890 },
    { "epoch": 13.54, "learning_rate": 3.911764705882353e-06, "loss": 0.5046, "step": 3900 },
    { "epoch": 13.57, "learning_rate": 3.8907563025210084e-06, "loss": 0.4707, "step": 3910 },
    { "epoch": 13.61, "learning_rate": 3.869747899159665e-06, "loss": 0.4842, "step": 3920 },
    { "epoch": 13.64, "learning_rate": 3.84873949579832e-06, "loss": 0.4994, "step": 3930 },
    { "epoch": 13.68, "learning_rate": 3.8277310924369745e-06, "loss": 0.5029, "step": 3940 },
    { "epoch": 13.71, "learning_rate": 3.8067226890756302e-06, "loss": 0.4938, "step": 3950 },
    { "epoch": 13.75, "learning_rate": 3.785714285714286e-06, "loss": 0.5022, "step": 3960 },
    { "epoch": 13.78, "learning_rate": 3.7647058823529414e-06, "loss": 0.4759, "step": 3970 },
    { "epoch": 13.82, "learning_rate": 3.743697478991597e-06, "loss": 0.5091, "step": 3980 },
    { "epoch": 13.85, "learning_rate": 3.7226890756302525e-06, "loss": 0.4843, "step": 3990 },
    { "epoch": 13.89, "learning_rate": 3.7016806722689082e-06, "loss": 0.5043, "step": 4000 },
    { "epoch": 13.92, "learning_rate": 3.6806722689075636e-06, "loss": 0.4884, "step": 4010 },
    { "epoch": 13.96, "learning_rate": 3.6596638655462185e-06, "loss": 0.4764, "step": 4020 },
    { "epoch": 13.99, "learning_rate": 3.6386554621848743e-06, "loss": 0.5025, "step": 4030 },
    { "epoch": 14.0, "eval_loss": 0.5805001258850098, "eval_runtime": 132.3722, "eval_samples_per_second": 4.019, "eval_steps_per_second": 1.005, "eval_wer": 0.3046217136980138, "step": 4032 },
    { "epoch": 14.03, "learning_rate": 3.6176470588235296e-06, "loss": 0.5226, "step": 4040 },
    { "epoch": 14.06, "learning_rate": 3.5966386554621854e-06, "loss": 0.4922, "step": 4050 },
    { "epoch": 14.1, "learning_rate": 3.5756302521008407e-06, "loss": 0.4673, "step": 4060 },
    { "epoch": 14.13, "learning_rate": 3.554621848739496e-06, "loss": 0.5106, "step": 4070 },
    { "epoch": 14.17, "learning_rate": 3.533613445378152e-06, "loss": 0.4949, "step": 4080 },
    { "epoch": 14.2, "learning_rate": 3.5126050420168067e-06, "loss": 0.4841, "step": 4090 },
    { "epoch": 14.24, "learning_rate": 3.491596638655462e-06, "loss": 0.4896, "step": 4100 },
    { "epoch": 14.27, "learning_rate": 3.470588235294118e-06, "loss": 0.483, "step": 4110 },
    { "epoch": 14.3, "learning_rate": 3.449579831932773e-06, "loss": 0.4755, "step": 4120 },
    { "epoch": 14.34, "learning_rate": 3.428571428571429e-06, "loss": 0.4812, "step": 4130 },
    { "epoch": 14.37, "learning_rate": 3.4075630252100843e-06, "loss": 0.4835, "step": 4140 },
    { "epoch": 14.41, "learning_rate": 3.38655462184874e-06, "loss": 0.4613, "step": 4150 },
    { "epoch": 14.44, "learning_rate": 3.3655462184873954e-06, "loss": 0.4349, "step": 4160 },
    { "epoch": 14.48, "learning_rate": 3.3445378151260503e-06, "loss": 0.4943, "step": 4170 },
    { "epoch": 14.51, "learning_rate": 3.323529411764706e-06, "loss": 0.4983, "step": 4180 },
    { "epoch": 14.55, "learning_rate": 3.3025210084033614e-06, "loss": 0.4892, "step": 4190 },
    { "epoch": 14.58, "learning_rate": 3.281512605042017e-06, "loss": 0.5268, "step": 4200 },
    { "epoch": 14.62, "learning_rate": 3.2605042016806726e-06, "loss": 0.488, "step": 4210 },
    { "epoch": 14.65, "learning_rate": 3.2394957983193283e-06, "loss": 0.4964, "step": 4220 },
    { "epoch": 14.69, "learning_rate": 3.2184873949579837e-06, "loss": 0.5091, "step": 4230 },
    { "epoch": 14.72, "learning_rate": 3.1974789915966386e-06, "loss": 0.4816, "step": 4240 },
    { "epoch": 14.75, "learning_rate": 3.1764705882352943e-06, "loss": 0.4648, "step": 4250 },
    { "epoch": 14.79, "learning_rate": 3.1554621848739497e-06, "loss": 0.4498, "step": 4260 },
    { "epoch": 14.82, "learning_rate": 3.1344537815126055e-06, "loss": 0.4997, "step": 4270 },
    { "epoch": 14.86, "learning_rate": 3.113445378151261e-06, "loss": 0.5136, "step": 4280 },
    { "epoch": 14.89, "learning_rate": 3.092436974789916e-06, "loss": 0.5009, "step": 4290 },
    { "epoch": 14.93, "learning_rate": 3.071428571428572e-06, "loss": 0.4597, "step": 4300 },
    { "epoch": 14.96, "learning_rate": 3.0504201680672273e-06, "loss": 0.4724, "step": 4310 },
    { "epoch": 15.0, "learning_rate": 3.0294117647058826e-06, "loss": 0.5048, "step": 4320 },
    { "epoch": 15.0, "eval_loss": 0.5906367301940918, "eval_runtime": 133.6961, "eval_samples_per_second": 3.979, "eval_steps_per_second": 0.995, "eval_wer": 0.313363759079514, "step": 4320 },
    { "epoch": 15.03, "learning_rate": 3.008403361344538e-06, "loss": 0.5216, "step": 4330 },
    { "epoch": 15.07, "learning_rate": 2.9873949579831933e-06, "loss": 0.4814, "step": 4340 },
    { "epoch": 15.1, "learning_rate": 2.966386554621849e-06, "loss": 0.4715, "step": 4350 },
    { "epoch": 15.14, "learning_rate": 2.9453781512605044e-06, "loss": 0.5006, "step": 4360 },
    { "epoch": 15.17, "learning_rate": 2.92436974789916e-06, "loss": 0.5171, "step": 4370 },
    { "epoch": 15.21, "learning_rate": 2.9033613445378155e-06, "loss": 0.4885, "step": 4380 },
    { "epoch": 15.24, "learning_rate": 2.8823529411764704e-06, "loss": 0.4806, "step": 4390 },
    { "epoch": 15.28, "learning_rate": 2.861344537815126e-06, "loss": 0.4416, "step": 4400 },
    { "epoch": 15.31, "learning_rate": 2.8403361344537815e-06, "loss": 0.4799, "step": 4410 },
    { "epoch": 15.35, "learning_rate": 2.8193277310924373e-06, "loss": 0.4737, "step": 4420 },
    { "epoch": 15.38, "learning_rate": 2.7983193277310926e-06, "loss": 0.4705, "step": 4430 },
    { "epoch": 15.42, "learning_rate": 2.7773109243697484e-06, "loss": 0.4735, "step": 4440 },
    { "epoch": 15.45, "learning_rate": 2.7563025210084037e-06, "loss": 0.4891, "step": 4450 },
    { "epoch": 15.48, "learning_rate": 2.7352941176470595e-06, "loss": 0.4813, "step": 4460 },
    { "epoch": 15.52, "learning_rate": 2.7142857142857144e-06, "loss": 0.496, "step": 4470 },
    { "epoch": 15.55, "learning_rate": 2.6932773109243698e-06, "loss": 0.4669, "step": 4480 },
    { "epoch": 15.59, "learning_rate": 2.6722689075630255e-06, "loss": 0.4831, "step": 4490 },
    { "epoch": 15.62, "learning_rate": 2.651260504201681e-06, "loss": 0.4524, "step": 4500 },
    { "epoch": 15.66, "learning_rate": 2.6302521008403362e-06, "loss": 0.4802, "step": 4510 },
    { "epoch": 15.69, "learning_rate": 2.609243697478992e-06, "loss": 0.4813, "step": 4520 },
    { "epoch": 15.73, "learning_rate": 2.5882352941176473e-06, "loss": 0.4836, "step": 4530 },
    { "epoch": 15.76, "learning_rate": 2.5672268907563027e-06, "loss": 0.4962, "step": 4540 },
    { "epoch": 15.8, "learning_rate": 2.546218487394958e-06, "loss": 0.4936, "step": 4550 },
    { "epoch": 15.83, "learning_rate": 2.5252100840336134e-06, "loss": 0.4919, "step": 4560 },
    { "epoch": 15.87, "learning_rate": 2.504201680672269e-06, "loss": 0.543, "step": 4570 },
    { "epoch": 15.9, "learning_rate": 2.4831932773109245e-06, "loss": 0.514, "step": 4580 },
    { "epoch": 15.94, "learning_rate": 2.4642857142857147e-06, "loss": 0.4819, "step": 4590 },
    { "epoch": 15.97, "learning_rate": 2.44327731092437e-06, "loss": 0.4772, "step": 4600 },
    { "epoch": 16.0, "eval_loss": 0.5693268775939941, "eval_runtime": 132.7597, "eval_samples_per_second": 4.007, "eval_steps_per_second": 1.002, "eval_wer": 0.3009577682072379, "step": 4608 },
    { "epoch": 16.01, "learning_rate": 2.4222689075630254e-06, "loss": 0.5274, "step": 4610 },
    { "epoch": 16.04, "learning_rate": 2.4012605042016807e-06, "loss": 0.4773, "step": 4620 },
    { "epoch": 16.08, "learning_rate": 2.380252100840336e-06, "loss": 0.4692, "step": 4630 },
    { "epoch": 16.11, "learning_rate": 2.359243697478992e-06, "loss": 0.4599, "step": 4640 },
    { "epoch": 16.15, "learning_rate": 2.338235294117647e-06, "loss": 0.4805, "step": 4650 },
    { "epoch": 16.18, "learning_rate": 2.317226890756303e-06, "loss": 0.4887, "step": 4660 },
    { "epoch": 16.21, "learning_rate": 2.296218487394958e-06, "loss": 0.4909, "step": 4670 },
    { "epoch": 16.25, "learning_rate": 2.2752100840336136e-06, "loss": 0.4625, "step": 4680 },
    { "epoch": 16.28, "learning_rate": 2.254201680672269e-06, "loss": 0.477, "step": 4690 },
    { "epoch": 16.32, "learning_rate": 2.2331932773109248e-06, "loss": 0.4909, "step": 4700 },
    { "epoch": 16.35, "learning_rate": 2.21218487394958e-06, "loss": 0.518, "step": 4710 },
    { "epoch": 16.39, "learning_rate": 2.1911764705882354e-06, "loss": 0.4702, "step": 4720 },
    { "epoch": 16.42, "learning_rate": 2.1701680672268908e-06, "loss": 0.49, "step": 4730 },
    { "epoch": 16.46, "learning_rate": 2.1491596638655466e-06, "loss": 0.4882, "step": 4740 },
    { "epoch": 16.49, "learning_rate": 2.128151260504202e-06, "loss": 0.4719, "step": 4750 },
    { "epoch": 16.53, "learning_rate": 2.1071428571428572e-06, "loss": 0.5152, "step": 4760 },
    { "epoch": 16.56, "learning_rate": 2.086134453781513e-06, "loss": 0.4672, "step": 4770 },
    { "epoch": 16.6, "learning_rate": 2.065126050420168e-06, "loss": 0.4704, "step": 4780 },
    { "epoch": 16.63, "learning_rate": 2.0441176470588237e-06, "loss": 0.4678, "step": 4790 },
    { "epoch": 16.66, "learning_rate": 2.023109243697479e-06, "loss": 0.4737, "step": 4800 },
    { "epoch": 16.7, "learning_rate": 2.002100840336135e-06, "loss": 0.4608, "step": 4810 },
    { "epoch": 16.73, "learning_rate": 1.98109243697479e-06, "loss": 0.5042, "step": 4820 },
    { "epoch": 16.77, "learning_rate": 1.9600840336134455e-06, "loss": 0.5033, "step": 4830 },
    { "epoch": 16.8, "learning_rate": 1.9390756302521013e-06, "loss": 0.4717, "step": 4840 },
    { "epoch": 16.84, "learning_rate": 1.9180672268907566e-06, "loss": 0.4869, "step": 4850 },
    { "epoch": 16.87, "learning_rate": 1.897058823529412e-06, "loss": 0.4963, "step": 4860 },
    { "epoch": 16.91, "learning_rate": 1.8760504201680673e-06, "loss": 0.4411, "step": 4870 },
    { "epoch": 16.94, "learning_rate": 1.8550420168067228e-06, "loss": 0.4456, "step": 4880 },
    { "epoch": 16.98, "learning_rate": 1.8340336134453782e-06, "loss": 0.4748, "step": 4890 },
    { "epoch": 17.0, "eval_loss": 0.5707325339317322, "eval_runtime": 133.2653, "eval_samples_per_second": 3.992, "eval_steps_per_second": 0.998, "eval_wer": 0.30282188082535194, "step": 4896 },
{ |
|
"epoch": 17.01, |
|
"learning_rate": 1.8130252100840337e-06, |
|
"loss": 0.5172, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"learning_rate": 1.7920168067226893e-06, |
|
"loss": 0.4909, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 1.7710084033613448e-06, |
|
"loss": 0.4509, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"learning_rate": 1.75e-06, |
|
"loss": 0.4508, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"learning_rate": 1.7289915966386555e-06, |
|
"loss": 0.459, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 1.707983193277311e-06, |
|
"loss": 0.4737, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"learning_rate": 1.6869747899159666e-06, |
|
"loss": 0.4742, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"learning_rate": 1.665966386554622e-06, |
|
"loss": 0.4753, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"learning_rate": 1.6449579831932775e-06, |
|
"loss": 0.4688, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 1.6239495798319329e-06, |
|
"loss": 0.4689, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"learning_rate": 1.6029411764705884e-06, |
|
"loss": 0.4724, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"learning_rate": 1.5819327731092438e-06, |
|
"loss": 0.4744, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"learning_rate": 1.5609243697478993e-06, |
|
"loss": 0.4824, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"learning_rate": 1.5399159663865549e-06, |
|
"loss": 0.4699, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 1.51890756302521e-06, |
|
"loss": 0.5166, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 1.4978991596638656e-06, |
|
"loss": 0.4653, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"learning_rate": 1.4768907563025211e-06, |
|
"loss": 0.4809, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 1.4558823529411767e-06, |
|
"loss": 0.4605, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"learning_rate": 1.434873949579832e-06, |
|
"loss": 0.4774, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"learning_rate": 1.4159663865546218e-06, |
|
"loss": 0.483, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 1.3949579831932774e-06, |
|
"loss": 0.5036, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 1.373949579831933e-06, |
|
"loss": 0.466, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"learning_rate": 1.3529411764705883e-06, |
|
"loss": 0.4475, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"learning_rate": 1.3319327731092438e-06, |
|
"loss": 0.4919, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"learning_rate": 1.3109243697478994e-06, |
|
"loss": 0.4884, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"learning_rate": 1.289915966386555e-06, |
|
"loss": 0.4847, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 1.26890756302521e-06, |
|
"loss": 0.4636, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"learning_rate": 1.2478991596638656e-06, |
|
"loss": 0.4921, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"learning_rate": 1.226890756302521e-06, |
|
"loss": 0.4745, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_loss": 0.570929229259491, |
|
"eval_runtime": 132.8501, |
|
"eval_samples_per_second": 4.005, |
|
"eval_steps_per_second": 1.001, |
|
"eval_wer": 0.301921964389021, |
|
"step": 5184 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 1.2058823529411765e-06, |
|
"loss": 0.5153, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"learning_rate": 1.184873949579832e-06, |
|
"loss": 0.4541, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 1.1638655462184874e-06, |
|
"loss": 0.474, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 18.12, |
|
"learning_rate": 1.142857142857143e-06, |
|
"loss": 0.4608, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 1.1218487394957985e-06, |
|
"loss": 0.4932, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"learning_rate": 1.1008403361344539e-06, |
|
"loss": 0.4808, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 1.0798319327731094e-06, |
|
"loss": 0.4657, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"learning_rate": 1.0588235294117648e-06, |
|
"loss": 0.4375, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 1.0378151260504203e-06, |
|
"loss": 0.4985, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 1.0168067226890757e-06, |
|
"loss": 0.505, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 9.95798319327731e-07, |
|
"loss": 0.4977, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 9.747899159663866e-07, |
|
"loss": 0.4706, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 9.537815126050421e-07, |
|
"loss": 0.4435, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 9.327731092436976e-07, |
|
"loss": 0.4855, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 18.51, |
|
"learning_rate": 9.117647058823529e-07, |
|
"loss": 0.4624, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 18.54, |
|
"learning_rate": 8.907563025210085e-07, |
|
"loss": 0.5086, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 18.57, |
|
"learning_rate": 8.697478991596639e-07, |
|
"loss": 0.4876, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 8.487394957983195e-07, |
|
"loss": 0.4862, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 18.64, |
|
"learning_rate": 8.277310924369748e-07, |
|
"loss": 0.4466, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 18.68, |
|
"learning_rate": 8.067226890756304e-07, |
|
"loss": 0.4713, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"learning_rate": 7.857142857142857e-07, |
|
"loss": 0.4689, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 7.647058823529413e-07, |
|
"loss": 0.4741, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"learning_rate": 7.436974789915967e-07, |
|
"loss": 0.4395, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"learning_rate": 7.226890756302522e-07, |
|
"loss": 0.4878, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"learning_rate": 7.016806722689076e-07, |
|
"loss": 0.5032, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 6.806722689075631e-07, |
|
"loss": 0.5066, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 6.596638655462185e-07, |
|
"loss": 0.4665, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 18.96, |
|
"learning_rate": 6.38655462184874e-07, |
|
"loss": 0.4561, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"learning_rate": 6.176470588235295e-07, |
|
"loss": 0.4548, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_loss": 0.571989119052887, |
|
"eval_runtime": 132.9619, |
|
"eval_samples_per_second": 4.001, |
|
"eval_steps_per_second": 1.0, |
|
"eval_wer": 0.30044353024362025, |
|
"step": 5472 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 5.96638655462185e-07, |
|
"loss": 0.4904, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 5.756302521008403e-07, |
|
"loss": 0.4573, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"learning_rate": 5.546218487394958e-07, |
|
"loss": 0.4248, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 5.336134453781513e-07, |
|
"loss": 0.4934, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 5.126050420168068e-07, |
|
"loss": 0.4961, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 4.915966386554622e-07, |
|
"loss": 0.4658, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 4.7058823529411767e-07, |
|
"loss": 0.5053, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 19.27, |
|
"learning_rate": 4.4957983193277317e-07, |
|
"loss": 0.4687, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"learning_rate": 4.285714285714286e-07, |
|
"loss": 0.469, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 4.0756302521008407e-07, |
|
"loss": 0.4819, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 3.865546218487395e-07, |
|
"loss": 0.4649, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"learning_rate": 3.65546218487395e-07, |
|
"loss": 0.4558, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"learning_rate": 3.4453781512605047e-07, |
|
"loss": 0.4492, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"learning_rate": 3.235294117647059e-07, |
|
"loss": 0.4556, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 3.0252100840336137e-07, |
|
"loss": 0.4847, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"learning_rate": 2.815126050420168e-07, |
|
"loss": 0.4762, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"learning_rate": 2.605042016806723e-07, |
|
"loss": 0.4659, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 2.3949579831932777e-07, |
|
"loss": 0.4651, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 19.65, |
|
"learning_rate": 2.184873949579832e-07, |
|
"loss": 0.4878, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 1.9747899159663866e-07, |
|
"loss": 0.4809, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 19.72, |
|
"learning_rate": 1.7647058823529414e-07, |
|
"loss": 0.4631, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 19.75, |
|
"learning_rate": 1.554621848739496e-07, |
|
"loss": 0.4607, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 1.3445378151260506e-07, |
|
"loss": 0.4423, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 1.1344537815126051e-07, |
|
"loss": 0.502, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 9.243697478991597e-08, |
|
"loss": 0.5033, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"learning_rate": 7.142857142857144e-08, |
|
"loss": 0.4968, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 19.93, |
|
"learning_rate": 5.04201680672269e-08, |
|
"loss": 0.4721, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 19.96, |
|
"learning_rate": 2.9411764705882354e-08, |
|
"loss": 0.4608, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 8.403361344537816e-09, |
|
"loss": 0.4619, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_loss": 0.5728763937950134, |
|
"eval_runtime": 133.6384, |
|
"eval_samples_per_second": 3.981, |
|
"eval_steps_per_second": 0.995, |
|
"eval_wer": 0.3018576846435688, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 5760, |
|
"total_flos": 5.436639839854258e+19, |
|
"train_loss": 0.5082269724872377, |
|
"train_runtime": 42641.5901, |
|
"train_samples_per_second": 2.165, |
|
"train_steps_per_second": 0.135 |
|
} |
|
], |
|
"max_steps": 5760, |
|
"num_train_epochs": 20, |
|
"total_flos": 5.436639839854258e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|