{
  "best_metric": 0.4326765537261963,
  "best_model_checkpoint": "ai-light-dance_singing_ft_wav2vec2-large-xlsr-53/checkpoint-4416",
  "epoch": 9.998756078253987,
  "global_step": 5520,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 6.000000000000001e-07, "loss": 37.4798, "step": 10 },
    { "epoch": 0.04, "learning_rate": 1.6000000000000001e-06, "loss": 30.4795, "step": 20 },
    { "epoch": 0.05, "learning_rate": 2.6e-06, "loss": 34.5198, "step": 30 },
    { "epoch": 0.07, "learning_rate": 3.6e-06, "loss": 33.3812, "step": 40 },
    { "epoch": 0.09, "learning_rate": 4.6e-06, "loss": 35.0926, "step": 50 },
    { "epoch": 0.11, "learning_rate": 5.600000000000001e-06, "loss": 36.0086, "step": 60 },
    { "epoch": 0.13, "learning_rate": 6.6e-06, "loss": 32.2553, "step": 70 },
    { "epoch": 0.14, "learning_rate": 7.6e-06, "loss": 27.7818, "step": 80 },
    { "epoch": 0.16, "learning_rate": 8.599999999999999e-06, "loss": 26.3508, "step": 90 },
    { "epoch": 0.18, "learning_rate": 9.600000000000001e-06, "loss": 16.2226, "step": 100 },
    { "epoch": 0.2, "learning_rate": 1.06e-05, "loss": 9.8169, "step": 110 },
    { "epoch": 0.22, "learning_rate": 1.16e-05, "loss": 6.8364, "step": 120 },
    { "epoch": 0.24, "learning_rate": 1.2600000000000001e-05, "loss": 5.7286, "step": 130 },
    { "epoch": 0.25, "learning_rate": 1.3600000000000002e-05, "loss": 5.4131, "step": 140 },
    { "epoch": 0.27, "learning_rate": 1.4599999999999999e-05, "loss": 5.0572, "step": 150 },
    { "epoch": 0.29, "learning_rate": 1.56e-05, "loss": 4.1076, "step": 160 },
    { "epoch": 0.31, "learning_rate": 1.66e-05, "loss": 3.8991, "step": 170 },
    { "epoch": 0.33, "learning_rate": 1.76e-05, "loss": 4.0341, "step": 180 },
    { "epoch": 0.34, "learning_rate": 1.86e-05, "loss": 3.8305, "step": 190 },
    { "epoch": 0.36, "learning_rate": 1.9600000000000002e-05, "loss": 3.5335, "step": 200 },
    { "epoch": 0.38, "learning_rate": 2.06e-05, "loss": 3.5695, "step": 210 },
    { "epoch": 0.4, "learning_rate": 2.16e-05, "loss": 3.5874, "step": 220 },
    { "epoch": 0.42, "learning_rate": 2.26e-05, "loss": 3.4865, "step": 230 },
    { "epoch": 0.43, "learning_rate": 2.36e-05, "loss": 3.3757, "step": 240 },
    { "epoch": 0.45, "learning_rate": 2.46e-05, "loss": 3.3191, "step": 250 },
    { "epoch": 0.47, "learning_rate": 2.5600000000000002e-05, "loss": 3.3174, "step": 260 },
    { "epoch": 0.49, "learning_rate": 2.6600000000000003e-05, "loss": 3.1491, "step": 270 },
    { "epoch": 0.51, "learning_rate": 2.7600000000000003e-05, "loss": 3.2675, "step": 280 },
    { "epoch": 0.52, "learning_rate": 2.86e-05, "loss": 3.1696, "step": 290 },
    { "epoch": 0.54, "learning_rate": 2.96e-05, "loss": 3.0987, "step": 300 },
    { "epoch": 0.56, "learning_rate": 3.06e-05, "loss": 2.9917, "step": 310 },
    { "epoch": 0.58, "learning_rate": 3.16e-05, "loss": 2.8617, "step": 320 },
    { "epoch": 0.6, "learning_rate": 3.26e-05, "loss": 2.8357, "step": 330 },
    { "epoch": 0.62, "learning_rate": 3.3600000000000004e-05, "loss": 2.8053, "step": 340 },
    { "epoch": 0.63, "learning_rate": 3.46e-05, "loss": 2.8275, "step": 350 },
    { "epoch": 0.65, "learning_rate": 3.56e-05, "loss": 2.6479, "step": 360 },
    { "epoch": 0.67, "learning_rate": 3.66e-05, "loss": 2.5326, "step": 370 },
    { "epoch": 0.69, "learning_rate": 3.76e-05, "loss": 2.495, "step": 380 },
    { "epoch": 0.71, "learning_rate": 3.86e-05, "loss": 2.3204, "step": 390 },
    { "epoch": 0.72, "learning_rate": 3.960000000000001e-05, "loss": 2.2974, "step": 400 },
    { "epoch": 0.74, "learning_rate": 4.0600000000000004e-05, "loss": 2.203, "step": 410 },
    { "epoch": 0.76, "learning_rate": 4.16e-05, "loss": 2.0697, "step": 420 },
    { "epoch": 0.78, "learning_rate": 4.26e-05, "loss": 2.0017, "step": 430 },
    { "epoch": 0.8, "learning_rate": 4.36e-05, "loss": 1.8623, "step": 440 },
    { "epoch": 0.81, "learning_rate": 4.46e-05, "loss": 1.7572, "step": 450 },
    { "epoch": 0.83, "learning_rate": 4.5600000000000004e-05, "loss": 1.6999, "step": 460 },
    { "epoch": 0.85, "learning_rate": 4.660000000000001e-05, "loss": 1.711, "step": 470 },
    { "epoch": 0.87, "learning_rate": 4.76e-05, "loss": 1.6011, "step": 480 },
    { "epoch": 0.89, "learning_rate": 4.86e-05, "loss": 1.7313, "step": 490 },
    { "epoch": 0.9, "learning_rate": 4.96e-05, "loss": 1.6939, "step": 500 },
    { "epoch": 0.92, "learning_rate": 4.99998237600688e-05, "loss": 1.6014, "step": 510 },
    { "epoch": 0.94, "learning_rate": 4.9998746747265604e-05, "loss": 1.5156, "step": 520 },
    { "epoch": 0.96, "learning_rate": 4.999669067486149e-05, "loss": 1.6563, "step": 530 },
    { "epoch": 0.98, "learning_rate": 4.999365562338116e-05, "loss": 1.4693, "step": 540 },
    { "epoch": 1.0, "learning_rate": 4.998964171169046e-05, "loss": 1.4089, "step": 550 },
    { "epoch": 1.0, "eval_loss": 1.4750380516052246, "eval_runtime": 322.5668, "eval_samples_per_second": 6.092, "eval_steps_per_second": 3.047, "eval_wer": 0.90536627441316, "step": 552 },
    { "epoch": 1.01, "learning_rate": 4.99846490969916e-05, "loss": 1.436, "step": 560 },
    { "epoch": 1.03, "learning_rate": 4.9978677974817076e-05, "loss": 1.3257, "step": 570 },
    { "epoch": 1.05, "learning_rate": 4.997172857902201e-05, "loss": 1.3687, "step": 580 },
    { "epoch": 1.07, "learning_rate": 4.996380118177494e-05, "loss": 1.2997, "step": 590 },
    { "epoch": 1.09, "learning_rate": 4.99548960935472e-05, "loss": 1.3143, "step": 600 },
    { "epoch": 1.1, "learning_rate": 4.994501366310076e-05, "loss": 1.3108, "step": 610 },
    { "epoch": 1.12, "learning_rate": 4.9934154277474564e-05, "loss": 1.2617, "step": 620 },
    { "epoch": 1.14, "learning_rate": 4.992231836196937e-05, "loss": 1.2921, "step": 630 },
    { "epoch": 1.16, "learning_rate": 4.9909506380131063e-05, "loss": 1.2369, "step": 640 },
    { "epoch": 1.18, "learning_rate": 4.989571883373257e-05, "loss": 1.2625, "step": 650 },
    { "epoch": 1.2, "learning_rate": 4.988095626275414e-05, "loss": 1.2738, "step": 660 },
    { "epoch": 1.21, "learning_rate": 4.9865219245362226e-05, "loss": 1.242, "step": 670 },
    { "epoch": 1.23, "learning_rate": 4.984850839788685e-05, "loss": 1.2433, "step": 680 },
    { "epoch": 1.25, "learning_rate": 4.9830824374797445e-05, "loss": 1.2075, "step": 690 },
    { "epoch": 1.27, "learning_rate": 4.981216786867723e-05, "loss": 1.2052, "step": 700 },
    { "epoch": 1.29, "learning_rate": 4.97925396101961e-05, "loss": 1.1486, "step": 710 },
    { "epoch": 1.3, "learning_rate": 4.977194036808198e-05, "loss": 1.1154, "step": 720 },
    { "epoch": 1.32, "learning_rate": 4.975037094909075e-05, "loss": 1.089, "step": 730 },
    { "epoch": 1.34, "learning_rate": 4.972783219797461e-05, "loss": 1.152, "step": 740 },
    { "epoch": 1.36, "learning_rate": 4.970432499744906e-05, "loss": 1.1418, "step": 750 },
    { "epoch": 1.38, "learning_rate": 4.967985026815826e-05, "loss": 1.176, "step": 760 },
    { "epoch": 1.39, "learning_rate": 4.9654408968639006e-05, "loss": 1.129, "step": 770 },
    { "epoch": 1.41, "learning_rate": 4.962800209528319e-05, "loss": 1.0695, "step": 780 },
    { "epoch": 1.43, "learning_rate": 4.9600630682298764e-05, "loss": 1.0066, "step": 790 },
    { "epoch": 1.45, "learning_rate": 4.957229580166928e-05, "loss": 1.0228, "step": 800 },
    { "epoch": 1.47, "learning_rate": 4.954299856311181e-05, "loss": 1.059, "step": 810 },
    { "epoch": 1.48, "learning_rate": 4.951274011403361e-05, "loss": 1.0438, "step": 820 },
    { "epoch": 1.5, "learning_rate": 4.9481521639487074e-05, "loss": 1.027, "step": 830 },
    { "epoch": 1.52, "learning_rate": 4.9449344362123385e-05, "loss": 1.0773, "step": 840 },
    { "epoch": 1.54, "learning_rate": 4.941620954214462e-05, "loss": 0.9679, "step": 850 },
    { "epoch": 1.56, "learning_rate": 4.9382118477254357e-05, "loss": 0.9833, "step": 860 },
    { "epoch": 1.58, "learning_rate": 4.9347072502606925e-05, "loss": 0.9811, "step": 870 },
    { "epoch": 1.59, "learning_rate": 4.9311072990755044e-05, "loss": 0.973, "step": 880 },
    { "epoch": 1.61, "learning_rate": 4.92741213515961e-05, "loss": 0.948, "step": 890 },
    { "epoch": 1.63, "learning_rate": 4.923621903231691e-05, "loss": 0.928, "step": 900 },
    { "epoch": 1.65, "learning_rate": 4.919736751733709e-05, "loss": 0.9791, "step": 910 },
    { "epoch": 1.67, "learning_rate": 4.915756832825086e-05, "loss": 0.9452, "step": 920 },
    { "epoch": 1.68, "learning_rate": 4.911682302376748e-05, "loss": 0.9401, "step": 930 },
    { "epoch": 1.7, "learning_rate": 4.907513319965022e-05, "loss": 1.0319, "step": 940 },
    { "epoch": 1.72, "learning_rate": 4.9032500488653824e-05, "loss": 0.8958, "step": 950 },
    { "epoch": 1.74, "learning_rate": 4.89889265604606e-05, "loss": 0.9368, "step": 960 },
    { "epoch": 1.76, "learning_rate": 4.8944413121615e-05, "loss": 0.9165, "step": 970 },
    { "epoch": 1.77, "learning_rate": 4.889896191545681e-05, "loss": 0.8891, "step": 980 },
    { "epoch": 1.79, "learning_rate": 4.8852574722052846e-05, "loss": 0.8636, "step": 990 },
    { "epoch": 1.81, "learning_rate": 4.881002747966038e-05, "loss": 0.8955, "step": 1000 },
    { "epoch": 1.83, "learning_rate": 4.876186694580448e-05, "loss": 0.9004, "step": 1010 },
    { "epoch": 1.85, "learning_rate": 4.871277579393783e-05, "loss": 0.8291, "step": 1020 },
    { "epoch": 1.86, "learning_rate": 4.8662755946683346e-05, "loss": 0.7727, "step": 1030 },
    { "epoch": 1.88, "learning_rate": 4.861180936303571e-05, "loss": 0.8485, "step": 1040 },
    { "epoch": 1.9, "learning_rate": 4.85599380382846e-05, "loss": 0.8433, "step": 1050 },
    { "epoch": 1.92, "learning_rate": 4.8507144003936625e-05, "loss": 0.9358, "step": 1060 },
    { "epoch": 1.94, "learning_rate": 4.8453429327635656e-05, "loss": 0.823, "step": 1070 },
    { "epoch": 1.96, "learning_rate": 4.8398796113081945e-05, "loss": 0.8033, "step": 1080 },
    { "epoch": 1.97, "learning_rate": 4.834324649994969e-05, "loss": 0.8461, "step": 1090 },
    { "epoch": 1.99, "learning_rate": 4.828678266380323e-05, "loss": 0.7995, "step": 1100 },
    { "epoch": 2.0, "eval_loss": 0.9043775200843811, "eval_runtime": 325.3455, "eval_samples_per_second": 6.04, "eval_steps_per_second": 3.021, "eval_wer": 0.6162898157846097, "step": 1104 },
    { "epoch": 2.01, "learning_rate": 4.8229406816011855e-05, "loss": 0.9137, "step": 1110 },
    { "epoch": 2.03, "learning_rate": 4.81711212036632e-05, "loss": 0.864, "step": 1120 },
    { "epoch": 2.05, "learning_rate": 4.8111928109475226e-05, "loss": 0.7631, "step": 1130 },
    { "epoch": 2.07, "learning_rate": 4.805182985170685e-05, "loss": 0.7677, "step": 1140 },
    { "epoch": 2.08, "learning_rate": 4.799082878406709e-05, "loss": 0.7535, "step": 1150 },
    { "epoch": 2.1, "learning_rate": 4.792892729562297e-05, "loss": 0.7743, "step": 1160 },
    { "epoch": 2.12, "learning_rate": 4.786612781070587e-05, "loss": 0.7485, "step": 1170 },
    { "epoch": 2.14, "learning_rate": 4.7802432788816646e-05, "loss": 0.7372, "step": 1180 },
    { "epoch": 2.16, "learning_rate": 4.7737844724529245e-05, "loss": 0.7217, "step": 1190 },
    { "epoch": 2.17, "learning_rate": 4.7672366147393065e-05, "loss": 0.805, "step": 1200 },
    { "epoch": 2.19, "learning_rate": 4.760599962183383e-05, "loss": 0.7733, "step": 1210 },
    { "epoch": 2.21, "learning_rate": 4.753874774705321e-05, "loss": 0.738, "step": 1220 },
    { "epoch": 2.23, "learning_rate": 4.7470613156926983e-05, "loss": 0.7428, "step": 1230 },
    { "epoch": 2.25, "learning_rate": 4.740159851990191e-05, "loss": 0.7261, "step": 1240 },
    { "epoch": 2.26, "learning_rate": 4.733170653889119e-05, "loss": 0.7504, "step": 1250 },
    { "epoch": 2.28, "learning_rate": 4.726093995116868e-05, "loss": 0.74, "step": 1260 },
    { "epoch": 2.3, "learning_rate": 4.7189301528261566e-05, "loss": 0.6939, "step": 1270 },
    { "epoch": 2.32, "learning_rate": 4.711679407584195e-05, "loss": 0.7243, "step": 1280 },
    { "epoch": 2.34, "learning_rate": 4.7043420433616866e-05, "loss": 0.6903, "step": 1290 },
    { "epoch": 2.35, "learning_rate": 4.696918347521714e-05, "loss": 0.7343, "step": 1300 },
    { "epoch": 2.37, "learning_rate": 4.6894086108084776e-05, "loss": 0.7062, "step": 1310 },
    { "epoch": 2.39, "learning_rate": 4.681813127335918e-05, "loss": 0.6893, "step": 1320 },
    { "epoch": 2.41, "learning_rate": 4.674132194576185e-05, "loss": 0.6801, "step": 1330 },
    { "epoch": 2.43, "learning_rate": 4.666366113347998e-05, "loss": 0.689, "step": 1340 },
    { "epoch": 2.45, "learning_rate": 4.6585151878048605e-05, "loss": 0.6905, "step": 1350 },
    { "epoch": 2.46, "learning_rate": 4.6505797254231474e-05, "loss": 0.677, "step": 1360 },
    { "epoch": 2.48, "learning_rate": 4.6425600369900635e-05, "loss": 0.6975, "step": 1370 },
    { "epoch": 2.5, "learning_rate": 4.634456436591472e-05, "loss": 0.7028, "step": 1380 },
    { "epoch": 2.52, "learning_rate": 4.6262692415995944e-05, "loss": 0.79, "step": 1390 },
    { "epoch": 2.54, "learning_rate": 4.617998772660578e-05, "loss": 0.7154, "step": 1400 },
    { "epoch": 2.55, "learning_rate": 4.609645353681942e-05, "loss": 0.6985, "step": 1410 },
    { "epoch": 2.57, "learning_rate": 4.601209311819889e-05, "loss": 0.6251, "step": 1420 },
    { "epoch": 2.59, "learning_rate": 4.592690977466492e-05, "loss": 0.6557, "step": 1430 },
    { "epoch": 2.61, "learning_rate": 4.584090684236757e-05, "loss": 0.6233, "step": 1440 },
    { "epoch": 2.63, "learning_rate": 4.575408768955556e-05, "loss": 0.6361, "step": 1450 },
    { "epoch": 2.64, "learning_rate": 4.566645571644433e-05, "loss": 0.6998, "step": 1460 },
    { "epoch": 2.66, "learning_rate": 4.557801435508292e-05, "loss": 0.6774, "step": 1470 },
    { "epoch": 2.68, "learning_rate": 4.548876706921954e-05, "loss": 0.6703, "step": 1480 },
    { "epoch": 2.7, "learning_rate": 4.5398717354165884e-05, "loss": 0.6751, "step": 1490 },
    { "epoch": 2.72, "learning_rate": 4.5307868736660264e-05, "loss": 0.6658, "step": 1500 },
    { "epoch": 2.73, "learning_rate": 4.52162247747295e-05, "loss": 0.7303, "step": 1510 },
    { "epoch": 2.75, "learning_rate": 4.5123789057549546e-05, "loss": 0.6642, "step": 1520 },
    { "epoch": 2.77, "learning_rate": 4.503056520530495e-05, "loss": 0.6194, "step": 1530 },
    { "epoch": 2.79, "learning_rate": 4.493655686904703e-05, "loss": 0.6358, "step": 1540 },
    { "epoch": 2.81, "learning_rate": 4.484176773055091e-05, "loss": 0.6588, "step": 1550 },
    { "epoch": 2.83, "learning_rate": 4.474620150217136e-05, "loss": 0.6471, "step": 1560 },
    { "epoch": 2.84, "learning_rate": 4.464986192669733e-05, "loss": 0.5713, "step": 1570 },
    { "epoch": 2.86, "learning_rate": 4.455275277720542e-05, "loss": 0.618, "step": 1580 },
    { "epoch": 2.88, "learning_rate": 4.445487785691209e-05, "loss": 0.5981, "step": 1590 },
    { "epoch": 2.9, "learning_rate": 4.435624099902472e-05, "loss": 0.5877, "step": 1600 },
    { "epoch": 2.92, "learning_rate": 4.425684606659146e-05, "loss": 0.6088, "step": 1610 },
    { "epoch": 2.93, "learning_rate": 4.415669695234999e-05, "loss": 0.599, "step": 1620 },
    { "epoch": 2.95, "learning_rate": 4.405579757857499e-05, "loss": 0.5592, "step": 1630 },
    { "epoch": 2.97, "learning_rate": 4.395415189692459e-05, "loss": 0.5883, "step": 1640 },
    { "epoch": 2.99, "learning_rate": 4.3851763888285564e-05, "loss": 0.6232, "step": 1650 },
    { "epoch": 3.0, "eval_loss": 0.6645084023475647, "eval_runtime": 327.053, "eval_samples_per_second": 6.008, "eval_steps_per_second": 3.006, "eval_wer": 0.39803462510011706, "step": 1656 },
    { "epoch": 3.01, "learning_rate": 4.3748637562617455e-05, "loss": 0.6457, "step": 1660 },
    { "epoch": 3.03, "learning_rate": 4.364477695879551e-05, "loss": 0.56, "step": 1670 },
    { "epoch": 3.04, "learning_rate": 4.354018614445247e-05, "loss": 0.5492, "step": 1680 },
    { "epoch": 3.06, "learning_rate": 4.343486921581934e-05, "loss": 0.5424, "step": 1690 },
    { "epoch": 3.08, "learning_rate": 4.332883029756488e-05, "loss": 0.5244, "step": 1700 },
    { "epoch": 3.1, "learning_rate": 4.322207354263412e-05, "loss": 0.5997, "step": 1710 },
    { "epoch": 3.12, "learning_rate": 4.3114603132085705e-05, "loss": 0.5258, "step": 1720 },
    { "epoch": 3.13, "learning_rate": 4.300642327492811e-05, "loss": 0.5644, "step": 1730 },
    { "epoch": 3.15, "learning_rate": 4.2897538207954847e-05, "loss": 0.5319, "step": 1740 },
    { "epoch": 3.17, "learning_rate": 4.2787952195578484e-05, "loss": 0.5171, "step": 1750 },
    { "epoch": 3.19, "learning_rate": 4.267766952966369e-05, "loss": 0.5303, "step": 1760 },
    { "epoch": 3.21, "learning_rate": 4.25666945293591e-05, "loss": 0.563, "step": 1770 },
    { "epoch": 3.22, "learning_rate": 4.245503154092815e-05, "loss": 0.5612, "step": 1780 },
    { "epoch": 3.24, "learning_rate": 4.234268493757893e-05, "loss": 0.5184, "step": 1790 },
    { "epoch": 3.26, "learning_rate": 4.2229659119292835e-05, "loss": 0.5708, "step": 1800 },
    { "epoch": 3.28, "learning_rate": 4.2115958512652264e-05, "loss": 0.6133, "step": 1810 },
    { "epoch": 3.3, "learning_rate": 4.200158757066727e-05, "loss": 0.5336, "step": 1820 },
    { "epoch": 3.31, "learning_rate": 4.1886550772601157e-05, "loss": 0.5559, "step": 1830 },
    { "epoch": 3.33, "learning_rate": 4.1770852623795034e-05, "loss": 0.5293, "step": 1840 },
    { "epoch": 3.35, "learning_rate": 4.16544976554914e-05, "loss": 0.5399, "step": 1850 },
    { "epoch": 3.37, "learning_rate": 4.1537490424656633e-05, "loss": 0.5397, "step": 1860 },
    { "epoch": 3.39, "learning_rate": 4.1419835513802585e-05, "loss": 0.4903, "step": 1870 },
    { "epoch": 3.41, "learning_rate": 4.130153753080703e-05, "loss": 0.5467, "step": 1880 },
    { "epoch": 3.42, "learning_rate": 4.1182601108733284e-05, "loss": 0.4755, "step": 1890 },
    { "epoch": 3.44, "learning_rate": 4.106303090564869e-05, "loss": 0.5584, "step": 1900 },
    { "epoch": 3.46, "learning_rate": 4.09428316044422e-05, "loss": 0.5254, "step": 1910 },
    { "epoch": 3.48, "learning_rate": 4.082200791264101e-05, "loss": 0.4982, "step": 1920 },
    { "epoch": 3.5, "learning_rate": 4.070056456222615e-05, "loss": 0.518, "step": 1930 },
    { "epoch": 3.51, "learning_rate": 4.0578506309447185e-05, "loss": 0.4671, "step": 1940 },
    { "epoch": 3.53, "learning_rate": 4.045583793463592e-05, "loss": 0.4879, "step": 1950 },
    { "epoch": 3.55, "learning_rate": 4.033256424201919e-05, "loss": 0.539, "step": 1960 },
    { "epoch": 3.57, "learning_rate": 4.0208690059530715e-05, "loss": 0.5025, "step": 1970 },
    { "epoch": 3.59, "learning_rate": 4.0084220238622015e-05, "loss": 0.5072, "step": 1980 },
    { "epoch": 3.6, "learning_rate": 3.9959159654072374e-05, "loss": 0.5173, "step": 1990 },
    { "epoch": 3.62, "learning_rate": 3.9833513203797983e-05, "loss": 0.5022, "step": 2000 },
    { "epoch": 3.64, "learning_rate": 3.970728580866005e-05, "loss": 0.5031, "step": 2010 },
    { "epoch": 3.66, "learning_rate": 3.958048241227213e-05, "loss": 0.5372, "step": 2020 },
    { "epoch": 3.68, "learning_rate": 3.9453107980806476e-05, "loss": 0.5042, "step": 2030 },
    { "epoch": 3.69, "learning_rate": 3.9325167502799575e-05, "loss": 0.4658, "step": 2040 },
    { "epoch": 3.71, "learning_rate": 3.919666598895675e-05, "loss": 0.4794, "step": 2050 },
    { "epoch": 3.73, "learning_rate": 3.906760847195591e-05, "loss": 0.5384, "step": 2060 },
    { "epoch": 3.75, "learning_rate": 3.893800000625049e-05, "loss": 0.466, "step": 2070 },
    { "epoch": 3.77, "learning_rate": 3.880784566787145e-05, "loss": 0.5098, "step": 2080 },
    { "epoch": 3.79, "learning_rate": 3.867715055422854e-05, "loss": 0.5207, "step": 2090 },
    { "epoch": 3.8, "learning_rate": 3.8545919783910546e-05, "loss": 0.4768, "step": 2100 },
    { "epoch": 3.82, "learning_rate": 3.8414158496484954e-05, "loss": 0.5256, "step": 2110 },
    { "epoch": 3.84, "learning_rate": 3.8281871852296604e-05, "loss": 0.525, "step": 2120 },
    { "epoch": 3.86, "learning_rate": 3.814906503226556e-05, "loss": 0.5265, "step": 2130 },
    { "epoch": 3.88, "learning_rate": 3.801574323768426e-05, "loss": 0.5472, "step": 2140 },
    { "epoch": 3.89, "learning_rate": 3.788191169001379e-05, "loss": 0.4927, "step": 2150 },
    { "epoch": 3.91, "learning_rate": 3.7747575630679356e-05, "loss": 0.5349, "step": 2160 },
    { "epoch": 3.93, "learning_rate": 3.761274032086504e-05, "loss": 0.4691, "step": 2170 },
    { "epoch": 3.95, "learning_rate": 3.747741104130777e-05, "loss": 0.5086, "step": 2180 },
    { "epoch": 3.97, "learning_rate": 3.734159309209044e-05, "loss": 0.4996, "step": 2190 },
    { "epoch": 3.98, "learning_rate": 3.72052917924344e-05, "loss": 0.5351, "step": 2200 },
    { "epoch": 4.0, "eval_loss": 0.567396879196167, "eval_runtime": 326.4464, "eval_samples_per_second": 6.019, "eval_steps_per_second": 3.011, "eval_wer": 0.3120263692933276, "step": 2208 },
    { "epoch": 4.0, "learning_rate": 3.706851248049109e-05, "loss": 0.5663, "step": 2210 },
    { "epoch": 4.02, "learning_rate": 3.6931260513132994e-05, "loss": 0.4713, "step": 2220 },
    { "epoch": 4.04, "learning_rate": 3.679354126574384e-05, "loss": 0.449, "step": 2230 },
    { "epoch": 4.06, "learning_rate": 3.665536013200806e-05, "loss": 0.4667, "step": 2240 },
    { "epoch": 4.08, "learning_rate": 3.651672252369957e-05, "loss": 0.4799, "step": 2250 },
    { "epoch": 4.09, "learning_rate": 3.6377633870469826e-05, "loss": 0.4875, "step": 2260 },
    { "epoch": 4.11, "learning_rate": 3.6238099619635174e-05, "loss": 0.517, "step": 2270 },
    { "epoch": 4.13, "learning_rate": 3.609812523596345e-05, "loss": 0.4564, "step": 2280 },
    { "epoch": 4.15, "learning_rate": 3.595771620146005e-05, "loss": 0.4734, "step": 2290 },
    { "epoch": 4.17, "learning_rate": 3.5816878015153175e-05, "loss": 0.4858, "step": 2300 },
    { "epoch": 4.18, "learning_rate": 3.567561619287847e-05, "loss": 0.4946, "step": 2310 },
    { "epoch": 4.2, "learning_rate": 3.5533936267063e-05, "loss": 0.4382, "step": 2320 },
    { "epoch": 4.22, "learning_rate": 3.539184378650861e-05, "loss": 0.4085, "step": 2330 },
    { "epoch": 4.24, "learning_rate": 3.524934431617455e-05, "loss": 0.4724, "step": 2340 },
    { "epoch": 4.26, "learning_rate": 3.510644343695958e-05, "loss": 0.4201, "step": 2350 },
    { "epoch": 4.28, "learning_rate": 3.496314674548338e-05, "loss": 0.4801, "step": 2360 },
    { "epoch": 4.29, "learning_rate": 3.4819459853867326e-05, "loss": 0.4561, "step": 2370 },
    { "epoch": 4.31, "learning_rate": 3.467538838951477e-05, "loss": 0.4789, "step": 2380 },
    { "epoch": 4.33, "learning_rate": 3.4530937994890584e-05, "loss": 0.5042, "step": 2390 },
    { "epoch": 4.35, "learning_rate": 3.438611432730018e-05, "loss": 0.4384, "step": 2400 },
    { "epoch": 4.37, "learning_rate": 3.4240923058668005e-05, "loss": 0.471, "step": 2410 },
    { "epoch": 4.38, "learning_rate": 3.409536987531532e-05, "loss": 0.4493, "step": 2420 },
    { "epoch": 4.4, "learning_rate": 3.3949460477737554e-05, "loss": 0.4414, "step": 2430 },
    { "epoch": 4.42, "learning_rate": 3.380320058038105e-05, "loss": 0.4743, "step": 2440 },
    { "epoch": 4.44, "learning_rate": 3.365659591141922e-05, "loss": 0.4236, "step": 2450 },
    { "epoch": 4.46, "learning_rate": 3.350965221252824e-05, "loss": 0.4786, "step": 2460 },
    { "epoch": 4.47, "learning_rate": 3.336237523866218e-05, "loss": 0.4143, "step": 2470 },
    { "epoch": 4.49, "learning_rate": 3.321477075782761e-05, "loss": 0.4266, "step": 2480 },
    { "epoch": 4.51, "learning_rate": 3.306684455085766e-05, "loss": 0.4141, "step": 2490 },
    { "epoch": 4.53, "learning_rate": 3.291860241118571e-05, "loss": 0.449, "step": 2500 },
    { "epoch": 4.55, "learning_rate": 3.277005014461841e-05, "loss": 0.4773, "step": 2510 },
    { "epoch": 4.56, "learning_rate": 3.2621193569108304e-05, "loss": 0.4369, "step": 2520 },
    { "epoch": 4.58, "learning_rate": 3.247203851452604e-05, "loss": 0.4379, "step": 2530 },
    { "epoch": 4.6, "learning_rate": 3.2322590822431974e-05, "loss": 0.4297, "step": 2540 },
    { "epoch": 4.62, "learning_rate": 3.217285634584743e-05, "loss": 0.4278, "step": 2550 },
    { "epoch": 4.64, "learning_rate": 3.2022840949025476e-05, "loss": 0.5285, "step": 2560 },
    { "epoch": 4.65, "learning_rate": 3.187255050722121e-05, "loss": 0.507, "step": 2570 },
    { "epoch": 4.67, "learning_rate": 3.172199090646169e-05, "loss": 0.4244, "step": 2580 },
    { "epoch": 4.69, "learning_rate": 3.157116804331542e-05, "loss": 0.4728, "step": 2590 },
    { "epoch": 4.71, "learning_rate": 3.1420087824661423e-05, "loss": 0.4355, "step": 2600 },
    { "epoch": 4.73, "learning_rate": 3.1268756167457825e-05, "loss": 0.4467, "step": 2610 },
    { "epoch": 4.75, "learning_rate": 3.1117178998510235e-05, "loss": 0.4502, "step": 2620 },
    { "epoch": 4.76, "learning_rate": 3.0965362254239544e-05, "loss": 0.4058, "step": 2630 },
    { "epoch": 4.78, "learning_rate": 3.081331188044947e-05, "loss": 0.4205, "step": 2640 },
    { "epoch": 4.8, "learning_rate": 3.066103383209364e-05, "loss": 0.4298, "step": 2650 },
    { "epoch": 4.82, "learning_rate": 3.050853407304245e-05, "loss": 0.4399, "step": 2660 },
    { "epoch": 4.84, "learning_rate": 3.0355818575849445e-05, "loss": 0.4331, "step": 2670 },
    { "epoch": 4.85, "learning_rate": 3.020289332151738e-05, "loss": 0.4313, "step": 2680 },
    { "epoch": 4.87, "learning_rate": 3.0049764299264045e-05, "loss": 0.4782, "step": 2690 },
    { "epoch": 4.89, "learning_rate": 2.9896437506287654e-05, "loss": 0.4071, "step": 2700 },
    { "epoch": 4.91, "learning_rate": 2.9742918947532e-05, "loss": 0.4409, "step": 2710 },
    { "epoch": 4.93, "learning_rate": 2.9589214635451225e-05, "loss": 0.3998, "step": 2720 },
    { "epoch": 4.94, "learning_rate": 2.9435330589774402e-05, "loss": 0.409, "step": 2730 },
    { "epoch": 4.96, "learning_rate": 2.9281272837269748e-05, "loss": 0.4147, "step": 2740 },
    { "epoch": 4.98, "learning_rate": 2.9127047411508595e-05, "loss": 0.4297, "step": 2750 },
    { "epoch": 5.0, "learning_rate": 2.8972660352629084e-05, "loss": 0.472, "step": 2760 },
    { "epoch": 5.0, "eval_loss": 0.5167461037635803, "eval_runtime": 326.8927, "eval_samples_per_second": 6.011, "eval_steps_per_second": 3.007, "eval_wer": 0.25793235167272505, "step": 2760 },
    { "epoch": 5.02, "learning_rate": 2.8818117707099622e-05, "loss": 0.4107, "step": 2770 },
    { "epoch": 5.04, "learning_rate": 2.8663425527482052e-05, "loss": 0.417, "step": 2780 },
    { "epoch": 5.05, "learning_rate": 2.8508589872194637e-05, "loss": 0.365, "step": 2790 },
    { "epoch": 5.07, "learning_rate": 2.835361680527475e-05, "loss": 0.3937, "step": 2800 },
    { "epoch": 5.09, "learning_rate": 2.819851239614143e-05, "loss": 0.4067, "step": 2810 },
    { "epoch": 5.11, "learning_rate": 2.8043282719357615e-05, "loss": 0.3918, "step": 2820 },
    { "epoch": 5.13, "learning_rate": 2.78879338543923e-05, "loss": 0.4142, "step": 2830 },
    { "epoch": 5.14, "learning_rate": 2.7732471885382388e-05, "loss": 0.3869, "step": 2840 },
    { "epoch": 5.16, "learning_rate": 2.757690290089443e-05, "loss": 0.4078, "step": 2850 },
    { "epoch": 5.18, "learning_rate": 2.742123299368617e-05, "loss": 0.3999, "step": 2860 },
    { "epoch": 5.2, "learning_rate": 2.7265468260467935e-05, "loss": 0.3825, "step": 2870 },
    { "epoch": 5.22, "learning_rate": 2.7109614801663834e-05, "loss": 0.3845, "step": 2880 },
    { "epoch": 5.24, "learning_rate": 2.6953678721172866e-05, "loss": 0.352, "step": 2890 },
    { "epoch": 5.25, "learning_rate": 2.6797666126129856e-05, "loss": 0.3918, "step": 2900 },
    { "epoch": 5.27, "learning_rate": 2.664158312666625e-05, "loss": 0.3916, "step": 2910 },
    { "epoch": 5.29, "learning_rate": 2.648543583567088e-05, "loss": 0.3676, "step": 2920 },
    { "epoch": 5.31, "learning_rate": 2.6329230368550467e-05, "loss": 0.3658, "step": 2930 },
    { "epoch": 5.33, "learning_rate": 2.61729728429902e-05, "loss": 0.345, "step": 2940 },
    { "epoch": 5.34, "learning_rate": 2.601666937871407e-05, "loss": 0.3782, "step": 2950 },
    { "epoch": 5.36, "learning_rate": 2.5860326097245252e-05, "loss": 0.476, "step": 2960 },
    { "epoch": 5.38, "learning_rate": 2.5703949121666332e-05, "loss": 0.3827, "step": 2970 },
    { "epoch": 5.4, "learning_rate": 2.554754457637949e-05, "loss": 0.3718, "step": 2980 },
    { "epoch": 5.42, "learning_rate": 2.539111858686667e-05, "loss": 0.3581, "step": 2990 },
    { "epoch": 5.43, "learning_rate": 2.5234677279449665e-05, "loss": 0.4227, "step": 3000 },
    { "epoch": 5.45, "learning_rate": 2.5078226781050162e-05, "loss": 0.3806, "step": 3010 },
    { "epoch": 5.47, "learning_rate": 2.492177321894984e-05, "loss": 0.3421, "step": 3020 },
    { "epoch": 5.49, "learning_rate": 2.476532272055034e-05, "loss": 0.3527, "step": 3030 },
    { "epoch": 5.51, "learning_rate": 2.4608881413133333e-05, "loss": 0.3788, "step": 3040 },
    { "epoch": 5.52, "learning_rate": 2.4452455423620513e-05, "loss": 0.4043, "step": 3050 },
    { "epoch": 5.54, "learning_rate": 2.429605087833367e-05, "loss": 0.3767, "step": 3060 },
    { "epoch": 5.56, "learning_rate": 2.4139673902754754e-05, "loss": 0.37, "step": 3070 },
    { "epoch": 5.58, "learning_rate": 2.3983330621285934e-05, "loss": 0.3796, "step": 3080 },
    { "epoch": 5.6, "learning_rate": 2.3827027157009806e-05, "loss": 0.3464, "step": 3090 },
    { "epoch": 5.62, "learning_rate": 2.367076963144954e-05, "loss": 0.3776, "step": 3100 },
    { "epoch": 5.63, "learning_rate": 2.3514564164329127e-05, "loss": 0.3424, "step": 3110 },
    { "epoch": 5.65, "learning_rate": 2.335841687333375e-05, "loss": 0.344, "step": 3120 },
    { "epoch": 5.67, "learning_rate": 2.3202333873870153e-05, "loss": 0.378, "step": 3130 },
    { "epoch": 5.69, "learning_rate": 2.304632127882714e-05, "loss": 0.3645, "step": 3140 },
    { "epoch": 5.71, "learning_rate": 2.2890385198336172e-05, "loss": 0.4108, "step": 3150 },
    { "epoch": 5.72, "learning_rate": 2.273453173953207e-05, "loss": 0.4115, "step": 3160 },
    { "epoch": 5.74, "learning_rate": 2.2578767006313836e-05, "loss": 0.3887, "step": 3170 },
    { "epoch": 5.76, "learning_rate": 2.242309709910558e-05, "loss": 0.3958, "step": 3180 },
    { "epoch": 5.78, "learning_rate": 2.2267528114617618e-05, "loss": 0.3292, "step": 3190 },
    { "epoch": 5.8, "learning_rate": 2.2112066145607703e-05, "loss": 0.3516, "step": 3200 },
    { "epoch": 5.81, "learning_rate": 2.1956717280642394e-05, "loss": 0.3596, "step": 3210 },
    { "epoch": 5.83, "learning_rate": 2.180148760385858e-05, "loss": 0.3403, "step": 3220 },
    { "epoch": 5.85, "learning_rate": 2.1646383194725255e-05, "loss": 0.3735, "step": 3230 },
    { "epoch": 5.87, "learning_rate": 2.149141012780538e-05, "loss": 0.3716, "step": 3240 },
    { "epoch": 5.89, "learning_rate": 2.1336574472517957e-05, "loss": 0.3734, "step": 3250 },
    { "epoch": 5.9, "learning_rate": 2.1181882292900387e-05, "loss": 0.3646, "step": 3260 },
    { "epoch": 5.92, "learning_rate": 2.1027339647370928e-05, "loss": 0.3552, "step": 3270 },
    { "epoch": 5.94, "learning_rate": 2.0872952588491417e-05, "loss": 0.3759, "step": 3280 },
    { "epoch": 5.96, "learning_rate": 2.071872716273026e-05, "loss": 0.3735, "step": 3290 },
    { "epoch": 5.98, "learning_rate": 2.0564669410225607e-05, "loss": 0.3723, "step": 3300 },
    { "epoch": 6.0, "learning_rate": 2.0410785364548788e-05, "loss": 0.3913, "step": 3310 },
    { "epoch": 6.0, "eval_loss": 0.4552823305130005, "eval_runtime": 329.9327, "eval_samples_per_second": 5.956, "eval_steps_per_second": 2.979, "eval_wer": 0.2334729837964389, "step": 3312 },
    { "epoch": 6.01, "learning_rate": 2.025708105246801e-05, "loss": 0.3719, "step": 3320 },
    { "epoch": 6.03, "learning_rate": 2.0103562493712345e-05, "loss": 0.3468, "step": 3330 },
    { "epoch": 6.05, "learning_rate": 1.9950235700735954e-05, "loss": 0.3125, "step": 3340 },
    { "epoch": 6.07, "learning_rate": 1.979710667848262e-05, "loss": 0.353, "step": 3350 },
    { "epoch": 6.09, "learning_rate": 1.964418142415056e-05, "loss": 0.328, "step": 3360 },
    { "epoch": 6.1, "learning_rate": 1.949146592695755e-05, "loss": 0.3742, "step": 3370 },
    { "epoch": 6.12, "learning_rate": 1.933896616790636e-05, "loss": 0.3439, "step": 3380 },
    { "epoch": 6.14, "learning_rate": 1.918668811955054e-05, "loss": 0.3292, "step": 3390 },
    { "epoch": 6.16, "learning_rate": 1.903463774576045e-05, "loss": 0.3368, "step": 3400 },
    { "epoch": 6.18, "learning_rate": 1.8882821001489764e-05, "loss": 0.3332, "step": 3410 },
    { "epoch": 6.2, "learning_rate": 1.8731243832542174e-05, "loss": 0.3517, "step": 3420 },
    { "epoch": 6.21, "learning_rate": 1.8579912175338586e-05, "loss": 0.3233, "step": 3430 },
    { "epoch": 6.23, "learning_rate": 1.8428831956684578e-05, "loss": 0.3212, "step": 3440 },
    { "epoch": 6.25, "learning_rate": 1.827800909353831e-05, "loss": 0.3421, "step": 3450 },
    { "epoch": 6.27, "learning_rate": 1.8127449492778797e-05, "loss": 0.3297, "step": 3460 },
    { "epoch": 6.29, "learning_rate": 1.7977159050974523e-05, "loss": 0.3569, "step": 3470 },
    { "epoch": 6.3, "learning_rate": 1.7827143654152566e-05, "loss": 0.3342, "step": 3480 },
    { "epoch": 6.32, "learning_rate": 1.767740917756803e-05, "loss": 0.3201, "step": 3490 },
    { "epoch": 6.34, "learning_rate": 1.752796148547397e-05, "loss": 0.3487, "step": 3500 },
    { "epoch": 6.36, "learning_rate": 1.7378806430891705e-05, "loss": 0.3121, "step": 3510 },
    { "epoch": 6.38, "learning_rate": 1.72299498553816e-05, "loss": 0.3449, "step": 3520 },
    { "epoch": 6.39, "learning_rate": 1.708139758881429e-05, "loss": 0.3271, "step": 3530 },
    { "epoch": 6.41, "learning_rate": 1.693315544914234e-05, "loss": 0.3451, "step": 3540 },
    { "epoch": 6.43, "learning_rate": 1.67852292421724e-05, "loss": 0.3488, "step": 3550 },
    { "epoch": 6.45, "learning_rate": 1.6637624761337828e-05, "loss": 0.3005, "step": 3560 },
    { "epoch": 6.47, "learning_rate": 1.6490347787471764e-05, "loss": 0.3687, "step": 3570 },
    { "epoch": 6.48, "learning_rate": 1.634340408858079e-05, "loss": 0.3181, "step": 3580 },
    { "epoch": 6.5, "learning_rate": 1.619679941961895e-05, "loss": 0.3097, "step": 3590 },
    { "epoch": 6.52, "learning_rate": 1.6050539522262448e-05, "loss": 0.3332, "step": 3600 },
    { "epoch": 6.54, "learning_rate": 1.5904630124684685e-05, "loss": 0.3495, "step": 3610 },
    { "epoch": 6.56, "learning_rate": 1.5759076941331998e-05, "loss": 0.3475, "step": 3620 },
    { "epoch": 6.58, "learning_rate": 1.5613885672699823e-05, "loss": 0.33, "step": 3630 },
    { "epoch": 6.59, "learning_rate": 1.5469062005109425e-05, "loss": 0.3128, "step": 3640 },
    { "epoch": 6.61, "learning_rate": 1.532461161048523e-05, "loss": 0.319, "step": 3650 },
    { "epoch": 6.63, "learning_rate": 1.5180540146132676e-05, "loss": 0.2913, "step": 3660 },
    { "epoch": 6.65, "learning_rate": 1.5036853254516627e-05, "loss": 0.3207, "step": 3670 },
    { "epoch": 6.67, "learning_rate": 1.489355656304042e-05, "loss": 0.3404, "step": 3680 },
    { "epoch": 6.68, "learning_rate": 1.4750655683825454e-05, "loss": 0.3198, "step": 3690 },
    { "epoch": 6.7, "learning_rate": 1.46081562134914e-05, "loss": 0.3206, "step": 3700 },
    { "epoch": 6.72, "learning_rate": 1.4466063732937002e-05, "loss": 0.3159, "step": 3710 },
    { "epoch": 6.74, "learning_rate": 1.4324383807121544e-05, "loss": 0.3325, "step": 3720 },
    { "epoch": 6.76, "learning_rate": 1.4183121984846836e-05, "loss": 0.3358, "step": 3730 },
    { "epoch": 6.77, "learning_rate": 1.4042283798539956e-05, "loss": 0.3157, "step": 3740 },
    { "epoch": 6.79, "learning_rate": 1.3901874764036555e-05, "loss": 0.3324, "step": 3750 },
    { "epoch": 6.81, "learning_rate": 1.3761900380364834e-05, "loss": 0.3592, "step": 3760 },
    { "epoch": 6.83, "learning_rate": 1.3622366129530168e-05, "loss": 0.3314, "step": 3770 },
    { "epoch": 6.85, "learning_rate": 1.3483277476300444e-05, "loss": 0.328, "step": 3780 },
    { "epoch": 6.86, "learning_rate": 1.3344639867991959e-05, "loss": 0.3109, "step": 3790 },
    { "epoch": 6.88, "learning_rate": 1.3206458734256177e-05, "loss": 0.3078, "step": 3800 },
    { "epoch": 6.9, "learning_rate": 1.3068739486867016e-05, "loss": 0.2891, "step": 3810 },
    { "epoch": 6.92, "learning_rate": 1.2931487519508918e-05, "loss": 0.3151, "step": 3820 },
    { "epoch": 6.94, "learning_rate": 1.2794708207565607e-05, "loss": 0.3348, "step": 3830 },
    { "epoch": 6.96, "learning_rate": 1.2658406907909565e-05, "loss": 0.3113, "step": 3840 },
    { "epoch": 6.97, "learning_rate": 1.2522588958692244e-05, "loss": 0.3002, "step": 3850 },
    { "epoch": 6.99, "learning_rate": 1.2387259679134966e-05, "loss": 0.3306, "step": 3860 },
    { "epoch": 7.0, "eval_loss": 0.4476345479488373, "eval_runtime": 348.5539, "eval_samples_per_second": 5.638, "eval_steps_per_second": 2.82, "eval_wer": 0.21141642535888117, "step": 3864 },
    { "epoch": 7.01, "learning_rate": 1.2252424369320657e-05, "loss": 0.358, "step": 3870 },
    { "epoch": 7.03, "learning_rate": 1.2118088309986217e-05, "loss": 0.2947, "step": 3880 },
    { "epoch": 7.05, "learning_rate": 1.198425676231574e-05, "loss": 0.2911, "step": 3890 },
    { "epoch": 7.07, "learning_rate": 1.1850934967734444e-05, "loss": 0.3003, "step": 3900 },
    { "epoch": 7.08, "learning_rate": 1.1718128147703405e-05, "loss": 0.301, "step": 3910 },
    { "epoch": 7.1, "learning_rate": 1.1585841503515058e-05, "loss": 0.2786, "step": 3920 },
    { "epoch": 7.12, "learning_rate": 1.1454080216089468e-05, "loss": 0.2985, "step": 3930 },
    { "epoch": 7.14, "learning_rate": 1.1322849445771475e-05, "loss": 0.2934, "step": 3940 },
    { "epoch": 7.16, "learning_rate": 1.119215433212855e-05, "loss": 0.3019, "step": 3950 },
    { "epoch": 7.17, "learning_rate": 1.1061999993749509e-05, "loss": 0.2956, "step": 3960 },
    { "epoch": 7.19, "learning_rate": 1.0932391528044094e-05, "loss": 0.3472, "step": 3970 },
    { "epoch": 7.21, "learning_rate": 1.080333401104326e-05, "loss": 0.2843, "step": 3980 },
    { "epoch": 7.23, "learning_rate": 1.0674832497200426e-05, "loss": 0.2653, "step": 3990 },
    { "epoch": 7.25, "learning_rate": 1.0546892019193525e-05, "loss": 0.2882, "step": 4000 },
    { "epoch": 7.26, "learning_rate": 1.0419517587727874e-05, "loss": 0.2849, "step": 4010 },
    { "epoch": 7.28, "learning_rate": 1.0292714191339947e-05, "loss": 0.3381, "step": 4020 },
    { "epoch": 7.3, "learning_rate": 1.0166486796202012e-05, "loss": 0.3139, "step": 4030 },
    { "epoch": 7.32, "learning_rate": 1.0040840345927624e-05, "loss": 0.2992, "step": 4040 },
    { "epoch": 7.34, "learning_rate": 9.915779761377989e-06, "loss": 0.2963, "step": 4050 },
    { "epoch": 7.35, "learning_rate": 9.791309940469282e-06, "loss": 0.2862, "step": 4060 },
    { "epoch": 7.37, "learning_rate": 9.667435757980808e-06, "loss": 0.3125, "step": 4070 },
    { "epoch": 7.39, "learning_rate": 9.544162065364081e-06, "loss": 0.287, "step": 4080 },
    { "epoch": 7.41, "learning_rate": 9.421493690552813e-06, "loss": 0.2984, "step": 4090 },
    { "epoch": 7.43, "learning_rate": 9.299435437773846e-06, "loss": 0.3244, "step": 4100 },
    { "epoch": 7.45, "learning_rate": 9.177992087358998e-06, "loss": 0.3107, "step": 4110 },
    { "epoch": 7.46, "learning_rate": 9.057168395557804e-06, "loss": 0.2901, "step": 4120 },
    { "epoch": 7.48, "learning_rate": 8.93696909435132e-06, "loss": 0.2779, "step": 4130 },
    { "epoch": 7.5, "learning_rate": 8.817398891266718e-06, "loss": 0.3215, "step": 4140 },
    { "epoch": 7.52, "learning_rate": 8.698462469192967e-06, "loss": 0.2844, "step": 4150 },
    { "epoch": 7.54, "learning_rate": 8.580164486197418e-06, "loss": 0.2986, "step": 4160 },
    { "epoch": 7.55, "learning_rate": 8.462509575343372e-06, "loss": 0.3233, "step": 4170 },
    { "epoch": 7.57, "learning_rate": 8.34550234450861e-06, "loss": 0.3123, "step": 4180 },
    { "epoch": 7.59, "learning_rate": 8.229147376204971e-06, "loss": 0.3097, "step": 4190 },
    { "epoch": 7.61, "learning_rate": 8.113449227398848e-06, "loss": 0.3313, "step": 4200 },
    { "epoch": 7.63, "learning_rate": 7.99841242933273e-06, "loss": 0.2779, "step": 4210 },
    { "epoch": 7.64, "learning_rate": 7.884041487347738e-06, "loss": 0.334, "step": 4220 },
    { "epoch": 7.66, "learning_rate": 7.770340880707164e-06, "loss": 0.2955, "step": 4230 },
    { "epoch": 7.68, "learning_rate": 7.657315062421073e-06, "loss": 0.2819, "step": 4240 },
    { "epoch": 7.7, "learning_rate": 7.544968459071852e-06, "loss": 0.2979, "step": 4250 },
    { "epoch": 7.72, "learning_rate": 7.433305470640911e-06, "loss": 0.297, "step": 4260 },
    { "epoch": 7.73, "learning_rate": 7.3223304703363135e-06, "loss": 0.303, "step": 4270 },
    { "epoch": 7.75, "learning_rate": 7.212047804421518e-06, "loss": 0.2679, "step": 4280 },
    { "epoch": 7.77, "learning_rate": 7.102461792045157e-06, "loss": 0.3218, "step": 4290 },
    { "epoch": 7.79, "learning_rate": 6.993576725071887e-06, "loss": 0.2984, "step": 4300 },
    { "epoch": 7.81, "learning_rate": 6.885396867914301e-06, "loss": 0.2909, "step": 4310 },
    { "epoch": 7.83, "learning_rate": 6.77792645736588e-06, "loss": 0.3112, "step": 4320 },
    { "epoch": 7.84, "learning_rate": 6.671169702435126e-06, "loss": 0.3036, "step": 4330 },
    { "epoch": 7.86, "learning_rate": 6.565130784180668e-06, "loss": 0.2577, "step": 4340 },
    { "epoch": 7.88, "learning_rate": 6.4598138555475335e-06, "loss": 0.3099, "step": 4350 },
    { "epoch": 7.9, "learning_rate": 6.355223041204497e-06, "loss": 0.256, "step": 4360 },
    { "epoch": 7.92, "learning_rate": 6.251362437382544e-06, "loss": 0.2952, "step": 4370 },
    { "epoch": 7.93, "learning_rate": 6.148236111714445e-06, "loss": 0.2903, "step": 4380 },
    { "epoch": 7.95, "learning_rate": 6.045848103075422e-06, "loss": 0.3062, "step": 4390 },
    { "epoch": 7.97, "learning_rate": 5.944202421425016e-06, "loss": 0.3072, "step": 4400 },
    { "epoch": 7.99, "learning_rate": 5.8433030476500175e-06, "loss": 0.3028, "step": 4410 },
    { "epoch": 8.0, "eval_loss": 0.4326765537261963, "eval_runtime": 348.9244, "eval_samples_per_second": 5.632, "eval_steps_per_second": 2.817, "eval_wer": 0.20430041279033948, "step": 4416 },
    { "epoch": 8.01, "learning_rate": 5.74315393340854e-06, "loss": 0.3111, "step": 4420 },
    { "epoch": 8.03, "learning_rate": 5.643759000975285e-06, "loss": 0.259, "step": 4430 },
    { "epoch": 8.04, "learning_rate": 5.545122143087916e-06, "loss": 0.2898, "step": 4440 },
    { "epoch": 8.06, "learning_rate": 5.447247222794585e-06, "loss": 0.2948, "step": 4450 },
    { "epoch": 8.08, "learning_rate": 5.350138073302674e-06, "loss": 0.2785, "step": 4460 },
    { "epoch": 8.1, "learning_rate": 5.253798497828643e-06, "loss": 0.286, "step": 4470 },
    { "epoch": 8.12, "learning_rate": 5.15823226944909e-06, "loss": 0.2898, "step": 4480 },
    { "epoch": 8.13, "learning_rate": 5.06344313095298e-06, "loss": 0.2911, "step": 4490 },
    { "epoch": 8.15, "learning_rate": 4.969434794695055e-06, "loss": 0.2758, "step": 4500 },
    { "epoch": 8.17, "learning_rate": 4.876210942450457e-06, "loss": 0.2824, "step": 4510 },
    { "epoch": 8.19, "learning_rate": 4.783775225270507e-06, "loss": 0.2816, "step": 4520 },
    { "epoch": 8.21, "learning_rate": 4.692131263339744e-06, "loss": 0.2624, "step": 4530 },
    { "epoch": 8.22, "learning_rate": 4.601282645834127e-06, "loss": 0.2857, "step": 4540 },
    { "epoch": 8.24, "learning_rate": 4.511232930780465e-06, "loss": 0.2785, "step": 4550 },
    { "epoch": 8.26, "learning_rate": 4.4219856449170785e-06, "loss": 0.2722, "step": 4560 },
    { "epoch": 8.28, "learning_rate": 4.333544283555674e-06, "loss": 0.2804, "step": 4570 },
    { "epoch": 8.3, "learning_rate": 4.245912310444453e-06, "loss": 0.2692, "step": 4580 },
    { "epoch": 8.31, "learning_rate": 4.15909315763243e-06, "loss": 0.2829, "step": 4590 },
    { "epoch": 8.33, "learning_rate": 4.073090225335077e-06, "loss": 0.2711, "step": 4600 },
    { "epoch": 8.35, "learning_rate": 3.98790688180111e-06, "loss": 0.2806, "step": 4610 },
    { "epoch": 8.37, "learning_rate": 3.903546463180577e-06, "loss": 0.2831, "step": 4620 },
    { "epoch": 8.39, "learning_rate": 3.820012273394224e-06, "loss": 0.2816, "step": 4630 },
    { "epoch": 8.41, "learning_rate": 3.737307584004063e-06, "loss": 0.2916, "step": 4640 },
    { "epoch": 8.42, "learning_rate": 3.655435634085283e-06, "loss": 0.2673, "step": 4650 },
    { "epoch": 8.44, "learning_rate": 3.5743996300993677e-06, "loss": 0.2895, "step": 4660 },
    { "epoch": 8.46, "learning_rate": 3.4942027457685266e-06, "loss": 0.2993, "step": 4670 },
    { "epoch": 8.48, "learning_rate": 3.4148481219513915e-06, "loss": 0.2603, "step": 4680 },
    { "epoch": 8.5, "learning_rate": 3.3363388665200197e-06, "loss": 0.2875, "step": 4690 },
    { "epoch": 8.51, "learning_rate": 3.258678054238157e-06, "loss": 0.2644, "step": 4700 },
    { "epoch": 8.53, "learning_rate": 3.18186872664083e-06, "loss": 0.2659, "step": 4710 },
    { "epoch": 8.55, "learning_rate": 3.1059138919152215e-06, "loss": 0.2907, "step": 4720 },
    { "epoch": 8.57, "learning_rate": 3.0308165247828673e-06, "loss": 0.2609, "step": 4730 },
    { "epoch": 8.59, "learning_rate": 2.956579566383136e-06, "loss": 0.263, "step": 4740 },
    { "epoch": 8.6, "learning_rate": 2.8832059241580556e-06, "loss": 0.2542, "step": 4750 },
    { "epoch": 8.62, "learning_rate": 2.810698471738432e-06, "loss": 0.251, "step": 4760 },
    { "epoch": 8.64, "learning_rate": 2.739060048831327e-06, "loss": 0.2873, "step": 4770 },
    { "epoch": 8.66, "learning_rate": 2.6682934611088033e-06, "loss": 0.2812, "step": 4780 },
    { "epoch": 8.68, "learning_rate": 2.5984014800980947e-06, "loss": 0.2952, "step": 4790 },
    { "epoch": 8.69, "learning_rate": 2.52938684307302e-06, "loss": 0.2736, "step": 4800 },
    { "epoch": 8.71, "learning_rate": 2.4612522529467915e-06, "loss": 0.2394, "step": 4810 },
    { "epoch": 8.73, "learning_rate": 2.39400037816617e-06, "loss": 0.2787, "step": 4820 },
    { "epoch": 8.75, "learning_rate": 2.327633852606942e-06, "loss": 0.2489, "step": 4830 },
    { "epoch": 8.77, "learning_rate": 2.2621552754707565e-06, "loss": 0.2807, "step": 4840 },
    { "epoch": 8.79, "learning_rate": 2.1975672111833573e-06, "loss": 0.2721, "step": 4850 },
    { "epoch": 8.8, "learning_rate": 2.133872189294128e-06, "loss": 0.2644, "step": 4860 },
    { "epoch": 8.82, "learning_rate": 2.0710727043770305e-06, "loss": 0.2604, "step": 4870 },
    { "epoch": 8.84, "learning_rate": 2.009171215932909e-06, "loss": 0.2694, "step": 4880 },
    { "epoch": 8.86, "learning_rate": 1.9481701482931547e-06, "loss": 0.2327, "step": 4890 },
    { "epoch": 8.88, "learning_rate": 1.8880718905247757e-06, "loss": 0.2637, "step": 4900 },
    { "epoch": 8.89, "learning_rate": 1.8288787963368054e-06, "loss": 0.2854, "step": 4910 },
    { "epoch": 8.91, "learning_rate": 1.770593183988148e-06, "loss": 0.2998, "step": 4920 },
    { "epoch": 8.93, "learning_rate": 1.7132173361967708e-06, "loss": 0.2399, "step": 4930 },
    { "epoch": 8.95, "learning_rate": 1.6567535000503094e-06, "loss": 0.2958, "step": 4940 },
    { "epoch": 8.97, "learning_rate": 1.601203886918054e-06, "loss": 0.2928, "step": 4950 },
    { "epoch": 8.98, "learning_rate": 1.5465706723643475e-06, "loss": 0.317, "step": 4960 },
    { "epoch": 9.0, "eval_loss": 0.4354780912399292, "eval_runtime": 331.7778, "eval_samples_per_second": 5.923, "eval_steps_per_second": 2.963, "eval_wer": 0.20325303431704764, "step": 4968 },
{ |
|
"epoch": 9.0, |
|
"learning_rate": 1.492855996063386e-06, |
|
"loss": 0.3201, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 1.4400619617154e-06, |
|
"loss": 0.2655, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 1.3881906369642983e-06, |
|
"loss": 0.2739, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 1.3372440533166553e-06, |
|
"loss": 0.2593, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 1.2872242060621708e-06, |
|
"loss": 0.2424, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 1.2381330541955199e-06, |
|
"loss": 0.2751, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 1.1899725203396218e-06, |
|
"loss": 0.2338, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 1.142744490670361e-06, |
|
"loss": 0.2638, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 1.096450814842684e-06, |
|
"loss": 0.2569, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 1.0510933059181993e-06, |
|
"loss": 0.2711, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 1.0066737402941367e-06, |
|
"loss": 0.2768, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 9.631938576337983e-07, |
|
"loss": 0.2725, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 9.206553607984086e-07, |
|
"loss": 0.262, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 8.790599157804363e-07, |
|
"loss": 0.2672, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 8.384091516383364e-07, |
|
"loss": 0.2919, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 7.987046604327531e-07, |
|
"loss": 0.2694, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 7.599479971641732e-07, |
|
"loss": 0.2445, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 7.22140679712019e-07, |
|
"loss": 0.2644, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 6.852841887752038e-07, |
|
"loss": 0.2533, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 6.493799678141427e-07, |
|
"loss": 0.2593, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 6.144294229942171e-07, |
|
"loss": 0.2769, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 5.804339231306993e-07, |
|
"loss": 0.2644, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 5.473947996351564e-07, |
|
"loss": 0.2774, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 5.153133464632976e-07, |
|
"loss": 0.2439, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 4.841908200643008e-07, |
|
"loss": 0.2417, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 4.540284393315969e-07, |
|
"loss": 0.274, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 4.24827385555146e-07, |
|
"loss": 0.2509, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 3.965888023751585e-07, |
|
"loss": 0.2868, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 3.6931379573731107e-07, |
|
"loss": 0.2394, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 3.4300343384944535e-07, |
|
"loss": 0.2635, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 3.1765874713970687e-07, |
|
"loss": 0.2888, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 2.9328072821621367e-07, |
|
"loss": 0.2766, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 2.698703318281648e-07, |
|
"loss": 0.2845, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 2.4742847482845387e-07, |
|
"loss": 0.2584, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 2.2595603613776717e-07, |
|
"loss": 0.2463, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 2.0545385671014995e-07, |
|
"loss": 0.2814, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 1.8592273950007732e-07, |
|
"loss": 0.2984, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.673634494310128e-07, |
|
"loss": 0.2583, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 1.4977671336543486e-07, |
|
"loss": 0.2807, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 1.3316322007638492e-07, |
|
"loss": 0.2666, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 1.1752362022048602e-07, |
|
"loss": 0.2781, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 1.0285852631245485e-07, |
|
"loss": 0.2787, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 8.916851270112936e-08, |
|
"loss": 0.2868, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 7.645411554695337e-08, |
|
"loss": 0.2587, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 6.471583280099613e-08, |
|
"loss": 0.2656, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 5.3954124185440216e-08, |
|
"loss": 0.281, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 4.416941117557638e-08, |
|
"loss": 0.2732, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 3.536207698330851e-08, |
|
"loss": 0.241, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 2.753246654212671e-08, |
|
"loss": 0.2628, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 2.0680886493620876e-08, |
|
"loss": 0.2599, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 1.4807605175445882e-08, |
|
"loss": 0.2649, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 9.912852610832746e-09, |
|
"loss": 0.266, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 5.996820499573619e-09, |
|
"loss": 0.244, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 3.0596622105083516e-09, |
|
"loss": 0.2666, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 1.101492775523738e-09, |
|
"loss": 0.2807, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 1.2238888505156088e-10, |
|
"loss": 0.2494, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 0.44054338335990906, |
|
"eval_runtime": 327.5646, |
|
"eval_samples_per_second": 5.999, |
|
"eval_steps_per_second": 3.001, |
|
"eval_wer": 0.20223646109297025, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 5520, |
|
"total_flos": 5.359318129634805e+19, |
|
"train_loss": 1.2389012200676877, |
|
"train_runtime": 69709.0182, |
|
"train_samples_per_second": 2.537, |
|
"train_steps_per_second": 0.079 |
|
} |
|
], |
|
"max_steps": 5520, |
|
"num_train_epochs": 10, |
|
"total_flos": 5.359318129634805e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|