|
{ |
|
"best_metric": 1.0490120649337769, |
|
"best_model_checkpoint": "/work/twskvfb446/facebook/wav2vec2-large-lv60_voidful/phoneme_byt5_SpeechMixEEDT5_w2v2-large_t5lephone-small_bs256/checkpoint-7868", |
|
"epoch": 13.998445480790584, |
|
"global_step": 7868, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 4.9943, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.4785, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.2e-05, |
|
"loss": 4.191, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 3.9363, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2e-05, |
|
"loss": 3.6209, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.4e-05, |
|
"loss": 3.3071, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.8e-05, |
|
"loss": 3.0288, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 2.844, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.6e-05, |
|
"loss": 2.7064, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4e-05, |
|
"loss": 2.5839, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.997613365155132e-05, |
|
"loss": 2.505, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.995226730310263e-05, |
|
"loss": 2.4443, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.992840095465394e-05, |
|
"loss": 2.3822, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.9904534606205255e-05, |
|
"loss": 2.2834, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.988066825775656e-05, |
|
"loss": 2.1641, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.985680190930788e-05, |
|
"loss": 2.092, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.983293556085919e-05, |
|
"loss": 2.025, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.9809069212410506e-05, |
|
"loss": 1.9894, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.978520286396182e-05, |
|
"loss": 1.9449, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.976133651551313e-05, |
|
"loss": 1.9187, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.973747016706444e-05, |
|
"loss": 1.8856, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.971360381861575e-05, |
|
"loss": 1.8647, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.9689737470167066e-05, |
|
"loss": 1.8505, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.966587112171838e-05, |
|
"loss": 1.8383, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.9642004773269695e-05, |
|
"loss": 1.8252, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.961813842482101e-05, |
|
"loss": 1.8093, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 3.959427207637232e-05, |
|
"loss": 1.7959, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 3.957040572792363e-05, |
|
"loss": 1.7828, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.954653937947494e-05, |
|
"loss": 1.7763, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.9522673031026254e-05, |
|
"loss": 1.7738, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.949880668257757e-05, |
|
"loss": 1.7619, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.947494033412888e-05, |
|
"loss": 1.7543, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.94510739856802e-05, |
|
"loss": 1.7457, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9427207637231506e-05, |
|
"loss": 1.74, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.940334128878282e-05, |
|
"loss": 1.7339, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.937947494033413e-05, |
|
"loss": 1.7328, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.935560859188544e-05, |
|
"loss": 1.7171, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.933174224343676e-05, |
|
"loss": 1.713, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.930787589498807e-05, |
|
"loss": 1.7056, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.9284009546539386e-05, |
|
"loss": 1.704, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.9260143198090694e-05, |
|
"loss": 1.6972, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.923627684964201e-05, |
|
"loss": 1.695, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.9212410501193317e-05, |
|
"loss": 1.6824, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.918854415274463e-05, |
|
"loss": 1.6853, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.9164677804295946e-05, |
|
"loss": 1.6796, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.914081145584726e-05, |
|
"loss": 1.6829, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.9116945107398575e-05, |
|
"loss": 1.6773, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.909307875894988e-05, |
|
"loss": 1.672, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.90692124105012e-05, |
|
"loss": 1.6617, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.9045346062052505e-05, |
|
"loss": 1.6661, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.902147971360382e-05, |
|
"loss": 1.66, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.8997613365155134e-05, |
|
"loss": 1.6593, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.897374701670645e-05, |
|
"loss": 1.6523, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.894988066825776e-05, |
|
"loss": 1.6483, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.892601431980907e-05, |
|
"loss": 1.6415, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.8902147971360386e-05, |
|
"loss": 1.6417, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_cer": 4.8418803418803416, |
|
"eval_loss": 1.442280650138855, |
|
"eval_runtime": 4.8026, |
|
"eval_samples_per_second": 2.082, |
|
"eval_steps_per_second": 0.208, |
|
"eval_wer": 1.1428571428571428, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8878281622911693e-05, |
|
"loss": 1.7786, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.885441527446301e-05, |
|
"loss": 1.6225, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.883054892601432e-05, |
|
"loss": 1.6146, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.880668257756564e-05, |
|
"loss": 1.6069, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.878281622911695e-05, |
|
"loss": 1.5974, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.875894988066826e-05, |
|
"loss": 1.5803, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.8735083532219574e-05, |
|
"loss": 1.5726, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.871121718377089e-05, |
|
"loss": 1.564, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.8687350835322197e-05, |
|
"loss": 1.5567, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.866348448687351e-05, |
|
"loss": 1.556, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.8639618138424826e-05, |
|
"loss": 1.5439, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.861575178997614e-05, |
|
"loss": 1.5427, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.859188544152745e-05, |
|
"loss": 1.5358, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.856801909307876e-05, |
|
"loss": 1.5316, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.854415274463008e-05, |
|
"loss": 1.5197, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.8520286396181385e-05, |
|
"loss": 1.5193, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.84964200477327e-05, |
|
"loss": 1.5078, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.8472553699284014e-05, |
|
"loss": 1.5033, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.844868735083532e-05, |
|
"loss": 1.4989, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.8424821002386637e-05, |
|
"loss": 1.4983, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.840095465393795e-05, |
|
"loss": 1.4935, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.8377088305489266e-05, |
|
"loss": 1.4875, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.8353221957040573e-05, |
|
"loss": 1.4827, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.832935560859189e-05, |
|
"loss": 1.4844, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.83054892601432e-05, |
|
"loss": 1.4736, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.828162291169451e-05, |
|
"loss": 1.4708, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.8257756563245825e-05, |
|
"loss": 1.4708, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.823389021479714e-05, |
|
"loss": 1.467, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.8210023866348454e-05, |
|
"loss": 1.4656, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.818615751789976e-05, |
|
"loss": 1.4631, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.8162291169451077e-05, |
|
"loss": 1.4568, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.813842482100239e-05, |
|
"loss": 1.4552, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.81145584725537e-05, |
|
"loss": 1.4493, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.8090692124105013e-05, |
|
"loss": 1.4449, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.806682577565633e-05, |
|
"loss": 1.447, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.804295942720764e-05, |
|
"loss": 1.4419, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.801909307875896e-05, |
|
"loss": 1.4418, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.7995226730310265e-05, |
|
"loss": 1.4414, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.797136038186158e-05, |
|
"loss": 1.4353, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.794749403341289e-05, |
|
"loss": 1.429, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.79236276849642e-05, |
|
"loss": 1.4349, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.7899761336515517e-05, |
|
"loss": 1.4319, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.787589498806683e-05, |
|
"loss": 1.4227, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.7852028639618146e-05, |
|
"loss": 1.4191, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.7828162291169453e-05, |
|
"loss": 1.4207, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.780429594272077e-05, |
|
"loss": 1.4152, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.7780429594272076e-05, |
|
"loss": 1.4132, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.775656324582339e-05, |
|
"loss": 1.4123, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.7732696897374705e-05, |
|
"loss": 1.407, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.770883054892602e-05, |
|
"loss": 1.4065, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.7684964200477334e-05, |
|
"loss": 1.4069, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.766109785202864e-05, |
|
"loss": 1.4129, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.7637231503579957e-05, |
|
"loss": 1.4008, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.7613365155131264e-05, |
|
"loss": 1.3963, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.758949880668258e-05, |
|
"loss": 1.3955, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.7565632458233893e-05, |
|
"loss": 1.3941, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_cer": 4.794871794871795, |
|
"eval_loss": 1.2559969425201416, |
|
"eval_runtime": 4.8324, |
|
"eval_samples_per_second": 2.069, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 1.0649350649350648, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.754176610978521e-05, |
|
"loss": 1.5184, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.751789976133652e-05, |
|
"loss": 1.3922, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.749403341288783e-05, |
|
"loss": 1.3902, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.7470167064439145e-05, |
|
"loss": 1.3891, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.744630071599045e-05, |
|
"loss": 1.387, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.742243436754177e-05, |
|
"loss": 1.377, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.739856801909308e-05, |
|
"loss": 1.3807, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.7374701670644397e-05, |
|
"loss": 1.3814, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.735083532219571e-05, |
|
"loss": 1.3826, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.732696897374702e-05, |
|
"loss": 1.3762, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.7303102625298333e-05, |
|
"loss": 1.375, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.727923627684964e-05, |
|
"loss": 1.3777, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.7255369928400956e-05, |
|
"loss": 1.3676, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.723150357995227e-05, |
|
"loss": 1.3678, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.7207637231503585e-05, |
|
"loss": 1.3743, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.71837708830549e-05, |
|
"loss": 1.3717, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.715990453460621e-05, |
|
"loss": 1.3657, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.713603818615752e-05, |
|
"loss": 1.3609, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.711217183770883e-05, |
|
"loss": 1.3649, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 3.7088305489260144e-05, |
|
"loss": 1.3633, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.706443914081146e-05, |
|
"loss": 1.3576, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 3.7040572792362773e-05, |
|
"loss": 1.3635, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 3.701670644391409e-05, |
|
"loss": 1.3583, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.6992840095465396e-05, |
|
"loss": 1.354, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.696897374701671e-05, |
|
"loss": 1.3562, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.694510739856802e-05, |
|
"loss": 1.3581, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.692124105011933e-05, |
|
"loss": 1.3536, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.689737470167065e-05, |
|
"loss": 1.3502, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.687350835322196e-05, |
|
"loss": 1.3484, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.6849642004773277e-05, |
|
"loss": 1.3437, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.6825775656324584e-05, |
|
"loss": 1.3521, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.68019093078759e-05, |
|
"loss": 1.3466, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.677804295942721e-05, |
|
"loss": 1.3434, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 3.675417661097852e-05, |
|
"loss": 1.3409, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.6730310262529836e-05, |
|
"loss": 1.3444, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 3.670644391408115e-05, |
|
"loss": 1.3463, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.6682577565632465e-05, |
|
"loss": 1.3402, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 3.665871121718377e-05, |
|
"loss": 1.3381, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.663484486873509e-05, |
|
"loss": 1.3344, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 3.6610978520286395e-05, |
|
"loss": 1.3362, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 3.658711217183771e-05, |
|
"loss": 1.34, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.6563245823389024e-05, |
|
"loss": 1.3396, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.653937947494034e-05, |
|
"loss": 1.3375, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.6515513126491654e-05, |
|
"loss": 1.3297, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.649164677804296e-05, |
|
"loss": 1.3236, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.6467780429594276e-05, |
|
"loss": 1.3289, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.6443914081145584e-05, |
|
"loss": 1.3264, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.64200477326969e-05, |
|
"loss": 1.3268, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.639618138424821e-05, |
|
"loss": 1.3243, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.637231503579953e-05, |
|
"loss": 1.3308, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.634844868735084e-05, |
|
"loss": 1.3245, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.632458233890215e-05, |
|
"loss": 1.321, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.6300715990453464e-05, |
|
"loss": 1.3189, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.627684964200477e-05, |
|
"loss": 1.3241, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.625298329355609e-05, |
|
"loss": 1.3239, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.62291169451074e-05, |
|
"loss": 1.323, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_cer": 4.784188034188034, |
|
"eval_loss": 1.2157490253448486, |
|
"eval_runtime": 4.5874, |
|
"eval_samples_per_second": 2.18, |
|
"eval_steps_per_second": 0.218, |
|
"eval_wer": 1.0129870129870129, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.6205250596658716e-05, |
|
"loss": 1.4321, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.618138424821003e-05, |
|
"loss": 1.3181, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.615751789976134e-05, |
|
"loss": 1.3191, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.613365155131265e-05, |
|
"loss": 1.3159, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.610978520286396e-05, |
|
"loss": 1.3134, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.6085918854415275e-05, |
|
"loss": 1.3112, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.606205250596659e-05, |
|
"loss": 1.3127, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.6038186157517904e-05, |
|
"loss": 1.3097, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.601431980906922e-05, |
|
"loss": 1.3095, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.599045346062053e-05, |
|
"loss": 1.3081, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.596658711217184e-05, |
|
"loss": 1.3117, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.594272076372315e-05, |
|
"loss": 1.3107, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.5918854415274464e-05, |
|
"loss": 1.3031, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.589498806682578e-05, |
|
"loss": 1.3074, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.587112171837709e-05, |
|
"loss": 1.3048, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.584725536992841e-05, |
|
"loss": 1.3014, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.5823389021479715e-05, |
|
"loss": 1.3064, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.579952267303103e-05, |
|
"loss": 1.3072, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.577565632458234e-05, |
|
"loss": 1.301, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.575178997613365e-05, |
|
"loss": 1.2968, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.572792362768497e-05, |
|
"loss": 1.2954, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.570405727923628e-05, |
|
"loss": 1.2983, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.5680190930787596e-05, |
|
"loss": 1.2948, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.5656324582338904e-05, |
|
"loss": 1.2944, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 3.563245823389022e-05, |
|
"loss": 1.2964, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.560859188544153e-05, |
|
"loss": 1.2975, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.558472553699284e-05, |
|
"loss": 1.2959, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.5560859188544155e-05, |
|
"loss": 1.2894, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.553699284009547e-05, |
|
"loss": 1.2868, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.5513126491646784e-05, |
|
"loss": 1.3002, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.548926014319809e-05, |
|
"loss": 1.2964, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.546539379474941e-05, |
|
"loss": 1.2967, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.544152744630072e-05, |
|
"loss": 1.2947, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.541766109785203e-05, |
|
"loss": 1.2908, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.5393794749403344e-05, |
|
"loss": 1.2893, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.536992840095466e-05, |
|
"loss": 1.2872, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.5346062052505966e-05, |
|
"loss": 1.286, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.532219570405728e-05, |
|
"loss": 1.2875, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.5298329355608595e-05, |
|
"loss": 1.2898, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.527446300715991e-05, |
|
"loss": 1.2854, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.525059665871122e-05, |
|
"loss": 1.288, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.522673031026253e-05, |
|
"loss": 1.2852, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.520286396181385e-05, |
|
"loss": 1.2856, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.5178997613365155e-05, |
|
"loss": 1.281, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.515513126491647e-05, |
|
"loss": 1.2879, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.5131264916467784e-05, |
|
"loss": 1.2849, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.51073985680191e-05, |
|
"loss": 1.2858, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.5083532219570406e-05, |
|
"loss": 1.2851, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.505966587112172e-05, |
|
"loss": 1.2801, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.5035799522673035e-05, |
|
"loss": 1.289, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.501193317422434e-05, |
|
"loss": 1.2839, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.498806682577566e-05, |
|
"loss": 1.2742, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.496420047732697e-05, |
|
"loss": 1.2762, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.494033412887829e-05, |
|
"loss": 1.2748, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.49164677804296e-05, |
|
"loss": 1.277, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.489260143198091e-05, |
|
"loss": 1.275, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_cer": 4.788461538461538, |
|
"eval_loss": 1.1931421756744385, |
|
"eval_runtime": 4.8209, |
|
"eval_samples_per_second": 2.074, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 0.974025974025974, |
|
"step": 2248 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.4868735083532224e-05, |
|
"loss": 1.3891, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.484486873508353e-05, |
|
"loss": 1.2736, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 3.4821002386634846e-05, |
|
"loss": 1.2706, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.479713603818616e-05, |
|
"loss": 1.2725, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.4773269689737475e-05, |
|
"loss": 1.2807, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.474940334128879e-05, |
|
"loss": 1.2753, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.47255369928401e-05, |
|
"loss": 1.2717, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.470167064439141e-05, |
|
"loss": 1.2686, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.467780429594272e-05, |
|
"loss": 1.2662, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.4653937947494035e-05, |
|
"loss": 1.2721, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.463007159904535e-05, |
|
"loss": 1.2682, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.4606205250596664e-05, |
|
"loss": 1.2746, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.458233890214798e-05, |
|
"loss": 1.2697, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.4558472553699286e-05, |
|
"loss": 1.2666, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.45346062052506e-05, |
|
"loss": 1.266, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.451073985680191e-05, |
|
"loss": 1.2704, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.448687350835322e-05, |
|
"loss": 1.2611, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.446300715990454e-05, |
|
"loss": 1.2663, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.443914081145585e-05, |
|
"loss": 1.2622, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.441527446300717e-05, |
|
"loss": 1.2643, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.4391408114558475e-05, |
|
"loss": 1.2654, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.436754176610979e-05, |
|
"loss": 1.265, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.43436754176611e-05, |
|
"loss": 1.2564, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.431980906921241e-05, |
|
"loss": 1.2609, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.4295942720763726e-05, |
|
"loss": 1.2589, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.427207637231504e-05, |
|
"loss": 1.2597, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.4248210023866355e-05, |
|
"loss": 1.2553, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.422434367541766e-05, |
|
"loss": 1.2581, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.420047732696898e-05, |
|
"loss": 1.2588, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.4176610978520285e-05, |
|
"loss": 1.262, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.41527446300716e-05, |
|
"loss": 1.2535, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.4128878281622915e-05, |
|
"loss": 1.2547, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.410501193317423e-05, |
|
"loss": 1.2494, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.4081145584725544e-05, |
|
"loss": 1.2523, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.405727923627685e-05, |
|
"loss": 1.2553, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.4033412887828166e-05, |
|
"loss": 1.2548, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 3.4009546539379474e-05, |
|
"loss": 1.2525, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.398568019093079e-05, |
|
"loss": 1.2562, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.39618138424821e-05, |
|
"loss": 1.2571, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.393794749403342e-05, |
|
"loss": 1.2558, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.391408114558473e-05, |
|
"loss": 1.2509, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.389021479713604e-05, |
|
"loss": 1.2489, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 3.3866348448687355e-05, |
|
"loss": 1.2475, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 3.384248210023866e-05, |
|
"loss": 1.2468, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.381861575178998e-05, |
|
"loss": 1.2529, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 3.379474940334129e-05, |
|
"loss": 1.2524, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.3770883054892606e-05, |
|
"loss": 1.2504, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.374701670644392e-05, |
|
"loss": 1.2499, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 3.372315035799523e-05, |
|
"loss": 1.2493, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.369928400954654e-05, |
|
"loss": 1.2517, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 3.367541766109785e-05, |
|
"loss": 1.2502, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 3.3651551312649165e-05, |
|
"loss": 1.2455, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 3.362768496420048e-05, |
|
"loss": 1.2444, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.3603818615751795e-05, |
|
"loss": 1.2407, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.357995226730311e-05, |
|
"loss": 1.247, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 3.355608591885442e-05, |
|
"loss": 1.2476, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.353221957040573e-05, |
|
"loss": 1.2373, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_cer": 4.77991452991453, |
|
"eval_loss": 1.2068004608154297, |
|
"eval_runtime": 4.7887, |
|
"eval_samples_per_second": 2.088, |
|
"eval_steps_per_second": 0.209, |
|
"eval_wer": 1.0649350649350648, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 3.350835322195704e-05, |
|
"loss": 1.3503, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 3.3484486873508354e-05, |
|
"loss": 1.2483, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 3.346062052505967e-05, |
|
"loss": 1.2401, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 3.343675417661098e-05, |
|
"loss": 1.2351, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 3.34128878281623e-05, |
|
"loss": 1.2424, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 3.3389021479713605e-05, |
|
"loss": 1.2392, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 3.336515513126492e-05, |
|
"loss": 1.2397, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 3.334128878281623e-05, |
|
"loss": 1.2388, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.331742243436754e-05, |
|
"loss": 1.2345, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 3.329355608591886e-05, |
|
"loss": 1.2432, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 3.326968973747017e-05, |
|
"loss": 1.2374, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 3.3245823389021486e-05, |
|
"loss": 1.2345, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 3.3221957040572794e-05, |
|
"loss": 1.2349, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 3.319809069212411e-05, |
|
"loss": 1.2321, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 3.3174224343675416e-05, |
|
"loss": 1.2305, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.315035799522673e-05, |
|
"loss": 1.2371, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 3.3126491646778045e-05, |
|
"loss": 1.238, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.310262529832936e-05, |
|
"loss": 1.236, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.3078758949880675e-05, |
|
"loss": 1.2323, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 3.305489260143198e-05, |
|
"loss": 1.2266, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 3.30310262529833e-05, |
|
"loss": 1.2322, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.3007159904534605e-05, |
|
"loss": 1.2321, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 3.298329355608592e-05, |
|
"loss": 1.227, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 3.2959427207637234e-05, |
|
"loss": 1.2316, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 3.293556085918855e-05, |
|
"loss": 1.2317, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 3.291169451073986e-05, |
|
"loss": 1.23, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 3.288782816229117e-05, |
|
"loss": 1.2321, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 3.2863961813842485e-05, |
|
"loss": 1.2326, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 3.284009546539379e-05, |
|
"loss": 1.2276, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 3.281622911694511e-05, |
|
"loss": 1.2284, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 3.279236276849642e-05, |
|
"loss": 1.2298, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 3.276849642004774e-05, |
|
"loss": 1.2305, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 3.274463007159905e-05, |
|
"loss": 1.2284, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.272076372315036e-05, |
|
"loss": 1.2315, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 3.2696897374701674e-05, |
|
"loss": 1.2266, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 3.267303102625299e-05, |
|
"loss": 1.2248, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.2649164677804296e-05, |
|
"loss": 1.2237, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 3.262529832935561e-05, |
|
"loss": 1.2272, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 3.2601431980906925e-05, |
|
"loss": 1.223, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 3.257756563245824e-05, |
|
"loss": 1.2256, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 3.255369928400955e-05, |
|
"loss": 1.2199, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 3.252983293556086e-05, |
|
"loss": 1.2244, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 3.250596658711218e-05, |
|
"loss": 1.2191, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 3.2482100238663485e-05, |
|
"loss": 1.2297, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 3.24582338902148e-05, |
|
"loss": 1.2235, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 3.2434367541766114e-05, |
|
"loss": 1.2217, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 3.241050119331743e-05, |
|
"loss": 1.22, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 3.2386634844868736e-05, |
|
"loss": 1.2273, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 3.236276849642005e-05, |
|
"loss": 1.2171, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 3.2338902147971365e-05, |
|
"loss": 1.2251, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 3.231503579952267e-05, |
|
"loss": 1.2199, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 3.229116945107399e-05, |
|
"loss": 1.2178, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 3.22673031026253e-05, |
|
"loss": 1.2198, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 3.224343675417661e-05, |
|
"loss": 1.2186, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 3.2219570405727925e-05, |
|
"loss": 1.2116, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 3.219570405727924e-05, |
|
"loss": 1.2191, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_cer": 4.788461538461538, |
|
"eval_loss": 1.1857426166534424, |
|
"eval_runtime": 4.8064, |
|
"eval_samples_per_second": 2.081, |
|
"eval_steps_per_second": 0.208, |
|
"eval_wer": 1.025974025974026, |
|
"step": 3372 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 3.2171837708830554e-05, |
|
"loss": 1.3261, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.214797136038186e-05, |
|
"loss": 1.2206, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.2124105011933176e-05, |
|
"loss": 1.2121, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 3.210023866348449e-05, |
|
"loss": 1.2154, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.20763723150358e-05, |
|
"loss": 1.2153, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 3.205250596658711e-05, |
|
"loss": 1.2137, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.202863961813843e-05, |
|
"loss": 1.2168, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.200477326968974e-05, |
|
"loss": 1.2152, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.198090692124105e-05, |
|
"loss": 1.2102, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.1957040572792365e-05, |
|
"loss": 1.2101, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.193317422434368e-05, |
|
"loss": 1.2107, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.190930787589499e-05, |
|
"loss": 1.2146, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.18854415274463e-05, |
|
"loss": 1.2136, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 3.1861575178997616e-05, |
|
"loss": 1.2109, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.183770883054893e-05, |
|
"loss": 1.1998, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 3.1813842482100245e-05, |
|
"loss": 1.2116, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 3.178997613365155e-05, |
|
"loss": 1.2004, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.176610978520287e-05, |
|
"loss": 1.2144, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 3.1742243436754176e-05, |
|
"loss": 1.2048, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 3.171837708830549e-05, |
|
"loss": 1.1998, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.1694510739856805e-05, |
|
"loss": 1.2075, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.167064439140812e-05, |
|
"loss": 1.2091, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 3.1646778042959434e-05, |
|
"loss": 1.2061, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 3.162291169451074e-05, |
|
"loss": 1.2081, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 3.1599045346062056e-05, |
|
"loss": 1.2037, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.1575178997613364e-05, |
|
"loss": 1.2091, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 3.155131264916468e-05, |
|
"loss": 1.2054, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.152744630071599e-05, |
|
"loss": 1.2096, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 3.150357995226731e-05, |
|
"loss": 1.2104, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.147971360381862e-05, |
|
"loss": 1.2022, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.145584725536993e-05, |
|
"loss": 1.2037, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 3.1431980906921245e-05, |
|
"loss": 1.2099, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.140811455847255e-05, |
|
"loss": 1.2037, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 3.138424821002387e-05, |
|
"loss": 1.2025, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 3.136038186157518e-05, |
|
"loss": 1.2029, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.1336515513126496e-05, |
|
"loss": 1.2072, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 3.131264916467781e-05, |
|
"loss": 1.2022, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.128878281622912e-05, |
|
"loss": 1.2047, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 3.126491646778043e-05, |
|
"loss": 1.2019, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 3.124105011933174e-05, |
|
"loss": 1.2083, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 3.1217183770883056e-05, |
|
"loss": 1.2015, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 3.119331742243437e-05, |
|
"loss": 1.2069, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.1169451073985685e-05, |
|
"loss": 1.2006, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 3.1145584725537e-05, |
|
"loss": 1.1969, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 3.112171837708831e-05, |
|
"loss": 1.1971, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 3.109785202863962e-05, |
|
"loss": 1.1997, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.107398568019093e-05, |
|
"loss": 1.2082, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.1050119331742244e-05, |
|
"loss": 1.1952, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.102625298329356e-05, |
|
"loss": 1.1896, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 3.100238663484487e-05, |
|
"loss": 1.1958, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.097852028639619e-05, |
|
"loss": 1.1938, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.0954653937947496e-05, |
|
"loss": 1.1991, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 3.093078758949881e-05, |
|
"loss": 1.1969, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 3.090692124105012e-05, |
|
"loss": 1.1958, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 3.088305489260143e-05, |
|
"loss": 1.1876, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.085918854415275e-05, |
|
"loss": 1.1942, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_cer": 4.7905982905982905, |
|
"eval_loss": 1.1601146459579468, |
|
"eval_runtime": 4.8741, |
|
"eval_samples_per_second": 2.052, |
|
"eval_steps_per_second": 0.205, |
|
"eval_wer": 0.987012987012987, |
|
"step": 3934 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.083532219570406e-05, |
|
"loss": 1.3012, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 3.0811455847255376e-05, |
|
"loss": 1.1941, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.0787589498806684e-05, |
|
"loss": 1.1899, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 3.0763723150358e-05, |
|
"loss": 1.1886, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.0739856801909307e-05, |
|
"loss": 1.1903, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.071599045346062e-05, |
|
"loss": 1.1918, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.0692124105011936e-05, |
|
"loss": 1.1976, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 3.066825775656325e-05, |
|
"loss": 1.2001, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 3.0644391408114565e-05, |
|
"loss": 1.1896, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.062052505966587e-05, |
|
"loss": 1.1937, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 3.059665871121719e-05, |
|
"loss": 1.1903, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.0572792362768495e-05, |
|
"loss": 1.1891, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 3.054892601431981e-05, |
|
"loss": 1.1805, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.0525059665871124e-05, |
|
"loss": 1.1919, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 3.050119331742244e-05, |
|
"loss": 1.1904, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 3.047732696897375e-05, |
|
"loss": 1.1927, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 3.0453460620525064e-05, |
|
"loss": 1.1898, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.0429594272076372e-05, |
|
"loss": 1.1928, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.0405727923627687e-05, |
|
"loss": 1.1831, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 3.0381861575178998e-05, |
|
"loss": 1.1833, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 3.0357995226730313e-05, |
|
"loss": 1.1863, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 3.0334128878281627e-05, |
|
"loss": 1.1807, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.031026252983294e-05, |
|
"loss": 1.1854, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.0286396181384253e-05, |
|
"loss": 1.1835, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 3.0262529832935564e-05, |
|
"loss": 1.1852, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.0238663484486875e-05, |
|
"loss": 1.186, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 3.0214797136038187e-05, |
|
"loss": 1.1841, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 3.01909307875895e-05, |
|
"loss": 1.1806, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.0167064439140816e-05, |
|
"loss": 1.1876, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 3.0143198090692127e-05, |
|
"loss": 1.1802, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 3.011933174224344e-05, |
|
"loss": 1.1848, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 3.0095465393794753e-05, |
|
"loss": 1.1814, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 3.0071599045346064e-05, |
|
"loss": 1.1838, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 3.0047732696897375e-05, |
|
"loss": 1.1799, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 3.002386634844869e-05, |
|
"loss": 1.1791, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 1.18, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 2.9976133651551315e-05, |
|
"loss": 1.1862, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 2.995226730310263e-05, |
|
"loss": 1.1711, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 2.992840095465394e-05, |
|
"loss": 1.1776, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 2.9904534606205252e-05, |
|
"loss": 1.18, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.9880668257756563e-05, |
|
"loss": 1.1819, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 2.9856801909307878e-05, |
|
"loss": 1.1766, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 2.983293556085919e-05, |
|
"loss": 1.1756, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 2.9809069212410504e-05, |
|
"loss": 1.1812, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.978520286396182e-05, |
|
"loss": 1.1764, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 2.976133651551313e-05, |
|
"loss": 1.1816, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 2.973747016706444e-05, |
|
"loss": 1.1737, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 2.9713603818615752e-05, |
|
"loss": 1.1788, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 2.9689737470167067e-05, |
|
"loss": 1.1813, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.9665871121718378e-05, |
|
"loss": 1.1766, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 2.9642004773269692e-05, |
|
"loss": 1.1721, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 2.9618138424821007e-05, |
|
"loss": 1.1788, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 2.9594272076372318e-05, |
|
"loss": 1.1747, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.9570405727923633e-05, |
|
"loss": 1.1741, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.954653937947494e-05, |
|
"loss": 1.1713, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.9522673031026255e-05, |
|
"loss": 1.1716, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_cer": 4.777777777777778, |
|
"eval_loss": 1.1047004461288452, |
|
"eval_runtime": 4.6551, |
|
"eval_samples_per_second": 2.148, |
|
"eval_steps_per_second": 0.215, |
|
"eval_wer": 0.948051948051948, |
|
"step": 4496 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 2.9498806682577566e-05, |
|
"loss": 1.276, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 2.947494033412888e-05, |
|
"loss": 1.1693, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 2.9451073985680195e-05, |
|
"loss": 1.1742, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.9427207637231507e-05, |
|
"loss": 1.1741, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 2.940334128878282e-05, |
|
"loss": 1.1713, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 2.937947494033413e-05, |
|
"loss": 1.1656, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 2.9355608591885443e-05, |
|
"loss": 1.1666, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 2.9331742243436755e-05, |
|
"loss": 1.169, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 2.930787589498807e-05, |
|
"loss": 1.1749, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 2.9284009546539384e-05, |
|
"loss": 1.1651, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.9260143198090695e-05, |
|
"loss": 1.1711, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 2.923627684964201e-05, |
|
"loss": 1.1713, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 2.9212410501193317e-05, |
|
"loss": 1.1656, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.9188544152744632e-05, |
|
"loss": 1.1618, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 2.9164677804295943e-05, |
|
"loss": 1.1703, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 2.9140811455847258e-05, |
|
"loss": 1.1734, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 2.9116945107398572e-05, |
|
"loss": 1.1687, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 2.9093078758949883e-05, |
|
"loss": 1.1704, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 2.9069212410501198e-05, |
|
"loss": 1.1651, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.9045346062052506e-05, |
|
"loss": 1.1627, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 2.902147971360382e-05, |
|
"loss": 1.1639, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 2.899761336515513e-05, |
|
"loss": 1.1606, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 2.8973747016706446e-05, |
|
"loss": 1.1643, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 2.894988066825776e-05, |
|
"loss": 1.1694, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 2.8926014319809072e-05, |
|
"loss": 1.1623, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 2.8902147971360387e-05, |
|
"loss": 1.163, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 2.8878281622911694e-05, |
|
"loss": 1.1623, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 2.885441527446301e-05, |
|
"loss": 1.1631, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 2.883054892601432e-05, |
|
"loss": 1.1607, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 2.8806682577565635e-05, |
|
"loss": 1.158, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 2.878281622911695e-05, |
|
"loss": 1.1641, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 2.875894988066826e-05, |
|
"loss": 1.1615, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 2.8735083532219575e-05, |
|
"loss": 1.1618, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 2.8711217183770886e-05, |
|
"loss": 1.1626, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 2.8687350835322197e-05, |
|
"loss": 1.1596, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 2.866348448687351e-05, |
|
"loss": 1.1568, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 2.8639618138424823e-05, |
|
"loss": 1.1555, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 2.8615751789976138e-05, |
|
"loss": 1.1623, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 2.859188544152745e-05, |
|
"loss": 1.1525, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 2.8568019093078763e-05, |
|
"loss": 1.1599, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 2.8544152744630075e-05, |
|
"loss": 1.1656, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 2.8520286396181386e-05, |
|
"loss": 1.1528, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 2.8496420047732697e-05, |
|
"loss": 1.1603, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 2.847255369928401e-05, |
|
"loss": 1.1589, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 2.8448687350835323e-05, |
|
"loss": 1.1599, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 2.8424821002386637e-05, |
|
"loss": 1.1562, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 2.8400954653937952e-05, |
|
"loss": 1.1593, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 2.8377088305489263e-05, |
|
"loss": 1.1559, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 2.8353221957040574e-05, |
|
"loss": 1.1578, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 2.8329355608591886e-05, |
|
"loss": 1.162, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 2.83054892601432e-05, |
|
"loss": 1.1617, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 2.828162291169451e-05, |
|
"loss": 1.1576, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 2.8257756563245826e-05, |
|
"loss": 1.1609, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 2.823389021479714e-05, |
|
"loss": 1.1584, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 2.821002386634845e-05, |
|
"loss": 1.1601, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 2.8186157517899763e-05, |
|
"loss": 1.1555, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_cer": 4.7585470085470085, |
|
"eval_loss": 1.0889137983322144, |
|
"eval_runtime": 4.8301, |
|
"eval_samples_per_second": 2.07, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 0.948051948051948, |
|
"step": 5058 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 2.8162291169451074e-05, |
|
"loss": 1.2598, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 2.813842482100239e-05, |
|
"loss": 1.1559, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 2.81145584725537e-05, |
|
"loss": 1.1544, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 2.8090692124105014e-05, |
|
"loss": 1.1514, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 2.806682577565633e-05, |
|
"loss": 1.1548, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 2.804295942720764e-05, |
|
"loss": 1.1531, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 2.8019093078758955e-05, |
|
"loss": 1.149, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 2.7995226730310262e-05, |
|
"loss": 1.1512, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 2.7971360381861577e-05, |
|
"loss": 1.152, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 2.7947494033412888e-05, |
|
"loss": 1.1497, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 2.7923627684964203e-05, |
|
"loss": 1.151, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 2.7899761336515517e-05, |
|
"loss": 1.1571, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 2.787589498806683e-05, |
|
"loss": 1.1528, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 2.7852028639618143e-05, |
|
"loss": 1.1472, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 2.782816229116945e-05, |
|
"loss": 1.1502, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 2.7804295942720766e-05, |
|
"loss": 1.1487, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 2.7780429594272077e-05, |
|
"loss": 1.1448, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 2.775656324582339e-05, |
|
"loss": 1.1531, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 2.7732696897374706e-05, |
|
"loss": 1.151, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 2.7708830548926017e-05, |
|
"loss": 1.1447, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 2.768496420047733e-05, |
|
"loss": 1.1501, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 2.766109785202864e-05, |
|
"loss": 1.1473, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 2.7637231503579954e-05, |
|
"loss": 1.1486, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 2.7613365155131265e-05, |
|
"loss": 1.1547, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 2.758949880668258e-05, |
|
"loss": 1.1492, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 2.7565632458233894e-05, |
|
"loss": 1.1489, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.7541766109785206e-05, |
|
"loss": 1.1494, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 2.751789976133652e-05, |
|
"loss": 1.1445, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 2.7494033412887828e-05, |
|
"loss": 1.145, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 2.7470167064439143e-05, |
|
"loss": 1.147, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.7446300715990454e-05, |
|
"loss": 1.1495, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 2.7422434367541768e-05, |
|
"loss": 1.1551, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 2.7398568019093083e-05, |
|
"loss": 1.1484, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.7374701670644394e-05, |
|
"loss": 1.1546, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 2.735083532219571e-05, |
|
"loss": 1.1524, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 2.7326968973747016e-05, |
|
"loss": 1.1502, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 2.730310262529833e-05, |
|
"loss": 1.1464, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 2.7279236276849642e-05, |
|
"loss": 1.1454, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 2.7255369928400957e-05, |
|
"loss": 1.1416, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 2.723150357995227e-05, |
|
"loss": 1.1496, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 2.7207637231503583e-05, |
|
"loss": 1.1446, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 2.7183770883054897e-05, |
|
"loss": 1.144, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 2.7159904534606208e-05, |
|
"loss": 1.1436, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 2.713603818615752e-05, |
|
"loss": 1.1424, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 2.711217183770883e-05, |
|
"loss": 1.1466, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 2.7088305489260145e-05, |
|
"loss": 1.139, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.706443914081146e-05, |
|
"loss": 1.142, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 2.704057279236277e-05, |
|
"loss": 1.1448, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 2.7016706443914086e-05, |
|
"loss": 1.1374, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 2.6992840095465397e-05, |
|
"loss": 1.1469, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 2.6968973747016708e-05, |
|
"loss": 1.1398, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 2.694510739856802e-05, |
|
"loss": 1.142, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 2.6921241050119334e-05, |
|
"loss": 1.1463, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 2.6897374701670645e-05, |
|
"loss": 1.133, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 2.687350835322196e-05, |
|
"loss": 1.14, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 2.6849642004773274e-05, |
|
"loss": 1.1465, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 2.6825775656324585e-05, |
|
"loss": 1.1389, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_cer": 4.756410256410256, |
|
"eval_loss": 1.0790975093841553, |
|
"eval_runtime": 4.8315, |
|
"eval_samples_per_second": 2.07, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 0.961038961038961, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"learning_rate": 2.6801909307875896e-05, |
|
"loss": 1.2382, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 2.6778042959427208e-05, |
|
"loss": 1.1377, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 2.6754176610978522e-05, |
|
"loss": 1.1432, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 2.6730310262529833e-05, |
|
"loss": 1.1396, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 2.6706443914081148e-05, |
|
"loss": 1.1391, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 2.6682577565632463e-05, |
|
"loss": 1.1435, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"learning_rate": 2.6658711217183774e-05, |
|
"loss": 1.1434, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 2.6634844868735085e-05, |
|
"loss": 1.1376, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 2.6610978520286396e-05, |
|
"loss": 1.1327, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 2.658711217183771e-05, |
|
"loss": 1.1399, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 2.6563245823389022e-05, |
|
"loss": 1.1327, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 2.6539379474940336e-05, |
|
"loss": 1.1419, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 2.651551312649165e-05, |
|
"loss": 1.1399, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 2.6491646778042962e-05, |
|
"loss": 1.1369, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 2.6467780429594277e-05, |
|
"loss": 1.1332, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 2.6443914081145585e-05, |
|
"loss": 1.1368, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 2.64200477326969e-05, |
|
"loss": 1.1356, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 2.639618138424821e-05, |
|
"loss": 1.1404, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 2.6372315035799525e-05, |
|
"loss": 1.1387, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 2.634844868735084e-05, |
|
"loss": 1.1379, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 2.632458233890215e-05, |
|
"loss": 1.1359, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 2.6300715990453465e-05, |
|
"loss": 1.1399, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 2.6276849642004773e-05, |
|
"loss": 1.1411, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 2.6252983293556088e-05, |
|
"loss": 1.1372, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 2.62291169451074e-05, |
|
"loss": 1.1387, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 2.6205250596658713e-05, |
|
"loss": 1.1331, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 2.6181384248210028e-05, |
|
"loss": 1.1337, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 2.615751789976134e-05, |
|
"loss": 1.1312, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 2.6133651551312654e-05, |
|
"loss": 1.1315, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 2.610978520286396e-05, |
|
"loss": 1.128, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 2.6085918854415276e-05, |
|
"loss": 1.136, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 2.6062052505966587e-05, |
|
"loss": 1.1284, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 2.6038186157517902e-05, |
|
"loss": 1.1289, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 2.6014319809069216e-05, |
|
"loss": 1.1307, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 2.5990453460620528e-05, |
|
"loss": 1.1337, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 2.5966587112171842e-05, |
|
"loss": 1.131, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 2.594272076372315e-05, |
|
"loss": 1.1269, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 2.5918854415274465e-05, |
|
"loss": 1.1294, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 2.5894988066825776e-05, |
|
"loss": 1.1323, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 2.587112171837709e-05, |
|
"loss": 1.1295, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 2.5847255369928405e-05, |
|
"loss": 1.128, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 2.5823389021479716e-05, |
|
"loss": 1.1304, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 2.579952267303103e-05, |
|
"loss": 1.1334, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 2.577565632458234e-05, |
|
"loss": 1.1298, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 2.5751789976133653e-05, |
|
"loss": 1.1292, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 2.5727923627684964e-05, |
|
"loss": 1.1316, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 2.570405727923628e-05, |
|
"loss": 1.1309, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 2.5680190930787593e-05, |
|
"loss": 1.1287, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 2.5656324582338905e-05, |
|
"loss": 1.1264, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 2.563245823389022e-05, |
|
"loss": 1.1258, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 2.560859188544153e-05, |
|
"loss": 1.1285, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 2.558472553699284e-05, |
|
"loss": 1.1229, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 2.5560859188544153e-05, |
|
"loss": 1.1322, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 2.5536992840095467e-05, |
|
"loss": 1.1299, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 2.5513126491646782e-05, |
|
"loss": 1.1307, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 2.5489260143198093e-05, |
|
"loss": 1.1308, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_cer": 4.297008547008547, |
|
"eval_loss": 1.0878677368164062, |
|
"eval_runtime": 4.8141, |
|
"eval_samples_per_second": 2.077, |
|
"eval_steps_per_second": 0.208, |
|
"eval_wer": 0.974025974025974, |
|
"step": 6182 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 2.5465393794749408e-05, |
|
"loss": 1.2266, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 2.544152744630072e-05, |
|
"loss": 1.1325, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 2.541766109785203e-05, |
|
"loss": 1.1277, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 2.539379474940334e-05, |
|
"loss": 1.1332, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 2.5369928400954656e-05, |
|
"loss": 1.1235, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 2.5346062052505967e-05, |
|
"loss": 1.127, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 2.532219570405728e-05, |
|
"loss": 1.1187, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 2.5298329355608596e-05, |
|
"loss": 1.1235, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 2.5274463007159907e-05, |
|
"loss": 1.1295, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 2.525059665871122e-05, |
|
"loss": 1.1258, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 2.522673031026253e-05, |
|
"loss": 1.1173, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 2.5202863961813844e-05, |
|
"loss": 1.1275, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 2.5178997613365155e-05, |
|
"loss": 1.1241, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 2.515513126491647e-05, |
|
"loss": 1.1286, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 2.5131264916467785e-05, |
|
"loss": 1.1231, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 2.5107398568019096e-05, |
|
"loss": 1.123, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 2.5083532219570407e-05, |
|
"loss": 1.1236, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"learning_rate": 2.5059665871121718e-05, |
|
"loss": 1.1244, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 2.5035799522673033e-05, |
|
"loss": 1.1254, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 2.5011933174224344e-05, |
|
"loss": 1.1251, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 2.498806682577566e-05, |
|
"loss": 1.1219, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 2.4964200477326973e-05, |
|
"loss": 1.12, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 2.4940334128878284e-05, |
|
"loss": 1.1189, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 2.49164677804296e-05, |
|
"loss": 1.1226, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"learning_rate": 2.4892601431980907e-05, |
|
"loss": 1.118, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 2.486873508353222e-05, |
|
"loss": 1.125, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 2.4844868735083532e-05, |
|
"loss": 1.1208, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 2.4821002386634847e-05, |
|
"loss": 1.1205, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 2.479713603818616e-05, |
|
"loss": 1.1263, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 2.4773269689737473e-05, |
|
"loss": 1.1177, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 2.4749403341288787e-05, |
|
"loss": 1.127, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 2.4725536992840095e-05, |
|
"loss": 1.1226, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 2.470167064439141e-05, |
|
"loss": 1.1202, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 2.467780429594272e-05, |
|
"loss": 1.122, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"learning_rate": 2.4653937947494035e-05, |
|
"loss": 1.1176, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 2.463007159904535e-05, |
|
"loss": 1.1171, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 2.460620525059666e-05, |
|
"loss": 1.1223, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 2.4582338902147976e-05, |
|
"loss": 1.1158, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 2.4558472553699284e-05, |
|
"loss": 1.1166, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 2.4534606205250598e-05, |
|
"loss": 1.1195, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"learning_rate": 2.451073985680191e-05, |
|
"loss": 1.1172, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 2.4486873508353224e-05, |
|
"loss": 1.1248, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 2.446300715990454e-05, |
|
"loss": 1.1177, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 11.78, |
|
"learning_rate": 2.443914081145585e-05, |
|
"loss": 1.1217, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 2.4415274463007164e-05, |
|
"loss": 1.1222, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 2.4391408114558472e-05, |
|
"loss": 1.1153, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 2.4367541766109787e-05, |
|
"loss": 1.12, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 2.4343675417661098e-05, |
|
"loss": 1.1192, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 2.4319809069212412e-05, |
|
"loss": 1.116, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 2.4295942720763727e-05, |
|
"loss": 1.1176, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 2.4272076372315038e-05, |
|
"loss": 1.1188, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"learning_rate": 2.4248210023866353e-05, |
|
"loss": 1.1184, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 2.4224343675417664e-05, |
|
"loss": 1.1178, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 2.4200477326968975e-05, |
|
"loss": 1.1159, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 2.4176610978520286e-05, |
|
"loss": 1.1158, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 2.41527446300716e-05, |
|
"loss": 1.1174, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_cer": 4.730769230769231, |
|
"eval_loss": 1.0691099166870117, |
|
"eval_runtime": 4.8405, |
|
"eval_samples_per_second": 2.066, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 0.961038961038961, |
|
"step": 6744 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 2.4128878281622915e-05, |
|
"loss": 1.2111, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 2.4105011933174227e-05, |
|
"loss": 1.1112, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 2.408114558472554e-05, |
|
"loss": 1.1195, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 2.4057279236276852e-05, |
|
"loss": 1.1161, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 2.4033412887828164e-05, |
|
"loss": 1.1169, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 2.4009546539379475e-05, |
|
"loss": 1.1146, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 2.398568019093079e-05, |
|
"loss": 1.1129, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 2.3961813842482104e-05, |
|
"loss": 1.1198, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 2.3937947494033415e-05, |
|
"loss": 1.1119, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 2.391408114558473e-05, |
|
"loss": 1.1186, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 2.389021479713604e-05, |
|
"loss": 1.1116, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 2.3866348448687352e-05, |
|
"loss": 1.1108, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"learning_rate": 2.3842482100238663e-05, |
|
"loss": 1.1109, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 2.3818615751789978e-05, |
|
"loss": 1.1153, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 2.379474940334129e-05, |
|
"loss": 1.1103, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 2.3770883054892604e-05, |
|
"loss": 1.1201, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 2.3747016706443918e-05, |
|
"loss": 1.1123, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 2.372315035799523e-05, |
|
"loss": 1.1139, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 2.369928400954654e-05, |
|
"loss": 1.1124, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 12.35, |
|
"learning_rate": 2.3675417661097852e-05, |
|
"loss": 1.1153, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 2.3651551312649166e-05, |
|
"loss": 1.1096, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"learning_rate": 2.3627684964200477e-05, |
|
"loss": 1.1086, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 2.3603818615751792e-05, |
|
"loss": 1.1099, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 2.3579952267303107e-05, |
|
"loss": 1.1112, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"learning_rate": 2.3556085918854418e-05, |
|
"loss": 1.1135, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 2.353221957040573e-05, |
|
"loss": 1.113, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 2.350835322195704e-05, |
|
"loss": 1.1106, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 2.3484486873508355e-05, |
|
"loss": 1.1115, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 2.3460620525059666e-05, |
|
"loss": 1.1145, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 2.343675417661098e-05, |
|
"loss": 1.1142, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 2.3412887828162295e-05, |
|
"loss": 1.1122, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 2.3389021479713606e-05, |
|
"loss": 1.1132, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 2.336515513126492e-05, |
|
"loss": 1.1123, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 2.334128878281623e-05, |
|
"loss": 1.1019, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 2.3317422434367543e-05, |
|
"loss": 1.1118, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 2.3293556085918854e-05, |
|
"loss": 1.1148, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 2.326968973747017e-05, |
|
"loss": 1.1112, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 2.3245823389021484e-05, |
|
"loss": 1.1084, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 2.3221957040572795e-05, |
|
"loss": 1.1093, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 2.319809069212411e-05, |
|
"loss": 1.1055, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 2.3174224343675417e-05, |
|
"loss": 1.1042, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 2.3150357995226732e-05, |
|
"loss": 1.1112, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 2.3126491646778043e-05, |
|
"loss": 1.1113, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 2.3102625298329358e-05, |
|
"loss": 1.1092, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 2.3078758949880672e-05, |
|
"loss": 1.1074, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 2.3054892601431983e-05, |
|
"loss": 1.1063, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 2.3031026252983298e-05, |
|
"loss": 1.1079, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"learning_rate": 2.3007159904534606e-05, |
|
"loss": 1.1041, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 2.298329355608592e-05, |
|
"loss": 1.1093, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"learning_rate": 2.295942720763723e-05, |
|
"loss": 1.1055, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 2.2935560859188546e-05, |
|
"loss": 1.1035, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"learning_rate": 2.291169451073986e-05, |
|
"loss": 1.104, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 2.2887828162291172e-05, |
|
"loss": 1.1079, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"learning_rate": 2.2863961813842486e-05, |
|
"loss": 1.1052, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 2.2840095465393794e-05, |
|
"loss": 1.1054, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 2.281622911694511e-05, |
|
"loss": 1.1079, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_cer": 4.756410256410256, |
|
"eval_loss": 1.052440881729126, |
|
"eval_runtime": 4.8878, |
|
"eval_samples_per_second": 2.046, |
|
"eval_steps_per_second": 0.205, |
|
"eval_wer": 0.935064935064935, |
|
"step": 7306 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 2.279236276849642e-05, |
|
"loss": 1.2048, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 2.2768496420047734e-05, |
|
"loss": 1.1063, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 2.274463007159905e-05, |
|
"loss": 1.1031, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 2.272076372315036e-05, |
|
"loss": 1.1081, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 13.08, |
|
"learning_rate": 2.2696897374701675e-05, |
|
"loss": 1.1055, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 2.2673031026252986e-05, |
|
"loss": 1.1054, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"learning_rate": 2.2649164677804297e-05, |
|
"loss": 1.1096, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"learning_rate": 2.262529832935561e-05, |
|
"loss": 1.1055, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"learning_rate": 2.2601431980906923e-05, |
|
"loss": 1.1085, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 2.2577565632458238e-05, |
|
"loss": 1.1027, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 2.255369928400955e-05, |
|
"loss": 1.0999, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 2.2529832935560863e-05, |
|
"loss": 1.1009, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"learning_rate": 2.2505966587112174e-05, |
|
"loss": 1.1078, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 2.2482100238663486e-05, |
|
"loss": 1.1022, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 2.2458233890214797e-05, |
|
"loss": 1.1037, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 2.243436754176611e-05, |
|
"loss": 1.1061, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 2.2410501193317426e-05, |
|
"loss": 1.1, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 13.31, |
|
"learning_rate": 2.2386634844868737e-05, |
|
"loss": 1.1011, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 2.2362768496420052e-05, |
|
"loss": 1.1024, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 2.2338902147971363e-05, |
|
"loss": 1.1005, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"learning_rate": 2.2315035799522674e-05, |
|
"loss": 1.1009, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 13.38, |
|
"learning_rate": 2.2291169451073985e-05, |
|
"loss": 1.0998, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 2.22673031026253e-05, |
|
"loss": 1.0988, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 2.224343675417661e-05, |
|
"loss": 1.1003, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 2.2219570405727926e-05, |
|
"loss": 1.1013, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"learning_rate": 2.219570405727924e-05, |
|
"loss": 1.101, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 2.217183770883055e-05, |
|
"loss": 1.1077, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 2.2147971360381863e-05, |
|
"loss": 1.0992, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 2.2124105011933174e-05, |
|
"loss": 1.1058, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 2.210023866348449e-05, |
|
"loss": 1.1066, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 2.20763723150358e-05, |
|
"loss": 1.1023, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"learning_rate": 2.2052505966587114e-05, |
|
"loss": 1.0965, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 2.202863961813843e-05, |
|
"loss": 1.1001, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"learning_rate": 2.200477326968974e-05, |
|
"loss": 1.0979, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 2.198090692124105e-05, |
|
"loss": 1.1039, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 2.1957040572792362e-05, |
|
"loss": 1.096, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"learning_rate": 2.1933174224343677e-05, |
|
"loss": 1.1098, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 2.1909307875894988e-05, |
|
"loss": 1.1036, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"learning_rate": 2.1885441527446303e-05, |
|
"loss": 1.1009, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 2.1861575178997617e-05, |
|
"loss": 1.0988, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 2.183770883054893e-05, |
|
"loss": 1.0952, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 2.1813842482100243e-05, |
|
"loss": 1.104, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 2.178997613365155e-05, |
|
"loss": 1.1001, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 2.1766109785202865e-05, |
|
"loss": 1.1, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 2.1742243436754177e-05, |
|
"loss": 1.0953, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 2.171837708830549e-05, |
|
"loss": 1.0947, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 2.1694510739856806e-05, |
|
"loss": 1.1016, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 2.1670644391408117e-05, |
|
"loss": 1.0941, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 13.86, |
|
"learning_rate": 2.164677804295943e-05, |
|
"loss": 1.0954, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"learning_rate": 2.162291169451074e-05, |
|
"loss": 1.1, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 2.1599045346062054e-05, |
|
"loss": 1.0974, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 2.1575178997613365e-05, |
|
"loss": 1.0969, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 2.155131264916468e-05, |
|
"loss": 1.0981, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 13.95, |
|
"learning_rate": 2.1527446300715994e-05, |
|
"loss": 1.0993, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 2.1503579952267305e-05, |
|
"loss": 1.0901, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"learning_rate": 2.147971360381862e-05, |
|
"loss": 1.1019, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_cer": 4.754273504273504, |
|
"eval_loss": 1.0490120649337769, |
|
"eval_runtime": 4.8398, |
|
"eval_samples_per_second": 2.066, |
|
"eval_steps_per_second": 0.207, |
|
"eval_wer": 0.961038961038961, |
|
"step": 7868 |
|
} |
|
], |
|
"max_steps": 16860, |
|
"num_train_epochs": 30, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|