{
  "best_metric": 39.565217391304344,
  "best_model_checkpoint": "./whisper-small-wer35-ekg\\checkpoint-6000",
  "epoch": 95.58823529411765,
  "eval_steps": 500,
  "global_step": 6500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.15, "grad_norm": 28.624893188476562, "learning_rate": 9.433962264150944e-08, "loss": 2.7579, "step": 10 },
    { "epoch": 0.29, "grad_norm": 28.752416610717773, "learning_rate": 2.1226415094339622e-07, "loss": 2.6811, "step": 20 },
    { "epoch": 0.44, "grad_norm": 25.40899658203125, "learning_rate": 3.3018867924528305e-07, "loss": 2.5367, "step": 30 },
    { "epoch": 0.59, "grad_norm": 22.991146087646484, "learning_rate": 4.4811320754716983e-07, "loss": 2.2512, "step": 40 },
    { "epoch": 0.74, "grad_norm": 18.11638832092285, "learning_rate": 5.660377358490567e-07, "loss": 2.0029, "step": 50 },
    { "epoch": 0.88, "grad_norm": 17.4566650390625, "learning_rate": 6.839622641509434e-07, "loss": 1.7014, "step": 60 },
    { "epoch": 1.03, "grad_norm": 12.19814395904541, "learning_rate": 8.018867924528302e-07, "loss": 1.4435, "step": 70 },
    { "epoch": 1.18, "grad_norm": 9.45124626159668, "learning_rate": 9.19811320754717e-07, "loss": 1.2141, "step": 80 },
    { "epoch": 1.32, "grad_norm": 8.163089752197266, "learning_rate": 1.037735849056604e-06, "loss": 1.0621, "step": 90 },
    { "epoch": 1.47, "grad_norm": 6.771365642547607, "learning_rate": 1.1556603773584908e-06, "loss": 0.9325, "step": 100 },
    { "epoch": 1.62, "grad_norm": 5.819299697875977, "learning_rate": 1.2735849056603775e-06, "loss": 0.8767, "step": 110 },
    { "epoch": 1.76, "grad_norm": 5.547601222991943, "learning_rate": 1.3915094339622643e-06, "loss": 0.7872, "step": 120 },
    { "epoch": 1.91, "grad_norm": 4.7814154624938965, "learning_rate": 1.509433962264151e-06, "loss": 0.7148, "step": 130 },
    { "epoch": 2.06, "grad_norm": 4.755609512329102, "learning_rate": 1.6273584905660379e-06, "loss": 0.6742, "step": 140 },
    { "epoch": 2.21, "grad_norm": 4.354093074798584, "learning_rate": 1.7452830188679247e-06, "loss": 0.6191, "step": 150 },
    { "epoch": 2.35, "grad_norm": 4.038461685180664, "learning_rate": 1.8632075471698114e-06, "loss": 0.5673, "step": 160 },
    { "epoch": 2.5, "grad_norm": 4.2933526039123535, "learning_rate": 1.981132075471698e-06, "loss": 0.5534, "step": 170 },
    { "epoch": 2.65, "grad_norm": 4.423107147216797, "learning_rate": 2.099056603773585e-06, "loss": 0.5162, "step": 180 },
    { "epoch": 2.79, "grad_norm": 3.7280497550964355, "learning_rate": 2.2169811320754718e-06, "loss": 0.4796, "step": 190 },
    { "epoch": 2.94, "grad_norm": 3.990518808364868, "learning_rate": 2.3349056603773588e-06, "loss": 0.4976, "step": 200 },
    { "epoch": 3.09, "grad_norm": 3.825350046157837, "learning_rate": 2.4528301886792453e-06, "loss": 0.4493, "step": 210 },
    { "epoch": 3.24, "grad_norm": 4.116565704345703, "learning_rate": 2.5707547169811327e-06, "loss": 0.429, "step": 220 },
    { "epoch": 3.38, "grad_norm": 3.6445629596710205, "learning_rate": 2.688679245283019e-06, "loss": 0.3898, "step": 230 },
    { "epoch": 3.53, "grad_norm": 3.741975784301758, "learning_rate": 2.8066037735849063e-06, "loss": 0.394, "step": 240 },
    { "epoch": 3.68, "grad_norm": 3.4814412593841553, "learning_rate": 2.9245283018867924e-06, "loss": 0.4115, "step": 250 },
    { "epoch": 3.82, "grad_norm": 3.914616823196411, "learning_rate": 3.04245283018868e-06, "loss": 0.3858, "step": 260 },
    { "epoch": 3.97, "grad_norm": 3.9170775413513184, "learning_rate": 3.160377358490566e-06, "loss": 0.3731, "step": 270 },
    { "epoch": 4.12, "grad_norm": 3.542541980743408, "learning_rate": 3.2783018867924534e-06, "loss": 0.347, "step": 280 },
    { "epoch": 4.26, "grad_norm": 3.5728800296783447, "learning_rate": 3.3962264150943395e-06, "loss": 0.3171, "step": 290 },
    { "epoch": 4.41, "grad_norm": 3.627119541168213, "learning_rate": 3.514150943396227e-06, "loss": 0.3105, "step": 300 },
    { "epoch": 4.56, "grad_norm": 3.763631820678711, "learning_rate": 3.632075471698113e-06, "loss": 0.315, "step": 310 },
    { "epoch": 4.71, "grad_norm": 3.1514198780059814, "learning_rate": 3.7500000000000005e-06, "loss": 0.3116, "step": 320 },
    { "epoch": 4.85, "grad_norm": 3.847184181213379, "learning_rate": 3.8679245283018875e-06, "loss": 0.3076, "step": 330 },
    { "epoch": 5.0, "grad_norm": 3.8552136421203613, "learning_rate": 3.985849056603774e-06, "loss": 0.3106, "step": 340 },
    { "epoch": 5.15, "grad_norm": 3.341456174850464, "learning_rate": 4.103773584905661e-06, "loss": 0.2699, "step": 350 },
    { "epoch": 5.29, "grad_norm": 3.3753416538238525, "learning_rate": 4.221698113207548e-06, "loss": 0.2442, "step": 360 },
    { "epoch": 5.44, "grad_norm": 3.4466023445129395, "learning_rate": 4.339622641509435e-06, "loss": 0.2436, "step": 370 },
    { "epoch": 5.59, "grad_norm": 3.2262954711914062, "learning_rate": 4.457547169811321e-06, "loss": 0.2524, "step": 380 },
    { "epoch": 5.74, "grad_norm": 3.167746067047119, "learning_rate": 4.575471698113208e-06, "loss": 0.2551, "step": 390 },
    { "epoch": 5.88, "grad_norm": 3.338438034057617, "learning_rate": 4.693396226415095e-06, "loss": 0.2525, "step": 400 },
    { "epoch": 6.03, "grad_norm": 2.4594788551330566, "learning_rate": 4.811320754716982e-06, "loss": 0.2243, "step": 410 },
    { "epoch": 6.18, "grad_norm": 3.1020987033843994, "learning_rate": 4.929245283018868e-06, "loss": 0.2055, "step": 420 },
    { "epoch": 6.32, "grad_norm": 2.919455051422119, "learning_rate": 5.047169811320756e-06, "loss": 0.2107, "step": 430 },
    { "epoch": 6.47, "grad_norm": 3.4662413597106934, "learning_rate": 5.165094339622642e-06, "loss": 0.1946, "step": 440 },
    { "epoch": 6.62, "grad_norm": 3.209831476211548, "learning_rate": 5.283018867924529e-06, "loss": 0.2049, "step": 450 },
    { "epoch": 6.76, "grad_norm": 3.5380163192749023, "learning_rate": 5.400943396226416e-06, "loss": 0.1865, "step": 460 },
    { "epoch": 6.91, "grad_norm": 3.4075424671173096, "learning_rate": 5.518867924528303e-06, "loss": 0.184, "step": 470 },
    { "epoch": 7.06, "grad_norm": 2.4087564945220947, "learning_rate": 5.636792452830189e-06, "loss": 0.1725, "step": 480 },
    { "epoch": 7.21, "grad_norm": 3.172959566116333, "learning_rate": 5.754716981132076e-06, "loss": 0.1505, "step": 490 },
    { "epoch": 7.35, "grad_norm": 3.0081095695495605, "learning_rate": 5.872641509433963e-06, "loss": 0.149, "step": 500 },
    { "epoch": 7.35, "eval_loss": 0.32423341274261475, "eval_runtime": 476.393, "eval_samples_per_second": 1.583, "eval_steps_per_second": 0.017, "eval_wer": 50.19762845849802, "step": 500 },
    { "epoch": 7.5, "grad_norm": 2.85640811920166, "learning_rate": 5.99056603773585e-06, "loss": 0.1478, "step": 510 },
    { "epoch": 7.65, "grad_norm": 3.185556650161743, "learning_rate": 6.108490566037736e-06, "loss": 0.1495, "step": 520 },
    { "epoch": 7.79, "grad_norm": 2.893033266067505, "learning_rate": 6.226415094339623e-06, "loss": 0.1508, "step": 530 },
    { "epoch": 7.94, "grad_norm": 3.044069290161133, "learning_rate": 6.34433962264151e-06, "loss": 0.155, "step": 540 },
    { "epoch": 8.09, "grad_norm": 2.3645215034484863, "learning_rate": 6.462264150943397e-06, "loss": 0.1191, "step": 550 },
    { "epoch": 8.24, "grad_norm": 2.373063564300537, "learning_rate": 6.580188679245284e-06, "loss": 0.1092, "step": 560 },
    { "epoch": 8.38, "grad_norm": 2.313464641571045, "learning_rate": 6.69811320754717e-06, "loss": 0.1108, "step": 570 },
    { "epoch": 8.53, "grad_norm": 2.749617099761963, "learning_rate": 6.816037735849057e-06, "loss": 0.108, "step": 580 },
    { "epoch": 8.68, "grad_norm": 2.794914960861206, "learning_rate": 6.933962264150944e-06, "loss": 0.1102, "step": 590 },
    { "epoch": 8.82, "grad_norm": 2.7622482776641846, "learning_rate": 7.051886792452831e-06, "loss": 0.1121, "step": 600 },
    { "epoch": 8.97, "grad_norm": 2.6082873344421387, "learning_rate": 7.169811320754717e-06, "loss": 0.1171, "step": 610 },
    { "epoch": 9.12, "grad_norm": 2.379115581512451, "learning_rate": 7.287735849056604e-06, "loss": 0.0824, "step": 620 },
    { "epoch": 9.26, "grad_norm": 3.37858510017395, "learning_rate": 7.405660377358491e-06, "loss": 0.0834, "step": 630 },
    { "epoch": 9.41, "grad_norm": 2.881742000579834, "learning_rate": 7.523584905660378e-06, "loss": 0.0809, "step": 640 },
    { "epoch": 9.56, "grad_norm": 2.6887032985687256, "learning_rate": 7.641509433962266e-06, "loss": 0.0832, "step": 650 },
    { "epoch": 9.71, "grad_norm": 2.685237407684326, "learning_rate": 7.75943396226415e-06, "loss": 0.0815, "step": 660 },
    { "epoch": 9.85, "grad_norm": 2.297445774078369, "learning_rate": 7.877358490566038e-06, "loss": 0.078, "step": 670 },
    { "epoch": 10.0, "grad_norm": 3.055098056793213, "learning_rate": 7.995283018867925e-06, "loss": 0.0753, "step": 680 },
    { "epoch": 10.15, "grad_norm": 2.498236656188965, "learning_rate": 8.113207547169812e-06, "loss": 0.054, "step": 690 },
    { "epoch": 10.29, "grad_norm": 2.1496338844299316, "learning_rate": 8.231132075471699e-06, "loss": 0.0583, "step": 700 },
    { "epoch": 10.44, "grad_norm": 2.9474973678588867, "learning_rate": 8.349056603773585e-06, "loss": 0.0559, "step": 710 },
    { "epoch": 10.59, "grad_norm": 3.049436569213867, "learning_rate": 8.466981132075472e-06, "loss": 0.0557, "step": 720 },
    { "epoch": 10.74, "grad_norm": 2.463704824447632, "learning_rate": 8.58490566037736e-06, "loss": 0.058, "step": 730 },
    { "epoch": 10.88, "grad_norm": 2.369274139404297, "learning_rate": 8.702830188679245e-06, "loss": 0.0614, "step": 740 },
    { "epoch": 11.03, "grad_norm": 2.1053879261016846, "learning_rate": 8.820754716981133e-06, "loss": 0.0545, "step": 750 },
    { "epoch": 11.18, "grad_norm": 1.6645501852035522, "learning_rate": 8.938679245283019e-06, "loss": 0.0404, "step": 760 },
    { "epoch": 11.32, "grad_norm": 2.5582962036132812, "learning_rate": 9.056603773584907e-06, "loss": 0.0407, "step": 770 },
    { "epoch": 11.47, "grad_norm": 2.079361915588379, "learning_rate": 9.174528301886794e-06, "loss": 0.0392, "step": 780 },
    { "epoch": 11.62, "grad_norm": 2.74023699760437, "learning_rate": 9.292452830188679e-06, "loss": 0.0437, "step": 790 },
    { "epoch": 11.76, "grad_norm": 2.8623881340026855, "learning_rate": 9.410377358490567e-06, "loss": 0.0416, "step": 800 },
    { "epoch": 11.91, "grad_norm": 2.3341763019561768, "learning_rate": 9.528301886792455e-06, "loss": 0.0435, "step": 810 },
    { "epoch": 12.06, "grad_norm": 2.0387160778045654, "learning_rate": 9.64622641509434e-06, "loss": 0.04, "step": 820 },
    { "epoch": 12.21, "grad_norm": 2.4834096431732178, "learning_rate": 9.764150943396227e-06, "loss": 0.029, "step": 830 },
    { "epoch": 12.35, "grad_norm": 2.721518039703369, "learning_rate": 9.882075471698113e-06, "loss": 0.0297, "step": 840 },
    { "epoch": 12.5, "grad_norm": 2.684687376022339, "learning_rate": 1e-05, "loss": 0.0316, "step": 850 },
    { "epoch": 12.65, "grad_norm": 2.5442607402801514, "learning_rate": 9.983153638814017e-06, "loss": 0.031, "step": 860 },
    { "epoch": 12.79, "grad_norm": 3.274472236633301, "learning_rate": 9.966307277628034e-06, "loss": 0.0328, "step": 870 },
    { "epoch": 12.94, "grad_norm": 2.6189253330230713, "learning_rate": 9.94946091644205e-06, "loss": 0.0328, "step": 880 },
    { "epoch": 13.09, "grad_norm": 2.8672521114349365, "learning_rate": 9.932614555256066e-06, "loss": 0.0299, "step": 890 },
    { "epoch": 13.24, "grad_norm": 2.2015247344970703, "learning_rate": 9.915768194070081e-06, "loss": 0.0224, "step": 900 },
    { "epoch": 13.38, "grad_norm": 1.7655293941497803, "learning_rate": 9.898921832884097e-06, "loss": 0.0222, "step": 910 },
    { "epoch": 13.53, "grad_norm": 2.5391361713409424, "learning_rate": 9.882075471698113e-06, "loss": 0.0232, "step": 920 },
    { "epoch": 13.68, "grad_norm": 2.2108376026153564, "learning_rate": 9.86522911051213e-06, "loss": 0.0249, "step": 930 },
    { "epoch": 13.82, "grad_norm": 2.0707106590270996, "learning_rate": 9.848382749326146e-06, "loss": 0.0241, "step": 940 },
    { "epoch": 13.97, "grad_norm": 2.2643473148345947, "learning_rate": 9.831536388140162e-06, "loss": 0.0276, "step": 950 },
    { "epoch": 14.12, "grad_norm": 1.739284634590149, "learning_rate": 9.81469002695418e-06, "loss": 0.017, "step": 960 },
    { "epoch": 14.26, "grad_norm": 1.8066262006759644, "learning_rate": 9.797843665768195e-06, "loss": 0.0157, "step": 970 },
    { "epoch": 14.41, "grad_norm": 2.5150985717773438, "learning_rate": 9.780997304582211e-06, "loss": 0.0196, "step": 980 },
    { "epoch": 14.56, "grad_norm": 1.7605470418930054, "learning_rate": 9.764150943396227e-06, "loss": 0.0169, "step": 990 },
    { "epoch": 14.71, "grad_norm": 2.1510426998138428, "learning_rate": 9.747304582210243e-06, "loss": 0.0186, "step": 1000 },
    { "epoch": 14.71, "eval_loss": 0.4280255138874054, "eval_runtime": 408.4481, "eval_samples_per_second": 1.846, "eval_steps_per_second": 0.02, "eval_wer": 46.126482213438734, "step": 1000 },
    { "epoch": 14.85, "grad_norm": 2.3640592098236084, "learning_rate": 9.73045822102426e-06, "loss": 0.0187, "step": 1010 },
    { "epoch": 15.0, "grad_norm": 1.8428599834442139, "learning_rate": 9.713611859838276e-06, "loss": 0.0213, "step": 1020 },
    { "epoch": 15.15, "grad_norm": 1.8431464433670044, "learning_rate": 9.696765498652292e-06, "loss": 0.0127, "step": 1030 },
    { "epoch": 15.29, "grad_norm": 1.7996199131011963, "learning_rate": 9.679919137466307e-06, "loss": 0.0137, "step": 1040 },
    { "epoch": 15.44, "grad_norm": 1.7150269746780396, "learning_rate": 9.663072776280325e-06, "loss": 0.0148, "step": 1050 },
    { "epoch": 15.59, "grad_norm": 1.8035845756530762, "learning_rate": 9.64622641509434e-06, "loss": 0.0137, "step": 1060 },
    { "epoch": 15.74, "grad_norm": 2.240055799484253, "learning_rate": 9.629380053908356e-06, "loss": 0.0146, "step": 1070 },
    { "epoch": 15.88, "grad_norm": 2.2169549465179443, "learning_rate": 9.612533692722372e-06, "loss": 0.0129, "step": 1080 },
    { "epoch": 16.03, "grad_norm": 1.7007489204406738, "learning_rate": 9.595687331536388e-06, "loss": 0.0143, "step": 1090 },
    { "epoch": 16.18, "grad_norm": 1.3945379257202148, "learning_rate": 9.578840970350406e-06, "loss": 0.0111, "step": 1100 },
    { "epoch": 16.32, "grad_norm": 1.6498446464538574, "learning_rate": 9.561994609164421e-06, "loss": 0.0108, "step": 1110 },
    { "epoch": 16.47, "grad_norm": 1.7959011793136597, "learning_rate": 9.545148247978437e-06, "loss": 0.012, "step": 1120 },
    { "epoch": 16.62, "grad_norm": 1.7820062637329102, "learning_rate": 9.528301886792455e-06, "loss": 0.0133, "step": 1130 },
    { "epoch": 16.76, "grad_norm": 1.8330453634262085, "learning_rate": 9.51145552560647e-06, "loss": 0.0134, "step": 1140 },
    { "epoch": 16.91, "grad_norm": 1.6996911764144897, "learning_rate": 9.494609164420486e-06, "loss": 0.0121, "step": 1150 },
    { "epoch": 17.06, "grad_norm": 1.2411192655563354, "learning_rate": 9.477762803234502e-06, "loss": 0.0112, "step": 1160 },
    { "epoch": 17.21, "grad_norm": 1.4605441093444824, "learning_rate": 9.460916442048518e-06, "loss": 0.0108, "step": 1170 },
    { "epoch": 17.35, "grad_norm": 1.6172523498535156, "learning_rate": 9.444070080862533e-06, "loss": 0.0093, "step": 1180 },
    { "epoch": 17.5, "grad_norm": 1.367756724357605, "learning_rate": 9.427223719676551e-06, "loss": 0.0124, "step": 1190 },
    { "epoch": 17.65, "grad_norm": 2.769695997238159, "learning_rate": 9.410377358490567e-06, "loss": 0.0111, "step": 1200 },
    { "epoch": 17.79, "grad_norm": 1.4088915586471558, "learning_rate": 9.393530997304582e-06, "loss": 0.0118, "step": 1210 },
    { "epoch": 17.94, "grad_norm": 2.075134038925171, "learning_rate": 9.3766846361186e-06, "loss": 0.0113, "step": 1220 },
    { "epoch": 18.09, "grad_norm": 1.1849188804626465, "learning_rate": 9.359838274932616e-06, "loss": 0.0095, "step": 1230 },
    { "epoch": 18.24, "grad_norm": 0.7864483594894409, "learning_rate": 9.342991913746632e-06, "loss": 0.008, "step": 1240 },
    { "epoch": 18.38, "grad_norm": 1.687619924545288, "learning_rate": 9.326145552560647e-06, "loss": 0.0082, "step": 1250 },
    { "epoch": 18.53, "grad_norm": 2.207996368408203, "learning_rate": 9.309299191374663e-06, "loss": 0.0105, "step": 1260 },
    { "epoch": 18.68, "grad_norm": 1.876765489578247, "learning_rate": 9.292452830188679e-06, "loss": 0.009, "step": 1270 },
    { "epoch": 18.82, "grad_norm": 2.1015830039978027, "learning_rate": 9.275606469002696e-06, "loss": 0.0113, "step": 1280 },
    { "epoch": 18.97, "grad_norm": 1.7208313941955566, "learning_rate": 9.258760107816712e-06, "loss": 0.0116, "step": 1290 },
    { "epoch": 19.12, "grad_norm": 1.086099624633789, "learning_rate": 9.241913746630728e-06, "loss": 0.0085, "step": 1300 },
    { "epoch": 19.26, "grad_norm": 1.8027433156967163, "learning_rate": 9.225067385444745e-06, "loss": 0.0088, "step": 1310 },
    { "epoch": 19.41, "grad_norm": 1.5787283182144165, "learning_rate": 9.208221024258761e-06, "loss": 0.0075, "step": 1320 },
    { "epoch": 19.56, "grad_norm": 1.3783093690872192, "learning_rate": 9.191374663072777e-06, "loss": 0.0086, "step": 1330 },
    { "epoch": 19.71, "grad_norm": 1.2076382637023926, "learning_rate": 9.174528301886794e-06, "loss": 0.0084, "step": 1340 },
    { "epoch": 19.85, "grad_norm": 1.688698649406433, "learning_rate": 9.157681940700809e-06, "loss": 0.008, "step": 1350 },
    { "epoch": 20.0, "grad_norm": 1.9429757595062256, "learning_rate": 9.140835579514824e-06, "loss": 0.0081, "step": 1360 },
    { "epoch": 20.15, "grad_norm": 1.4460784196853638, "learning_rate": 9.123989218328842e-06, "loss": 0.0067, "step": 1370 },
    { "epoch": 20.29, "grad_norm": 1.672726035118103, "learning_rate": 9.107142857142858e-06, "loss": 0.0068, "step": 1380 },
    { "epoch": 20.44, "grad_norm": 0.6751158833503723, "learning_rate": 9.090296495956873e-06, "loss": 0.0066, "step": 1390 },
    { "epoch": 20.59, "grad_norm": 1.696090817451477, "learning_rate": 9.07345013477089e-06, "loss": 0.0066, "step": 1400 },
    { "epoch": 20.74, "grad_norm": 1.0858286619186401, "learning_rate": 9.056603773584907e-06, "loss": 0.0065, "step": 1410 },
    { "epoch": 20.88, "grad_norm": 1.6216528415679932, "learning_rate": 9.039757412398922e-06, "loss": 0.0069, "step": 1420 },
    { "epoch": 21.03, "grad_norm": 1.192878246307373, "learning_rate": 9.02291105121294e-06, "loss": 0.0063, "step": 1430 },
    { "epoch": 21.18, "grad_norm": 1.2905162572860718, "learning_rate": 9.006064690026954e-06, "loss": 0.0062, "step": 1440 },
    { "epoch": 21.32, "grad_norm": 1.0860906839370728, "learning_rate": 8.989218328840971e-06, "loss": 0.0058, "step": 1450 },
    { "epoch": 21.47, "grad_norm": 1.6400948762893677, "learning_rate": 8.972371967654987e-06, "loss": 0.0066, "step": 1460 },
    { "epoch": 21.62, "grad_norm": 2.1465609073638916, "learning_rate": 8.955525606469003e-06, "loss": 0.0097, "step": 1470 },
    { "epoch": 21.76, "grad_norm": 0.9771747589111328, "learning_rate": 8.938679245283019e-06, "loss": 0.0074, "step": 1480 },
    { "epoch": 21.91, "grad_norm": 1.7411590814590454, "learning_rate": 8.921832884097036e-06, "loss": 0.0068, "step": 1490 },
    { "epoch": 22.06, "grad_norm": 1.491974949836731, "learning_rate": 8.904986522911052e-06, "loss": 0.006, "step": 1500 },
    { "epoch": 22.06, "eval_loss": 0.44684624671936035, "eval_runtime": 395.7518, "eval_samples_per_second": 1.905, "eval_steps_per_second": 0.02, "eval_wer": 43.537549407114625, "step": 1500 },
    { "epoch": 22.21, "grad_norm": 1.5780655145645142, "learning_rate": 8.888140161725068e-06, "loss": 0.0047, "step": 1510 },
    { "epoch": 22.35, "grad_norm": 1.6800497770309448, "learning_rate": 8.871293800539085e-06, "loss": 0.0057, "step": 1520 },
    { "epoch": 22.5, "grad_norm": 1.0676138401031494, "learning_rate": 8.8544474393531e-06, "loss": 0.0055, "step": 1530 },
    { "epoch": 22.65, "grad_norm": 2.0500409603118896, "learning_rate": 8.837601078167117e-06, "loss": 0.0059, "step": 1540 },
    { "epoch": 22.79, "grad_norm": 1.3901283740997314, "learning_rate": 8.820754716981133e-06, "loss": 0.0056, "step": 1550 },
    { "epoch": 22.94, "grad_norm": 1.7657601833343506, "learning_rate": 8.803908355795148e-06, "loss": 0.0069, "step": 1560 },
    { "epoch": 23.09, "grad_norm": 0.7901756763458252, "learning_rate": 8.787061994609166e-06, "loss": 0.0048, "step": 1570 },
    { "epoch": 23.24, "grad_norm": 1.3437069654464722, "learning_rate": 8.770215633423182e-06, "loss": 0.0052, "step": 1580 },
    { "epoch": 23.38, "grad_norm": 1.1439257860183716, "learning_rate": 8.753369272237197e-06, "loss": 0.0048, "step": 1590 },
    { "epoch": 23.53, "grad_norm": 2.013002872467041, "learning_rate": 8.736522911051213e-06, "loss": 0.005, "step": 1600 },
    { "epoch": 23.68, "grad_norm": 1.2465882301330566, "learning_rate": 8.71967654986523e-06, "loss": 0.0067, "step": 1610 },
    { "epoch": 23.82, "grad_norm": 2.2301642894744873, "learning_rate": 8.702830188679245e-06, "loss": 0.0058, "step": 1620 },
    { "epoch": 23.97, "grad_norm": 1.269247055053711, "learning_rate": 8.685983827493262e-06, "loss": 0.0055, "step": 1630 },
    { "epoch": 24.12, "grad_norm": 0.8886606693267822, "learning_rate": 8.669137466307278e-06, "loss": 0.0037, "step": 1640 },
    { "epoch": 24.26, "grad_norm": 1.7440249919891357, "learning_rate": 8.652291105121294e-06, "loss": 0.0049, "step": 1650 },
    { "epoch": 24.41, "grad_norm": 0.9428110122680664, "learning_rate": 8.635444743935311e-06, "loss": 0.0052, "step": 1660 },
    { "epoch": 24.56, "grad_norm": 1.336026906967163, "learning_rate": 8.618598382749327e-06, "loss": 0.0066, "step": 1670 },
    { "epoch": 24.71, "grad_norm": 0.7566521763801575, "learning_rate": 8.601752021563343e-06, "loss": 0.004, "step": 1680 },
    { "epoch": 24.85, "grad_norm": 1.2722612619400024, "learning_rate": 8.58490566037736e-06, "loss": 0.0039, "step": 1690 },
    { "epoch": 25.0, "grad_norm": 0.7349913120269775, "learning_rate": 8.568059299191376e-06, "loss": 0.0048, "step": 1700 },
    { "epoch": 25.15, "grad_norm": 0.8350051641464233, "learning_rate": 8.55121293800539e-06, "loss": 0.004, "step": 1710 },
    { "epoch": 25.29, "grad_norm": 1.1201083660125732, "learning_rate": 8.534366576819408e-06, "loss": 0.0044, "step": 1720 },
    { "epoch": 25.44, "grad_norm": 0.9569421410560608, "learning_rate": 8.517520215633423e-06, "loss": 0.0048, "step": 1730 },
    { "epoch": 25.59, "grad_norm": 0.9667839407920837, "learning_rate": 8.50067385444744e-06, "loss": 0.0051, "step": 1740 },
    { "epoch": 25.74, "grad_norm": 0.6828237175941467, "learning_rate": 8.483827493261457e-06, "loss": 0.0061, "step": 1750 },
    { "epoch": 25.88, "grad_norm": 1.4523017406463623, "learning_rate": 8.466981132075472e-06, "loss": 0.0054, "step": 1760 },
    { "epoch": 26.03, "grad_norm": 1.0334619283676147, "learning_rate": 8.450134770889488e-06, "loss": 0.0034, "step": 1770 },
    { "epoch": 26.18, "grad_norm": 1.5895966291427612, "learning_rate": 8.433288409703506e-06, "loss": 0.0036, "step": 1780 },
    { "epoch": 26.32, "grad_norm": 1.0893361568450928, "learning_rate": 8.416442048517522e-06, "loss": 0.0041, "step": 1790 },
    { "epoch": 26.47, "grad_norm": 1.2191766500473022, "learning_rate": 8.399595687331537e-06, "loss": 0.0038, "step": 1800 },
    { "epoch": 26.62, "grad_norm": 1.8165545463562012, "learning_rate": 8.382749326145553e-06, "loss": 0.0038, "step": 1810 },
    { "epoch": 26.76, "grad_norm": 1.477317452430725, "learning_rate": 8.365902964959569e-06, "loss": 0.0044, "step": 1820 },
    { "epoch": 26.91, "grad_norm": 0.8306496739387512, "learning_rate": 8.349056603773585e-06, "loss": 0.0036, "step": 1830 },
    { "epoch": 27.06, "grad_norm": 0.9612523317337036, "learning_rate": 8.332210242587602e-06, "loss": 0.0031, "step": 1840 },
    { "epoch": 27.21, "grad_norm": 1.7324286699295044, "learning_rate": 8.315363881401618e-06, "loss": 0.0029, "step": 1850 },
    { "epoch": 27.35, "grad_norm": 1.0282984972000122, "learning_rate": 8.298517520215634e-06, "loss": 0.0038, "step": 1860 },
    { "epoch": 27.5, "grad_norm": 0.7670619487762451, "learning_rate": 8.281671159029651e-06, "loss": 0.0044, "step": 1870 },
    { "epoch": 27.65, "grad_norm": 0.9554408192634583, "learning_rate": 8.264824797843667e-06, "loss": 0.0048, "step": 1880 },
    { "epoch": 27.79, "grad_norm": 1.2249259948730469, "learning_rate": 8.247978436657683e-06, "loss": 0.0038, "step": 1890 },
    { "epoch": 27.94, "grad_norm": 1.6148368120193481, "learning_rate": 8.231132075471699e-06, "loss": 0.0034, "step": 1900 },
    { "epoch": 28.09, "grad_norm": 1.3190560340881348, "learning_rate": 8.214285714285714e-06, "loss": 0.005, "step": 1910 },
    { "epoch": 28.24, "grad_norm": 1.7175958156585693, "learning_rate": 8.197439353099732e-06, "loss": 0.0037, "step": 1920 },
    { "epoch": 28.38, "grad_norm": 1.4584541320800781, "learning_rate": 8.180592991913748e-06, "loss": 0.0032, "step": 1930 },
    { "epoch": 28.53, "grad_norm": 1.2781018018722534, "learning_rate": 8.163746630727763e-06, "loss": 0.0038, "step": 1940 },
    { "epoch": 28.68, "grad_norm": 1.805674433708191, "learning_rate": 8.146900269541779e-06, "loss": 0.0044, "step": 1950 },
    { "epoch": 28.82, "grad_norm": 0.8535717129707336, "learning_rate": 8.130053908355797e-06, "loss": 0.0037, "step": 1960 },
    { "epoch": 28.97, "grad_norm": 1.6355313062667847, "learning_rate": 8.113207547169812e-06, "loss": 0.0033, "step": 1970 },
    { "epoch": 29.12, "grad_norm": 0.6453380584716797, "learning_rate": 8.096361185983828e-06, "loss": 0.0031, "step": 1980 },
    { "epoch": 29.26, "grad_norm": 1.806770920753479, "learning_rate": 8.079514824797844e-06, "loss": 0.0038, "step": 1990 },
    { "epoch": 29.41, "grad_norm": 0.49916043877601624, "learning_rate": 8.06266846361186e-06, "loss": 0.0032, "step": 2000 },
    { "epoch": 29.41, "eval_loss": 0.4711141288280487, "eval_runtime": 394.2364, "eval_samples_per_second": 1.913, "eval_steps_per_second": 0.02, "eval_wer": 42.29249011857708, "step": 2000 },
    { "epoch": 29.56, "grad_norm": 1.2711254358291626, "learning_rate": 8.045822102425877e-06, "loss": 0.0036, "step": 2010 },
    { "epoch": 29.71, "grad_norm": 1.2178030014038086, "learning_rate": 8.028975741239893e-06, "loss": 0.0042, "step": 2020 },
    { "epoch": 29.85, "grad_norm": 1.2590820789337158, "learning_rate": 8.012129380053909e-06, "loss": 0.0033, "step": 2030 },
    { "epoch": 30.0, "grad_norm": 0.7739225625991821, "learning_rate": 7.995283018867925e-06, "loss": 0.0053, "step": 2040 },
    { "epoch": 30.15, "grad_norm": 1.882087230682373, "learning_rate": 7.978436657681942e-06, "loss": 0.0043, "step": 2050 },
    { "epoch": 30.29, "grad_norm": 1.1462137699127197, "learning_rate": 7.961590296495958e-06, "loss": 0.0027, "step": 2060 },
    { "epoch": 30.44, "grad_norm": 1.1864248514175415, "learning_rate": 7.944743935309974e-06, "loss": 0.003, "step": 2070 },
    { "epoch": 30.59, "grad_norm": 0.4730267822742462, "learning_rate": 7.92789757412399e-06, "loss": 0.0026, "step": 2080 },
    { "epoch": 30.74, "grad_norm": 0.8079692125320435, "learning_rate": 7.911051212938005e-06, "loss": 0.002, "step": 2090 },
    { "epoch": 30.88, "grad_norm": 0.9878515005111694, "learning_rate": 7.894204851752023e-06, "loss": 0.0039, "step": 2100 },
    { "epoch": 31.03, "grad_norm": 0.26896196603775024, "learning_rate": 7.877358490566038e-06, "loss": 0.0033, "step": 2110 },
    { "epoch": 31.18, "grad_norm": 0.5570561289787292, "learning_rate": 7.860512129380054e-06, "loss": 0.0041, "step": 2120 },
    { "epoch": 31.32, "grad_norm": 1.9286779165267944, "learning_rate": 7.843665768194072e-06, "loss": 0.0031, "step": 2130 },
    { "epoch": 31.47, "grad_norm": 0.5159671306610107, "learning_rate": 7.826819407008087e-06, "loss": 0.002, "step": 2140 },
    { "epoch": 31.62, "grad_norm": 0.811177670955658, "learning_rate": 7.809973045822103e-06, "loss": 0.003, "step": 2150 },
    { "epoch": 31.76, "grad_norm": 0.8233473300933838, "learning_rate": 7.793126684636119e-06, "loss": 0.0023, "step": 2160 },
    { "epoch": 31.91, "grad_norm": 0.3896019756793976, "learning_rate": 7.776280323450135e-06, "loss": 0.0021, "step": 2170 },
    { "epoch": 32.06, "grad_norm": 0.37738358974456787, "learning_rate": 7.75943396226415e-06, "loss": 0.0024, "step": 2180 },
    { "epoch": 32.21, "grad_norm": 1.1048047542572021, "learning_rate": 7.742587601078168e-06, "loss": 0.003, "step": 2190 },
    { "epoch": 32.35, "grad_norm": 0.4888085126876831, "learning_rate": 7.725741239892184e-06, "loss": 0.0018, "step": 2200 },
    { "epoch": 32.5, "grad_norm": 0.8060516119003296, "learning_rate": 7.7088948787062e-06, "loss": 0.0019, "step": 2210 },
    { "epoch": 32.65, "grad_norm": 0.8470051884651184, "learning_rate": 7.692048517520217e-06, "loss": 0.0019, "step": 2220 },
    { "epoch": 32.79, "grad_norm": 0.9551669955253601, "learning_rate": 7.675202156334233e-06, "loss": 0.0027, "step": 2230 },
    { "epoch": 32.94, "grad_norm": 1.0600543022155762, "learning_rate": 7.658355795148249e-06, "loss": 0.0025, "step": 2240 },
    { "epoch": 33.09, "grad_norm": 0.4787401258945465, "learning_rate": 7.641509433962266e-06, "loss": 0.0027, "step": 2250 },
    { "epoch": 33.24, "grad_norm": 0.7312725782394409, "learning_rate": 7.62466307277628e-06, "loss": 0.0024, "step": 2260 },
    { "epoch": 33.38, "grad_norm": 1.3341702222824097, "learning_rate": 7.607816711590297e-06, "loss": 0.0022, "step": 2270 },
    { "epoch": 33.53, "grad_norm": 0.6375266909599304, "learning_rate": 7.5909703504043134e-06, "loss": 0.0029, "step": 2280 },
    { "epoch": 33.68, "grad_norm": 0.8027037978172302, "learning_rate": 7.574123989218329e-06, "loss": 0.0019, "step": 2290 },
    { "epoch": 33.82, "grad_norm": 0.45866358280181885, "learning_rate": 7.557277628032346e-06, "loss": 0.002, "step": 2300 },
    { "epoch": 33.97, "grad_norm": 0.4257550835609436, "learning_rate": 7.540431266846362e-06, "loss": 0.0022, "step": 2310 },
    { "epoch": 34.12, "grad_norm": 0.6710132360458374, "learning_rate": 7.523584905660378e-06, "loss": 0.0027, "step": 2320 },
    { "epoch": 34.26, "grad_norm": 0.7800111770629883, "learning_rate": 7.506738544474395e-06, "loss": 0.0022, "step": 2330 },
    { "epoch": 34.41, "grad_norm": 0.5604603290557861, "learning_rate": 7.489892183288411e-06, "loss": 0.0028, "step": 2340 },
    { "epoch": 34.56, "grad_norm": 1.3979426622390747, "learning_rate": 7.473045822102426e-06, "loss": 0.0024, "step": 2350 },
    { "epoch": 34.71, "grad_norm": 1.4295878410339355, "learning_rate": 7.456199460916442e-06, "loss": 0.0025, "step": 2360 },
    { "epoch": 34.85, "grad_norm": 0.8446325659751892, "learning_rate": 7.439353099730459e-06, "loss": 0.0039, "step": 2370 },
    { "epoch": 35.0, "grad_norm": 0.7062538266181946, "learning_rate": 7.422506738544475e-06, "loss": 0.0024, "step": 2380 },
    { "epoch": 35.15, "grad_norm": 0.42034047842025757, "learning_rate": 7.405660377358491e-06, "loss": 0.0015, "step": 2390 },
    { "epoch": 35.29, "grad_norm": 0.21419081091880798, "learning_rate": 7.388814016172508e-06, "loss": 0.0015, "step": 2400 },
    { "epoch": 35.44, "grad_norm": 0.2849736213684082, "learning_rate": 7.371967654986524e-06, "loss": 0.0008, "step": 2410 },
    { "epoch": 35.59, "grad_norm": 0.4012085795402527, "learning_rate": 7.35512129380054e-06, "loss": 0.0015, "step": 2420 },
    { "epoch": 35.74, "grad_norm": 0.12295886129140854, "learning_rate": 7.338274932614556e-06, "loss": 0.0016, "step": 2430 },
    { "epoch": 35.88, "grad_norm": 0.5100615620613098, "learning_rate": 7.321428571428572e-06, "loss": 0.0017, "step": 2440 },
    { "epoch": 36.03, "grad_norm": 0.19803810119628906, "learning_rate": 7.304582210242588e-06, "loss": 0.0013, "step": 2450 },
    { "epoch": 36.18, "grad_norm": 0.45984306931495667, "learning_rate": 7.287735849056604e-06, "loss": 0.0007, "step": 2460 },
    { "epoch": 36.32, "grad_norm": 1.1022437810897827, "learning_rate": 7.27088948787062e-06, "loss": 0.0012, "step": 2470 },
    { "epoch": 36.47, "grad_norm": 0.20512618124485016, "learning_rate": 7.254043126684637e-06, "loss": 0.0017, "step": 2480 },
    { "epoch": 36.62, "grad_norm": 0.11672288924455643, "learning_rate": 7.237196765498653e-06, "loss": 0.0009, "step": 2490 },
    { "epoch": 36.76, "grad_norm": 0.41004320979118347, "learning_rate": 7.220350404312669e-06, "loss": 0.0005, "step": 2500 },
    { "epoch": 36.76, "eval_loss": 0.48888495564460754, "eval_runtime": 395.4163, "eval_samples_per_second": 1.907, "eval_steps_per_second": 0.02, "eval_wer": 41.22529644268775, "step": 2500 },
    { "epoch": 36.91, "grad_norm": 0.41586098074913025, "learning_rate": 7.203504043126686e-06, "loss": 0.0006, "step": 2510 },
    { "epoch": 37.06, "grad_norm": 0.05540100112557411, "learning_rate": 7.1866576819407015e-06, "loss": 0.0004, "step": 2520 },
    { "epoch": 37.21, "grad_norm": 0.20431630313396454, "learning_rate": 7.169811320754717e-06, "loss": 0.0005, "step": 2530 },
    { "epoch": 37.35, "grad_norm": 0.13198405504226685, "learning_rate": 7.152964959568733e-06, "loss": 0.0005, "step": 2540 },
    { "epoch": 37.5, "grad_norm": 1.1269527673721313, "learning_rate": 7.13611859838275e-06, "loss": 0.0006, "step": 2550 },
    { "epoch": 37.65, "grad_norm": 0.06169961765408516, "learning_rate": 7.119272237196766e-06, "loss": 0.0003, "step": 2560 },
    { "epoch": 37.79, "grad_norm": 1.2373998165130615, "learning_rate": 7.102425876010782e-06, "loss": 0.0007, "step": 2570 },
    { "epoch": 37.94, "grad_norm": 0.730436384677887, "learning_rate": 7.085579514824799e-06, "loss": 0.0006, "step": 2580 },
    { "epoch": 38.09, "grad_norm": 0.2517056465148926, "learning_rate": 7.0687331536388145e-06, "loss": 0.0009, "step": 2590 },
    { "epoch": 38.24, "grad_norm": 0.24311576783657074, "learning_rate": 7.051886792452831e-06, "loss": 0.0008, "step": 2600 },
    { "epoch": 38.38, "grad_norm": 0.46015554666519165, "learning_rate": 7.035040431266848e-06, "loss": 0.0008, "step": 2610 },
    { "epoch": 38.53, "grad_norm": 0.10832543671131134, "learning_rate": 7.018194070080863e-06, "loss": 0.0011, "step": 2620 },
    { "epoch": 38.68, "grad_norm": 0.06158292293548584, "learning_rate": 7.0013477088948785e-06, "loss": 0.0011, "step": 2630 },
    { "epoch": 38.82, "grad_norm": 0.8122970461845398, "learning_rate": 6.984501347708895e-06, "loss": 0.0009, "step": 2640 },
    { "epoch": 38.97, "grad_norm": 0.7663437128067017, "learning_rate": 6.967654986522912e-06, "loss": 0.0008, "step": 2650 },
    { "epoch": 39.12, "grad_norm": 0.145246684551239, "learning_rate": 6.9508086253369275e-06, "loss": 0.0007, "step": 2660 },
    { "epoch": 39.26, "grad_norm": 0.5487465858459473, "learning_rate": 6.933962264150944e-06, "loss": 0.0016, "step": 2670 },
    { "epoch": 39.41, "grad_norm": 0.9387257695198059, "learning_rate": 6.917115902964961e-06, "loss": 0.0009, "step": 2680 },
    { "epoch": 39.56, "grad_norm": 0.31192561984062195, "learning_rate": 6.9002695417789766e-06, "loss": 0.0008, "step": 2690 },
    { "epoch": 39.71, "grad_norm": 0.0915888324379921, "learning_rate": 6.883423180592993e-06, "loss": 0.0005, "step": 2700 },
    { "epoch": 39.85, "grad_norm": 0.10339212417602539, "learning_rate": 6.866576819407008e-06, "loss": 0.0005, "step": 2710 },
    { "epoch": 40.0, "grad_norm": 0.07240493595600128, "learning_rate": 6.849730458221025e-06, "loss": 0.0008, "step": 2720 },
    { "epoch": 40.15, "grad_norm": 0.08370707184076309, "learning_rate": 6.8328840970350405e-06, "loss": 0.0009, "step": 2730 },
    { "epoch": 40.29, "grad_norm": 0.07619204372167587, "learning_rate": 6.816037735849057e-06, "loss": 0.0005, "step": 2740 },
    { "epoch": 40.44, "grad_norm": 0.4352577030658722, "learning_rate": 6.799191374663073e-06, "loss": 0.0016, "step": 2750 },
    { "epoch": 40.59, "grad_norm": 0.6862916946411133, "learning_rate": 6.78234501347709e-06, "loss": 0.0006, "step": 2760 },
    { "epoch": 40.74, "grad_norm": 0.2726142406463623, "learning_rate": 6.765498652291106e-06, "loss": 0.0008, "step": 2770 },
    { "epoch": 40.88, "grad_norm": 0.1969071477651596, "learning_rate": 6.748652291105122e-06, "loss": 0.0005, "step": 2780 },
    { "epoch": 41.03, "grad_norm": 0.341925710439682, "learning_rate": 6.731805929919139e-06, "loss": 0.0007, "step": 2790 },
    { "epoch": 41.18, "grad_norm": 1.3179571628570557, "learning_rate": 6.7149595687331536e-06, "loss": 0.0005, "step": 2800 },
    { "epoch": 41.32, "grad_norm": 0.5500785112380981, "learning_rate": 6.69811320754717e-06, "loss": 0.0014, "step": 2810 },
    { "epoch": 41.47, "grad_norm": 0.30600807070732117, "learning_rate": 6.681266846361186e-06, "loss": 0.0008, "step": 2820 },
    { "epoch": 41.62, "grad_norm": 0.2631785571575165, "learning_rate": 6.664420485175203e-06, "loss": 0.001, "step": 2830 },
    { "epoch": 41.76, "grad_norm": 0.8868472576141357, "learning_rate": 6.647574123989219e-06, "loss": 0.0009, "step": 2840 },
    { "epoch": 41.91, "grad_norm": 1.278052806854248, "learning_rate": 6.630727762803235e-06, "loss": 0.0011, "step": 2850 },
    { "epoch": 42.06, "grad_norm": 0.1165945902466774, "learning_rate": 6.613881401617252e-06, "loss": 0.0014, "step": 2860 },
    { "epoch": 42.21, "grad_norm": 0.37418603897094727, "learning_rate": 6.597035040431267e-06, "loss": 0.0009, "step": 2870 },
    { "epoch": 42.35, "grad_norm": 2.285724401473999, "learning_rate": 6.580188679245284e-06, "loss": 0.0013, "step": 2880 },
    { "epoch": 42.5, "grad_norm": 0.48481008410453796, "learning_rate": 6.563342318059299e-06, "loss": 0.0012, "step": 2890 },
    { "epoch": 42.65, "grad_norm": 0.330135703086853, "learning_rate": 6.546495956873316e-06, "loss": 0.0007, "step": 2900 },
    { "epoch": 42.79, "grad_norm": 0.5341326594352722, "learning_rate": 6.529649595687332e-06, "loss": 0.0008, "step": 2910 },
    { "epoch": 42.94, "grad_norm": 1.2509163618087769, "learning_rate": 6.512803234501348e-06, "loss": 0.0016, "step": 2920 },
    { "epoch": 43.09, "grad_norm": 1.3307183980941772, "learning_rate": 6.495956873315365e-06, "loss": 0.0015, "step": 2930 },
    { "epoch": 43.24, "grad_norm": 0.4615732729434967, "learning_rate": 6.47911051212938e-06, "loss": 0.0011, "step": 2940 },
    { "epoch": 43.38, "grad_norm": 0.3208690583705902, "learning_rate": 6.462264150943397e-06, "loss": 0.001, "step": 2950 },
    { "epoch": 43.53, "grad_norm": 0.6121777892112732, "learning_rate": 6.445417789757414e-06, "loss": 0.0012, "step": 2960 },
    { "epoch": 43.68, "grad_norm": 0.8947989344596863, "learning_rate": 6.4285714285714295e-06, "loss": 0.0021, "step": 2970 },
    { "epoch": 43.82, "grad_norm": 1.4063951969146729, "learning_rate": 6.411725067385444e-06, "loss": 0.0022, "step": 2980 },
    { "epoch": 43.97, "grad_norm": 0.9614599347114563, "learning_rate": 6.394878706199461e-06, "loss": 0.0022, "step": 2990 },
    { "epoch": 44.12, "grad_norm": 0.3544367551803589, "learning_rate": 6.378032345013478e-06, "loss": 0.0012, "step": 3000 },
    { "epoch": 44.12, "eval_loss": 0.4919167459011078, "eval_runtime": 412.7274, "eval_samples_per_second": 1.827, "eval_steps_per_second": 0.019, "eval_wer": 42.03557312252965, "step": 3000 },
    { "epoch": 44.26, "grad_norm": 0.3670799732208252, "learning_rate": 6.3611859838274934e-06, "loss": 0.0011, "step": 3010 },
    { "epoch": 44.41, "grad_norm": 0.8639935255050659, "learning_rate": 6.34433962264151e-06, "loss": 0.0021, "step": 3020 },
    { "epoch": 44.56, "grad_norm": 0.8535746932029724, "learning_rate": 6.327493261455526e-06, "loss": 0.0023, "step": 3030 },
    { "epoch": 44.71, "grad_norm": 0.791280210018158, "learning_rate": 6.3106469002695425e-06, "loss": 0.0017, "step": 3040 },
    { "epoch": 44.85, "grad_norm": 1.2309428453445435, "learning_rate": 6.293800539083559e-06, "loss": 0.0022, "step": 3050 },
    { "epoch": 45.0, "grad_norm": 2.02470326423645, "learning_rate": 6.276954177897575e-06, "loss": 0.0023, "step": 3060 },
    { "epoch": 45.15, "grad_norm": 0.6730286478996277, "learning_rate": 6.260107816711591e-06, "loss": 0.0014, "step": 3070 },
    { "epoch": 45.29, "grad_norm": 0.6371138095855713, "learning_rate": 6.2432614555256064e-06, "loss": 0.0013, "step": 3080 },
    { "epoch": 45.44, "grad_norm": 0.8862447738647461, "learning_rate": 6.226415094339623e-06, "loss": 0.002, "step": 3090 },
    { "epoch": 45.59, "grad_norm": 0.600082516670227, "learning_rate": 6.209568733153639e-06, "loss": 0.0015, "step": 3100 },
    { "epoch": 45.74, "grad_norm": 1.0069173574447632, "learning_rate": 6.1927223719676555e-06, "loss": 0.0011, "step": 3110 },
    { "epoch": 45.88, "grad_norm": 0.8306257724761963, "learning_rate": 6.175876010781672e-06, "loss": 0.0017, "step": 3120 },
    { "epoch": 46.03, "grad_norm": 0.0885746031999588, "learning_rate": 6.159029649595688e-06, "loss": 0.002, "step": 3130 },
    { "epoch": 46.18, "grad_norm": 0.2680250108242035, "learning_rate": 6.1421832884097045e-06, "loss": 0.0011, "step": 3140 },
    { "epoch": 46.32, "grad_norm": 0.5739225745201111, "learning_rate": 6.12533692722372e-06, "loss": 0.0006, "step": 3150 },
    { "epoch": 46.47, "grad_norm": 0.09838727116584778, "learning_rate": 6.108490566037736e-06, "loss": 0.001, "step": 3160 },
    { "epoch": 46.62, "grad_norm": 0.6324687600135803, "learning_rate": 6.091644204851752e-06, "loss": 0.0012, "step": 3170 },
    { "epoch": 46.76, "grad_norm": 0.8236867189407349, "learning_rate": 6.0747978436657685e-06, "loss": 0.0018, "step": 3180 },
    { "epoch": 46.91, "grad_norm": 1.935333251953125, "learning_rate": 6.057951482479785e-06, "loss": 0.0028, "step": 3190 },
    { "epoch": 47.06, "grad_norm": 0.9879902601242065, "learning_rate": 6.041105121293801e-06, "loss": 0.0025, "step": 3200 },
    { "epoch": 47.21, "grad_norm": 0.7782288193702698, "learning_rate": 6.0242587601078175e-06, "loss": 0.0017, "step": 3210 },
    { "epoch": 47.35, "grad_norm": 0.9317180514335632, "learning_rate": 6.007412398921833e-06, "loss": 0.0014, "step": 3220 },
    { "epoch": 47.5, "grad_norm": 0.4948224127292633, "learning_rate": 5.99056603773585e-06, "loss": 0.0012, "step": 3230 },
    { "epoch": 47.65, "grad_norm": 0.5893387198448181, "learning_rate": 5.9737196765498666e-06, "loss": 0.0012, "step": 3240 },
    { "epoch": 47.79, "grad_norm": 0.22002609074115753, "learning_rate": 5.9568733153638815e-06, "loss": 0.0015, "step": 3250 },
    { "epoch": 47.94, "grad_norm": 0.4735713005065918, "learning_rate": 5.940026954177897e-06, "loss": 0.0008, "step": 3260 },
    { "epoch": 48.09, "grad_norm": 0.8969672918319702, "learning_rate": 5.923180592991914e-06, "loss": 0.0014, "step": 3270 },
    { "epoch": 48.24, "grad_norm": 0.6368213295936584, "learning_rate": 5.9063342318059305e-06, "loss": 0.0014, "step": 3280 },
    { "epoch": 48.38, "grad_norm": 0.1679704338312149, "learning_rate": 5.889487870619946e-06, "loss": 0.0013, "step": 3290 },
    { "epoch": 48.53, "grad_norm": 0.9470486044883728, "learning_rate": 5.872641509433963e-06, "loss": 0.0017, "step": 3300 },
    { "epoch": 48.68, "grad_norm": 0.08789900690317154, "learning_rate": 5.855795148247979e-06, "loss": 0.0007, "step": 3310 },
    { "epoch": 48.82, "grad_norm": 0.24826857447624207, "learning_rate": 5.838948787061995e-06, "loss": 0.0009, "step": 3320 },
    { "epoch": 48.97, "grad_norm": 0.11028200387954712, "learning_rate": 5.822102425876012e-06, "loss": 0.0009, "step": 3330 },
    { "epoch": 49.12, "grad_norm": 0.11782591044902802, "learning_rate": 5.805256064690027e-06, "loss": 0.0008, "step": 3340 },
    { "epoch": 49.26, "grad_norm": 0.3089038133621216, "learning_rate": 5.7884097035040435e-06, "loss": 0.0007, "step": 3350 },
    { "epoch": 49.41, "grad_norm": 0.5864781737327576, "learning_rate": 5.771563342318059e-06, "loss": 0.0006, "step": 3360 },
    { "epoch": 49.56, "grad_norm": 0.6767354607582092, "learning_rate": 5.754716981132076e-06, "loss": 0.0004, "step": 3370 },
    { "epoch": 49.71, "grad_norm": 0.7366251349449158, "learning_rate": 5.737870619946092e-06, "loss": 0.0006, "step": 3380 },
    { "epoch": 49.85, "grad_norm": 0.460439532995224, "learning_rate": 5.721024258760108e-06, "loss": 0.0006, "step": 3390 },
    { "epoch": 50.0, "grad_norm": 0.2541469633579254, "learning_rate": 5.704177897574125e-06, "loss": 0.0011, "step": 3400 },
    { "epoch": 50.15, "grad_norm": 0.25263655185699463, "learning_rate": 5.687331536388141e-06, "loss": 0.0009, "step": 3410 },
    { "epoch": 50.29, "grad_norm": 0.3167230188846588, "learning_rate": 5.670485175202157e-06, "loss": 0.0005, "step": 3420 },
    { "epoch": 50.44, "grad_norm": 0.051995839923620224, "learning_rate": 5.653638814016172e-06, "loss": 0.0005, "step": 3430 },
    { "epoch": 50.59, "grad_norm": 0.23166793584823608, "learning_rate": 5.636792452830189e-06, "loss": 0.0012, "step": 3440 },
    { "epoch": 50.74, "grad_norm": 0.46085381507873535, "learning_rate": 5.619946091644205e-06, "loss": 0.0011, "step": 3450 },
    { "epoch": 50.88, "grad_norm": 0.07778172194957733, "learning_rate": 5.603099730458221e-06, "loss": 0.0006, "step": 3460 },
    { "epoch": 51.03, "grad_norm": 0.23257611691951752, "learning_rate": 5.586253369272238e-06, "loss": 0.0009, "step": 3470 },
    { "epoch": 51.18, "grad_norm": 0.6834172606468201, "learning_rate": 5.569407008086254e-06, "loss": 0.0006, "step": 3480 },
    { "epoch": 51.32, "grad_norm": 0.028312204405665398, "learning_rate": 5.55256064690027e-06, "loss": 0.0008, "step": 3490 },
    { "epoch": 51.47, "grad_norm": 0.053247835487127304, "learning_rate": 5.535714285714286e-06, "loss": 0.0004, "step": 3500 },
    { "epoch": 51.47, "eval_loss": 0.5013700127601624, "eval_runtime": 393.4347, "eval_samples_per_second": 1.916, "eval_steps_per_second": 0.02, "eval_wer": 40.39525691699605, "step": 3500 },
    { "epoch": 51.62, "grad_norm": 0.09007065743207932, "learning_rate": 5.518867924528303e-06, "loss": 0.0003, "step": 3510 },
    { "epoch": 51.76, "grad_norm": 0.03709765151143074, "learning_rate": 5.502021563342318e-06, "loss": 0.0003, "step": 3520 },
    { "epoch": 51.91, "grad_norm": 0.024563109502196312, "learning_rate": 5.485175202156334e-06, "loss": 0.0002, "step": 3530 },
    { "epoch": 52.06, "grad_norm": 0.38505399227142334, "learning_rate": 5.46832884097035e-06, "loss": 0.0007, "step": 3540 },
    { "epoch": 52.21, "grad_norm": 0.041454803198575974, "learning_rate": 5.451482479784367e-06, "loss": 0.0002, "step": 3550 },
    { "epoch": 52.35, "grad_norm": 0.03118882142007351, "learning_rate": 5.4346361185983834e-06, "loss": 0.0002, "step": 3560 },
    { "epoch": 52.5, "grad_norm": 0.1050623282790184, "learning_rate": 5.417789757412399e-06, "loss": 0.0006, "step": 3570 },
    { "epoch": 52.65, "grad_norm": 0.019064374268054962, "learning_rate": 5.400943396226416e-06, "loss": 0.0001, "step": 3580 },
    { "epoch": 52.79, "grad_norm": 0.020824899896979332, "learning_rate": 5.384097035040432e-06, "loss": 0.0001, "step": 3590 },
    { "epoch": 52.94, "grad_norm": 0.019172310829162598, "learning_rate": 5.367250673854448e-06, "loss": 0.0002, "step": 3600 },
    { "epoch": 53.09, "grad_norm": 0.015175268054008484, "learning_rate": 5.350404312668463e-06, "loss": 0.0003, "step": 3610 },
    { "epoch": 53.24, "grad_norm": 0.03325885161757469, "learning_rate": 5.33355795148248e-06, "loss": 0.0004, "step": 3620 },
    { "epoch": 53.38, "grad_norm": 0.02436959184706211, "learning_rate": 5.3167115902964964e-06, "loss": 0.0002, "step": 3630 },
    { "epoch": 53.53, "grad_norm": 0.017924895510077477, "learning_rate": 5.299865229110512e-06, "loss": 0.0001, "step": 3640 },
    { "epoch": 53.68, "grad_norm": 0.02767987549304962, "learning_rate": 5.283018867924529e-06, "loss": 0.0001, "step": 3650 },
    { "epoch": 53.82, "grad_norm": 0.01485258899629116, "learning_rate": 5.266172506738545e-06, "loss": 0.0003, "step": 3660 },
    { "epoch": 53.97, "grad_norm": 0.013557419180870056, "learning_rate": 5.249326145552561e-06, "loss": 0.0001, "step": 3670 },
    { "epoch": 54.12, "grad_norm": 0.010153830982744694, "learning_rate": 5.232479784366578e-06, "loss": 0.0002, "step": 3680 },
    { "epoch": 54.26, "grad_norm": 0.01170294638723135, "learning_rate": 5.215633423180594e-06, "loss": 0.0001, "step": 3690 },
    { "epoch": 54.41, "grad_norm": 0.008615361526608467, "learning_rate": 5.19878706199461e-06, "loss": 0.0001, "step": 3700 },
    { "epoch": 54.56, "grad_norm": 0.00960615649819374, "learning_rate": 5.181940700808625e-06, "loss": 0.0001, "step": 3710 },
    { "epoch": 54.71, "grad_norm": 0.008886042051017284, "learning_rate": 5.165094339622642e-06, "loss": 0.0001, "step": 3720 },
    { "epoch": 54.85, "grad_norm": 0.014573791064321995, "learning_rate": 5.148247978436658e-06, "loss": 0.0001, "step": 3730 },
    { "epoch": 55.0, "grad_norm": 0.008440797217190266, "learning_rate": 5.131401617250674e-06, "loss": 0.0001, "step": 3740 },
    { "epoch": 55.15, "grad_norm": 0.010030324570834637, "learning_rate": 5.114555256064691e-06, "loss": 0.0001, "step": 3750 },
    { "epoch": 55.29, "grad_norm": 0.0625586286187172, "learning_rate": 5.097708894878707e-06, "loss": 0.0001, "step": 3760 },
    { "epoch": 55.44, "grad_norm": 0.011248340830206871, "learning_rate": 5.080862533692723e-06, "loss": 0.0001, "step": 3770 },
    { "epoch": 55.59, "grad_norm": 0.006955728866159916, "learning_rate": 5.064016172506739e-06, "loss": 0.0001, "step": 3780 },
    { "epoch": 55.74, "grad_norm": 0.00837327353656292, "learning_rate": 5.047169811320756e-06, "loss": 0.0001, "step": 3790 },
    { "epoch": 55.88, "grad_norm": 0.008046143688261509, "learning_rate": 5.030323450134771e-06, "loss": 0.0001, "step": 3800 },
    { "epoch": 56.03, "grad_norm": 0.007838546298444271, "learning_rate": 5.013477088948787e-06, "loss": 0.0001, "step": 3810 },
    { "epoch": 56.18, "grad_norm": 0.006525520700961351, "learning_rate": 4.996630727762803e-06, "loss": 0.0001, "step": 3820 },
    { "epoch": 56.32, "grad_norm": 0.005834328010678291, "learning_rate": 4.97978436657682e-06, "loss": 0.0001, "step": 3830 },
    { "epoch": 56.47, "grad_norm": 0.01387355849146843, "learning_rate": 4.962938005390836e-06, "loss": 0.0003, "step": 3840 },
    { "epoch": 56.62, "grad_norm": 0.01750769093632698, "learning_rate": 4.946091644204852e-06, "loss": 0.0001, "step": 3850 },
    { "epoch": 56.76, "grad_norm": 0.018308816477656364, "learning_rate": 4.929245283018868e-06, "loss": 0.0001, "step": 3860 },
    { "epoch": 56.91, "grad_norm": 0.0117345554754138, "learning_rate": 4.9123989218328845e-06, "loss": 0.0001, "step": 3870 },
    { "epoch": 57.06, "grad_norm": 0.007436331827193499, "learning_rate": 4.8955525606469e-06, "loss": 0.0001, "step": 3880 },
    { "epoch": 57.21, "grad_norm": 0.00813203677535057, "learning_rate": 4.878706199460917e-06, "loss": 0.0001, "step": 3890 },
    { "epoch": 57.35, "grad_norm": 0.005871808156371117,
|
"learning_rate": 4.8618598382749335e-06, |
|
"loss": 0.0001, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 57.5, |
|
"grad_norm": 0.006669571157544851, |
|
"learning_rate": 4.845013477088949e-06, |
|
"loss": 0.0001, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 57.65, |
|
"grad_norm": 0.004995842929929495, |
|
"learning_rate": 4.828167115902965e-06, |
|
"loss": 0.0001, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 57.79, |
|
"grad_norm": 0.0052757407538592815, |
|
"learning_rate": 4.811320754716982e-06, |
|
"loss": 0.0001, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 57.94, |
|
"grad_norm": 0.005870501976460218, |
|
"learning_rate": 4.7944743935309975e-06, |
|
"loss": 0.0001, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 58.09, |
|
"grad_norm": 0.007945972494781017, |
|
"learning_rate": 4.777628032345013e-06, |
|
"loss": 0.0001, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 58.24, |
|
"grad_norm": 0.005463455803692341, |
|
"learning_rate": 4.76078167115903e-06, |
|
"loss": 0.0001, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 58.38, |
|
"grad_norm": 0.0040795220993459225, |
|
"learning_rate": 4.7439353099730466e-06, |
|
"loss": 0.0001, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 58.53, |
|
"grad_norm": 0.004236205480992794, |
|
"learning_rate": 4.727088948787062e-06, |
|
"loss": 0.0001, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 58.68, |
|
"grad_norm": 0.004899207036942244, |
|
"learning_rate": 4.710242587601079e-06, |
|
"loss": 0.0001, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 58.82, |
|
"grad_norm": 0.006042791530489922, |
|
"learning_rate": 4.693396226415095e-06, |
|
"loss": 0.0001, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 58.82, |
|
"eval_loss": 0.5202410221099854, |
|
"eval_runtime": 391.5224, |
|
"eval_samples_per_second": 1.926, |
|
"eval_steps_per_second": 0.02, |
|
"eval_wer": 40.07905138339921, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 58.97, |
|
"grad_norm": 0.004714336711913347, |
|
"learning_rate": 4.6765498652291105e-06, |
|
"loss": 0.0001, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 59.12, |
|
"grad_norm": 0.004793087020516396, |
|
"learning_rate": 4.659703504043127e-06, |
|
"loss": 0.0001, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 59.26, |
|
"grad_norm": 0.0044836923480033875, |
|
"learning_rate": 4.642857142857144e-06, |
|
"loss": 0.0001, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 59.41, |
|
"grad_norm": 0.004963328130543232, |
|
"learning_rate": 4.626010781671159e-06, |
|
"loss": 0.0001, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 59.56, |
|
"grad_norm": 0.003358585759997368, |
|
"learning_rate": 4.609164420485175e-06, |
|
"loss": 0.0001, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 59.71, |
|
"grad_norm": 0.004264355171471834, |
|
"learning_rate": 4.592318059299192e-06, |
|
"loss": 0.0001, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 59.85, |
|
"grad_norm": 0.0040533156134188175, |
|
"learning_rate": 4.575471698113208e-06, |
|
"loss": 0.0001, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"grad_norm": 0.004796077497303486, |
|
"learning_rate": 4.558625336927224e-06, |
|
"loss": 0.0001, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 60.15, |
|
"grad_norm": 0.0041580419056117535, |
|
"learning_rate": 4.54177897574124e-06, |
|
"loss": 0.0001, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 60.29, |
|
"grad_norm": 0.004052949137985706, |
|
"learning_rate": 4.524932614555256e-06, |
|
"loss": 0.0, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 60.44, |
|
"grad_norm": 0.004524200223386288, |
|
"learning_rate": 4.5080862533692726e-06, |
|
"loss": 0.0, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 60.59, |
|
"grad_norm": 0.0040616546757519245, |
|
"learning_rate": 4.491239892183289e-06, |
|
"loss": 0.0, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 60.74, |
|
"grad_norm": 0.004815140273422003, |
|
"learning_rate": 4.474393530997305e-06, |
|
"loss": 0.0001, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 60.88, |
|
"grad_norm": 0.0043374099768698215, |
|
"learning_rate": 4.457547169811321e-06, |
|
"loss": 0.0, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 61.03, |
|
"grad_norm": 0.003582689445465803, |
|
"learning_rate": 4.440700808625337e-06, |
|
"loss": 0.0, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 61.18, |
|
"grad_norm": 0.00396451773121953, |
|
"learning_rate": 4.423854447439353e-06, |
|
"loss": 0.0, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 61.32, |
|
"grad_norm": 0.004014734644442797, |
|
"learning_rate": 4.40700808625337e-06, |
|
"loss": 0.0, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 61.47, |
|
"grad_norm": 0.0032315212301909924, |
|
"learning_rate": 4.390161725067386e-06, |
|
"loss": 0.0, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 61.62, |
|
"grad_norm": 0.0032560701947659254, |
|
"learning_rate": 4.373315363881402e-06, |
|
"loss": 0.0001, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 61.76, |
|
"grad_norm": 0.0036136566195636988, |
|
"learning_rate": 4.356469002695418e-06, |
|
"loss": 0.0, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 61.91, |
|
"grad_norm": 0.004307006951421499, |
|
"learning_rate": 4.339622641509435e-06, |
|
"loss": 0.0, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 62.06, |
|
"grad_norm": 0.00355698075145483, |
|
"learning_rate": 4.32277628032345e-06, |
|
"loss": 0.0, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 62.21, |
|
"grad_norm": 0.003031729022040963, |
|
"learning_rate": 4.305929919137466e-06, |
|
"loss": 0.0, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 62.35, |
|
"grad_norm": 0.003886875230818987, |
|
"learning_rate": 4.289083557951483e-06, |
|
"loss": 0.0, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 62.5, |
|
"grad_norm": 0.003472729818895459, |
|
"learning_rate": 4.2722371967654994e-06, |
|
"loss": 0.0, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 62.65, |
|
"grad_norm": 0.004264041781425476, |
|
"learning_rate": 4.255390835579515e-06, |
|
"loss": 0.0, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 62.79, |
|
"grad_norm": 0.0033135716803371906, |
|
"learning_rate": 4.238544474393531e-06, |
|
"loss": 0.0, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 62.94, |
|
"grad_norm": 0.003554994473233819, |
|
"learning_rate": 4.221698113207548e-06, |
|
"loss": 0.0, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 63.09, |
|
"grad_norm": 0.0036291780415922403, |
|
"learning_rate": 4.204851752021563e-06, |
|
"loss": 0.0, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 63.24, |
|
"grad_norm": 0.003123074769973755, |
|
"learning_rate": 4.18800539083558e-06, |
|
"loss": 0.0, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 63.38, |
|
"grad_norm": 0.0030143638141453266, |
|
"learning_rate": 4.171159029649597e-06, |
|
"loss": 0.0, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 63.53, |
|
"grad_norm": 0.0033236437011510134, |
|
"learning_rate": 4.1543126684636125e-06, |
|
"loss": 0.0, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 63.68, |
|
"grad_norm": 0.003377540735527873, |
|
"learning_rate": 4.137466307277628e-06, |
|
"loss": 0.0, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 63.82, |
|
"grad_norm": 0.0034374604001641273, |
|
"learning_rate": 4.120619946091645e-06, |
|
"loss": 0.0, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 63.97, |
|
"grad_norm": 0.0032602883875370026, |
|
"learning_rate": 4.103773584905661e-06, |
|
"loss": 0.0, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 64.12, |
|
"grad_norm": 0.003039925592020154, |
|
"learning_rate": 4.086927223719676e-06, |
|
"loss": 0.0, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 64.26, |
|
"grad_norm": 0.003154992824420333, |
|
"learning_rate": 4.070080862533693e-06, |
|
"loss": 0.0, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 64.41, |
|
"grad_norm": 0.0030352873727679253, |
|
"learning_rate": 4.053234501347709e-06, |
|
"loss": 0.0, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 64.56, |
|
"grad_norm": 0.002541678724810481, |
|
"learning_rate": 4.0363881401617255e-06, |
|
"loss": 0.0, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 64.71, |
|
"grad_norm": 0.0035515627823770046, |
|
"learning_rate": 4.019541778975742e-06, |
|
"loss": 0.0, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 64.85, |
|
"grad_norm": 0.0032643494196236134, |
|
"learning_rate": 4.002695417789758e-06, |
|
"loss": 0.0, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 65.0, |
|
"grad_norm": 0.0029830934945493937, |
|
"learning_rate": 3.985849056603774e-06, |
|
"loss": 0.0, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 65.15, |
|
"grad_norm": 0.003434343496337533, |
|
"learning_rate": 3.96900269541779e-06, |
|
"loss": 0.0, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 65.29, |
|
"grad_norm": 0.003297910327091813, |
|
"learning_rate": 3.952156334231806e-06, |
|
"loss": 0.0, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 65.44, |
|
"grad_norm": 0.0029679101426154375, |
|
"learning_rate": 3.935309973045822e-06, |
|
"loss": 0.0, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 65.59, |
|
"grad_norm": 0.002996984403580427, |
|
"learning_rate": 3.9184636118598385e-06, |
|
"loss": 0.0, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 65.74, |
|
"grad_norm": 0.0031057405285537243, |
|
"learning_rate": 3.901617250673855e-06, |
|
"loss": 0.0, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 65.88, |
|
"grad_norm": 0.0026742741465568542, |
|
"learning_rate": 3.884770889487871e-06, |
|
"loss": 0.0, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 66.03, |
|
"grad_norm": 0.003236331744119525, |
|
"learning_rate": 3.8679245283018875e-06, |
|
"loss": 0.0, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 66.18, |
|
"grad_norm": 0.002975077135488391, |
|
"learning_rate": 3.851078167115903e-06, |
|
"loss": 0.0, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 66.18, |
|
"eval_loss": 0.5316298604011536, |
|
"eval_runtime": 394.5635, |
|
"eval_samples_per_second": 1.911, |
|
"eval_steps_per_second": 0.02, |
|
"eval_wer": 39.80237154150198, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 66.32, |
|
"grad_norm": 0.0026845140382647514, |
|
"learning_rate": 3.834231805929919e-06, |
|
"loss": 0.0, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 66.47, |
|
"grad_norm": 0.002751615596935153, |
|
"learning_rate": 3.817385444743936e-06, |
|
"loss": 0.0, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 66.62, |
|
"grad_norm": 0.0025983687955886126, |
|
"learning_rate": 3.800539083557952e-06, |
|
"loss": 0.0, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 66.76, |
|
"grad_norm": 0.0027817366644740105, |
|
"learning_rate": 3.7836927223719677e-06, |
|
"loss": 0.0, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 66.91, |
|
"grad_norm": 0.0028080460615456104, |
|
"learning_rate": 3.766846361185984e-06, |
|
"loss": 0.0, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 67.06, |
|
"grad_norm": 0.0026772802229970694, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.0, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 67.21, |
|
"grad_norm": 0.002854007761925459, |
|
"learning_rate": 3.7331536388140167e-06, |
|
"loss": 0.0, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 67.35, |
|
"grad_norm": 0.0026527883019298315, |
|
"learning_rate": 3.716307277628033e-06, |
|
"loss": 0.0, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 67.5, |
|
"grad_norm": 0.002448031213134527, |
|
"learning_rate": 3.6994609164420487e-06, |
|
"loss": 0.0, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 67.65, |
|
"grad_norm": 0.002617811318486929, |
|
"learning_rate": 3.682614555256065e-06, |
|
"loss": 0.0, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 67.79, |
|
"grad_norm": 0.0031759808771312237, |
|
"learning_rate": 3.665768194070081e-06, |
|
"loss": 0.0, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 67.94, |
|
"grad_norm": 0.002507594181224704, |
|
"learning_rate": 3.6489218328840973e-06, |
|
"loss": 0.0, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 68.09, |
|
"grad_norm": 0.0023514223285019398, |
|
"learning_rate": 3.632075471698113e-06, |
|
"loss": 0.0, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 68.24, |
|
"grad_norm": 0.0027671242132782936, |
|
"learning_rate": 3.6152291105121297e-06, |
|
"loss": 0.0, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 68.38, |
|
"grad_norm": 0.002477505709975958, |
|
"learning_rate": 3.598382749326146e-06, |
|
"loss": 0.0, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 68.53, |
|
"grad_norm": 0.0028106619138270617, |
|
"learning_rate": 3.581536388140162e-06, |
|
"loss": 0.0, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 68.68, |
|
"grad_norm": 0.0034189883153885603, |
|
"learning_rate": 3.5646900269541783e-06, |
|
"loss": 0.0, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 68.82, |
|
"grad_norm": 0.0030117544811218977, |
|
"learning_rate": 3.547843665768194e-06, |
|
"loss": 0.0, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 68.97, |
|
"grad_norm": 0.002722077304497361, |
|
"learning_rate": 3.5309973045822103e-06, |
|
"loss": 0.0, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 69.12, |
|
"grad_norm": 0.00265983073040843, |
|
"learning_rate": 3.514150943396227e-06, |
|
"loss": 0.0, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 69.26, |
|
"grad_norm": 0.002687458647415042, |
|
"learning_rate": 3.497304582210243e-06, |
|
"loss": 0.0, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 69.41, |
|
"grad_norm": 0.002611985895782709, |
|
"learning_rate": 3.480458221024259e-06, |
|
"loss": 0.0, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 69.56, |
|
"grad_norm": 0.002745421137660742, |
|
"learning_rate": 3.463611859838275e-06, |
|
"loss": 0.0, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 69.71, |
|
"grad_norm": 0.00274544395506382, |
|
"learning_rate": 3.4467654986522914e-06, |
|
"loss": 0.0, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 69.85, |
|
"grad_norm": 0.0032134081702679396, |
|
"learning_rate": 3.4299191374663076e-06, |
|
"loss": 0.0, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"grad_norm": 0.0027218926697969437, |
|
"learning_rate": 3.4130727762803238e-06, |
|
"loss": 0.0, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 70.15, |
|
"grad_norm": 0.0029763532802462578, |
|
"learning_rate": 3.3962264150943395e-06, |
|
"loss": 0.0, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 70.29, |
|
"grad_norm": 0.002971038920804858, |
|
"learning_rate": 3.379380053908356e-06, |
|
"loss": 0.0, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 70.44, |
|
"grad_norm": 0.002593262353911996, |
|
"learning_rate": 3.3625336927223724e-06, |
|
"loss": 0.0, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 70.59, |
|
"grad_norm": 0.0025443334598094225, |
|
"learning_rate": 3.3456873315363886e-06, |
|
"loss": 0.0, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 70.74, |
|
"grad_norm": 0.002275052247568965, |
|
"learning_rate": 3.3288409703504044e-06, |
|
"loss": 0.0, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 70.88, |
|
"grad_norm": 0.0028011808171868324, |
|
"learning_rate": 3.3119946091644206e-06, |
|
"loss": 0.0, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 71.03, |
|
"grad_norm": 0.0028713145293295383, |
|
"learning_rate": 3.2951482479784368e-06, |
|
"loss": 0.0, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 71.18, |
|
"grad_norm": 0.00250536622479558, |
|
"learning_rate": 3.2783018867924534e-06, |
|
"loss": 0.0, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 71.32, |
|
"grad_norm": 0.002349557587876916, |
|
"learning_rate": 3.2614555256064696e-06, |
|
"loss": 0.0, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 71.47, |
|
"grad_norm": 0.002672821283340454, |
|
"learning_rate": 3.2446091644204854e-06, |
|
"loss": 0.0, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 71.62, |
|
"grad_norm": 0.002757046138867736, |
|
"learning_rate": 3.2277628032345016e-06, |
|
"loss": 0.0, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 71.76, |
|
"grad_norm": 0.002534884260967374, |
|
"learning_rate": 3.210916442048518e-06, |
|
"loss": 0.0, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 71.91, |
|
"grad_norm": 0.0023479722440242767, |
|
"learning_rate": 3.194070080862534e-06, |
|
"loss": 0.0, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 72.06, |
|
"grad_norm": 0.0025629138108342886, |
|
"learning_rate": 3.1772237196765498e-06, |
|
"loss": 0.0, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 72.21, |
|
"grad_norm": 0.002501903334632516, |
|
"learning_rate": 3.160377358490566e-06, |
|
"loss": 0.0, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 72.35, |
|
"grad_norm": 0.002238241257146001, |
|
"learning_rate": 3.1435309973045826e-06, |
|
"loss": 0.0, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 72.5, |
|
"grad_norm": 0.0019617443904280663, |
|
"learning_rate": 3.126684636118599e-06, |
|
"loss": 0.0, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 72.65, |
|
"grad_norm": 0.0025888625532388687, |
|
"learning_rate": 3.109838274932615e-06, |
|
"loss": 0.0, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 72.79, |
|
"grad_norm": 0.0022458680905401707, |
|
"learning_rate": 3.092991913746631e-06, |
|
"loss": 0.0, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 72.94, |
|
"grad_norm": 0.0028502140194177628, |
|
"learning_rate": 3.076145552560647e-06, |
|
"loss": 0.0, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 73.09, |
|
"grad_norm": 0.0024815101642161608, |
|
"learning_rate": 3.0592991913746632e-06, |
|
"loss": 0.0, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 73.24, |
|
"grad_norm": 0.0020565763115882874, |
|
"learning_rate": 3.04245283018868e-06, |
|
"loss": 0.0, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 73.38, |
|
"grad_norm": 0.0019868898671120405, |
|
"learning_rate": 3.025606469002695e-06, |
|
"loss": 0.0, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 73.53, |
|
"grad_norm": 0.002612064126878977, |
|
"learning_rate": 3.008760107816712e-06, |
|
"loss": 0.0, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 73.53, |
|
"eval_loss": 0.5392932891845703, |
|
"eval_runtime": 438.3941, |
|
"eval_samples_per_second": 1.72, |
|
"eval_steps_per_second": 0.018, |
|
"eval_wer": 39.901185770750985, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 73.68, |
|
"grad_norm": 0.0019985686521977186, |
|
"learning_rate": 2.991913746630728e-06, |
|
"loss": 0.0, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 73.82, |
|
"grad_norm": 0.0021473197266459465, |
|
"learning_rate": 2.9750673854447442e-06, |
|
"loss": 0.0, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 73.97, |
|
"grad_norm": 0.0023225159384310246, |
|
"learning_rate": 2.9582210242587605e-06, |
|
"loss": 0.0, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 74.12, |
|
"grad_norm": 0.0022743339650332928, |
|
"learning_rate": 2.9413746630727762e-06, |
|
"loss": 0.0, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 74.26, |
|
"grad_norm": 0.002134987385943532, |
|
"learning_rate": 2.9245283018867924e-06, |
|
"loss": 0.0, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 74.41, |
|
"grad_norm": 0.0021126819774508476, |
|
"learning_rate": 2.907681940700809e-06, |
|
"loss": 0.0, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 74.56, |
|
"grad_norm": 0.002264765091240406, |
|
"learning_rate": 2.8908355795148253e-06, |
|
"loss": 0.0, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 74.71, |
|
"grad_norm": 0.0028144866228103638, |
|
"learning_rate": 2.8739892183288415e-06, |
|
"loss": 0.0, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 74.85, |
|
"grad_norm": 0.0017289798706769943, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.0, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"grad_norm": 0.00249221152625978, |
|
"learning_rate": 2.8402964959568735e-06, |
|
"loss": 0.0, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 75.15, |
|
"grad_norm": 0.0018814760260283947, |
|
"learning_rate": 2.8234501347708897e-06, |
|
"loss": 0.0, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 75.29, |
|
"grad_norm": 0.0022003494668751955, |
|
"learning_rate": 2.8066037735849063e-06, |
|
"loss": 0.0, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 75.44, |
|
"grad_norm": 0.0023006245028227568, |
|
"learning_rate": 2.7897574123989217e-06, |
|
"loss": 0.0, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 75.59, |
|
"grad_norm": 0.0018226418178528547, |
|
"learning_rate": 2.7729110512129383e-06, |
|
"loss": 0.0, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 75.74, |
|
"grad_norm": 0.0019806609489023685, |
|
"learning_rate": 2.7560646900269545e-06, |
|
"loss": 0.0, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 75.88, |
|
"grad_norm": 0.0021330551244318485, |
|
"learning_rate": 2.7392183288409707e-06, |
|
"loss": 0.0, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 76.03, |
|
"grad_norm": 0.0021852573845535517, |
|
"learning_rate": 2.722371967654987e-06, |
|
"loss": 0.0, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 76.18, |
|
"grad_norm": 0.0021261894144117832, |
|
"learning_rate": 2.7055256064690027e-06, |
|
"loss": 0.0, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 76.32, |
|
"grad_norm": 0.0022390589583665133, |
|
"learning_rate": 2.688679245283019e-06, |
|
"loss": 0.0, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 76.47, |
|
"grad_norm": 0.002223338931798935, |
|
"learning_rate": 2.6718328840970355e-06, |
|
"loss": 0.0, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 76.62, |
|
"grad_norm": 0.0019250946352258325, |
|
"learning_rate": 2.6549865229110517e-06, |
|
"loss": 0.0, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 76.76, |
|
"grad_norm": 0.001935865031555295, |
|
"learning_rate": 2.6381401617250675e-06, |
|
"loss": 0.0, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 76.91, |
|
"grad_norm": 0.0018222652142867446, |
|
"learning_rate": 2.6212938005390837e-06, |
|
"loss": 0.0, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 77.06, |
|
"grad_norm": 0.0015805804869160056, |
|
"learning_rate": 2.6044474393531e-06, |
|
"loss": 0.0, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 77.21, |
|
"grad_norm": 0.0018061785958707333, |
|
"learning_rate": 2.587601078167116e-06, |
|
"loss": 0.0, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 77.35, |
|
"grad_norm": 0.0019492105348035693, |
|
"learning_rate": 2.5707547169811327e-06, |
|
"loss": 0.0, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 77.5, |
|
"grad_norm": 0.0020225332118570805, |
|
"learning_rate": 2.553908355795148e-06, |
|
"loss": 0.0, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 77.65, |
|
"grad_norm": 0.0021102512255311012, |
|
"learning_rate": 2.5370619946091647e-06, |
|
"loss": 0.0, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 77.79, |
|
"grad_norm": 0.001952395774424076, |
|
"learning_rate": 2.520215633423181e-06, |
|
"loss": 0.0, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 77.94, |
|
"grad_norm": 0.0022354356478899717, |
|
"learning_rate": 2.503369272237197e-06, |
|
"loss": 0.0, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 78.09, |
|
"grad_norm": 0.0015943964244797826, |
|
"learning_rate": 2.4865229110512133e-06, |
|
"loss": 0.0, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 78.24, |
|
"grad_norm": 0.00202794186770916, |
|
"learning_rate": 2.469676549865229e-06, |
|
"loss": 0.0, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 78.38, |
|
"grad_norm": 0.002233139704912901, |
|
"learning_rate": 2.4528301886792453e-06, |
|
"loss": 0.0, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 78.53, |
|
"grad_norm": 0.0019945132080465555, |
|
"learning_rate": 2.435983827493262e-06, |
|
"loss": 0.0, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 78.68, |
|
"grad_norm": 0.001916647655889392, |
|
"learning_rate": 2.4191374663072777e-06, |
|
"loss": 0.0, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 78.82, |
|
"grad_norm": 0.0020198444835841656, |
|
"learning_rate": 2.402291105121294e-06, |
|
"loss": 0.0, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 78.97, |
|
"grad_norm": 0.0019901052583009005, |
|
"learning_rate": 2.38544474393531e-06, |
|
"loss": 0.0, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 79.12, |
|
"grad_norm": 0.0020235173869878054, |
|
"learning_rate": 2.3685983827493263e-06, |
|
"loss": 0.0, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 79.26, |
|
"grad_norm": 0.0018330395687371492, |
|
"learning_rate": 2.3517520215633426e-06, |
|
"loss": 0.0, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 79.41, |
|
"grad_norm": 0.0023276926949620247, |
|
"learning_rate": 2.3349056603773588e-06, |
|
"loss": 0.0, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 79.56, |
|
"grad_norm": 0.0019117222400382161, |
|
"learning_rate": 2.3180592991913745e-06, |
|
"loss": 0.0, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 79.71, |
|
"grad_norm": 0.0019178889924660325, |
|
"learning_rate": 2.301212938005391e-06, |
|
"loss": 0.0, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 79.85, |
|
"grad_norm": 0.002448749728500843, |
|
"learning_rate": 2.2843665768194074e-06, |
|
"loss": 0.0, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"grad_norm": 0.0022572469897568226, |
|
"learning_rate": 2.267520215633423e-06, |
|
"loss": 0.0, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 80.15, |
|
"grad_norm": 0.001967059215530753, |
|
"learning_rate": 2.2506738544474398e-06, |
|
"loss": 0.0, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 80.29, |
|
"grad_norm": 0.0022357290145009756, |
|
"learning_rate": 2.2338274932614556e-06, |
|
"loss": 0.0, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 80.44, |
|
"grad_norm": 0.002172647975385189, |
|
"learning_rate": 2.2169811320754718e-06, |
|
"loss": 0.0, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 80.59, |
|
"grad_norm": 0.0016459524631500244, |
|
"learning_rate": 2.200134770889488e-06, |
|
"loss": 0.0, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 80.74, |
|
"grad_norm": 0.00204639439471066, |
|
"learning_rate": 2.183288409703504e-06, |
|
"loss": 0.0, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 80.88, |
|
"grad_norm": 0.0017498856177553535, |
|
"learning_rate": 2.1664420485175204e-06, |
|
"loss": 0.0, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 80.88, |
|
"eval_loss": 0.5448271632194519, |
|
"eval_runtime": 430.4314, |
|
"eval_samples_per_second": 1.752, |
|
"eval_steps_per_second": 0.019, |
|
"eval_wer": 39.66403162055336, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 81.03, |
|
"grad_norm": 0.0018372549675405025, |
|
"learning_rate": 2.1495956873315366e-06, |
|
"loss": 0.0, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 81.18, |
|
"grad_norm": 0.0016495827585458755, |
|
"learning_rate": 2.132749326145553e-06, |
|
"loss": 0.0, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 81.32, |
|
"grad_norm": 0.0017999428091570735, |
|
"learning_rate": 2.115902964959569e-06, |
|
"loss": 0.0, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 81.47, |
|
"grad_norm": 0.0018734214827418327, |
|
"learning_rate": 2.099056603773585e-06, |
|
"loss": 0.0, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 81.62, |
|
"grad_norm": 0.0016185399144887924, |
|
"learning_rate": 2.082210242587601e-06, |
|
"loss": 0.0, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 81.76, |
|
"grad_norm": 0.001710501266643405, |
|
"learning_rate": 2.0653638814016176e-06, |
|
"loss": 0.0, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 81.91, |
|
"grad_norm": 0.002252147998660803, |
|
"learning_rate": 2.0485175202156334e-06, |
|
"loss": 0.0, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 82.06, |
|
"grad_norm": 0.0018945408519357443, |
|
"learning_rate": 2.0316711590296496e-06, |
|
"loss": 0.0, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 82.21, |
|
"grad_norm": 0.0016931135905906558, |
|
"learning_rate": 2.014824797843666e-06, |
|
"loss": 0.0, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 82.35, |
|
"grad_norm": 0.002079603960737586, |
|
"learning_rate": 1.997978436657682e-06, |
|
"loss": 0.0, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 82.5, |
|
"grad_norm": 0.0014379196800291538, |
|
"learning_rate": 1.981132075471698e-06, |
|
"loss": 0.0, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 82.65, |
|
"grad_norm": 0.0017873428296297789, |
|
"learning_rate": 1.9642857142857144e-06, |
|
"loss": 0.0, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 82.79, |
|
"grad_norm": 0.0016607646830379963, |
|
"learning_rate": 1.9474393530997306e-06, |
|
"loss": 0.0, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 82.94, |
|
"grad_norm": 0.0018578683957457542, |
|
"learning_rate": 1.930592991913747e-06, |
|
"loss": 0.0, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 83.09, |
|
"grad_norm": 0.001992453821003437, |
|
"learning_rate": 1.913746630727763e-06, |
|
"loss": 0.0, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 83.24, |
|
"grad_norm": 0.001795714139007032, |
|
"learning_rate": 1.896900269541779e-06, |
|
"loss": 0.0, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 83.38, |
|
"grad_norm": 0.0017513408092781901, |
|
"learning_rate": 1.8800539083557952e-06, |
|
"loss": 0.0, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 83.53, |
|
"grad_norm": 0.001984973670914769, |
|
"learning_rate": 1.8632075471698114e-06, |
|
"loss": 0.0, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 83.68, |
|
"grad_norm": 0.001725591835565865, |
|
"learning_rate": 1.8463611859838276e-06, |
|
"loss": 0.0, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 83.82, |
|
"grad_norm": 0.001957892207428813, |
|
"learning_rate": 1.8295148247978438e-06, |
|
"loss": 0.0, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 83.97, |
|
"grad_norm": 0.0014876240165904164, |
|
"learning_rate": 1.8126684636118598e-06, |
|
"loss": 0.0, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 84.12, |
|
"grad_norm": 0.0017114444635808468, |
|
"learning_rate": 1.7958221024258763e-06, |
|
"loss": 0.0, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 84.26, |
|
"grad_norm": 0.0018760694656521082, |
|
"learning_rate": 1.7789757412398922e-06, |
|
"loss": 0.0, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 84.41, |
|
"grad_norm": 0.001803001156076789, |
|
"learning_rate": 1.7621293800539085e-06, |
|
"loss": 0.0, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 84.56, |
|
"grad_norm": 0.0018211943097412586, |
|
"learning_rate": 1.7452830188679247e-06, |
|
"loss": 0.0, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 84.71, |
|
"grad_norm": 0.001698785461485386, |
|
"learning_rate": 1.7284366576819409e-06, |
|
"loss": 0.0, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 84.85, |
|
"grad_norm": 0.00188141327816993, |
|
"learning_rate": 1.711590296495957e-06, |
|
"loss": 0.0, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 85.0, |
|
"grad_norm": 0.0018102971371263266, |
|
"learning_rate": 1.6947439353099733e-06, |
|
"loss": 0.0, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 85.15, |
|
"grad_norm": 0.002033288823440671, |
|
"learning_rate": 1.6778975741239895e-06, |
|
"loss": 0.0, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 85.29, |
|
"grad_norm": 0.0015100777382031083, |
|
"learning_rate": 1.6610512129380055e-06, |
|
"loss": 0.0, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 85.44, |
|
"grad_norm": 0.0018259905045852065, |
|
"learning_rate": 1.6442048517520217e-06, |
|
"loss": 0.0, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 85.59, |
|
"grad_norm": 0.0018028096528723836, |
|
"learning_rate": 1.6273584905660379e-06, |
|
"loss": 0.0, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 85.74, |
|
"grad_norm": 0.0016898865578696132, |
|
"learning_rate": 1.610512129380054e-06, |
|
"loss": 0.0, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 85.88, |
|
"grad_norm": 0.0020932299084961414, |
|
"learning_rate": 1.59366576819407e-06, |
|
"loss": 0.0, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 86.03, |
|
"grad_norm": 0.0018589514074847102, |
|
"learning_rate": 1.5768194070080865e-06, |
|
"loss": 0.0, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 86.18, |
|
"grad_norm": 0.001960631925612688, |
|
"learning_rate": 1.5599730458221027e-06, |
|
"loss": 0.0, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 86.32, |
|
"grad_norm": 0.0015809714095667005, |
|
"learning_rate": 1.5431266846361187e-06, |
|
"loss": 0.0, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 86.47, |
|
"grad_norm": 0.0015852567739784718, |
|
"learning_rate": 1.5262803234501349e-06, |
|
"loss": 0.0, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 86.62, |
|
"grad_norm": 0.001482869847677648, |
|
"learning_rate": 1.509433962264151e-06, |
|
"loss": 0.0, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 86.76, |
|
"grad_norm": 0.00168868328910321, |
|
"learning_rate": 1.4925876010781673e-06, |
|
"loss": 0.0, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 86.91, |
|
"grad_norm": 0.0017209333600476384, |
|
"learning_rate": 1.4757412398921833e-06, |
|
"loss": 0.0, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 87.06, |
|
"grad_norm": 0.0015247148694470525, |
|
"learning_rate": 1.4588948787061997e-06, |
|
"loss": 0.0, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 87.21, |
|
"grad_norm": 0.0020303872879594564, |
|
"learning_rate": 1.4420485175202157e-06, |
|
"loss": 0.0, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 87.35, |
|
"grad_norm": 0.001518064527772367, |
|
"learning_rate": 1.425202156334232e-06, |
|
"loss": 0.0, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 87.5, |
|
"grad_norm": 0.0015475251711905003, |
|
"learning_rate": 1.4083557951482481e-06, |
|
"loss": 0.0, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 87.65, |
|
"grad_norm": 0.0014926763251423836, |
|
"learning_rate": 1.3915094339622643e-06, |
|
"loss": 0.0, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 87.79, |
|
"grad_norm": 0.0014638776192441583, |
|
"learning_rate": 1.3746630727762805e-06, |
|
"loss": 0.0, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 87.94, |
|
"grad_norm": 0.0015593844000250101, |
|
"learning_rate": 1.3578167115902965e-06, |
|
"loss": 0.0, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 88.09, |
|
"grad_norm": 0.001806886401027441, |
|
"learning_rate": 1.340970350404313e-06, |
|
"loss": 0.0, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 88.24, |
|
"grad_norm": 0.001656343461945653, |
|
"learning_rate": 1.324123989218329e-06, |
|
"loss": 0.0, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 88.24, |
|
"eval_loss": 0.5489587783813477, |
|
"eval_runtime": 428.0298, |
|
"eval_samples_per_second": 1.762, |
|
"eval_steps_per_second": 0.019, |
|
"eval_wer": 39.565217391304344, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 88.38, |
|
"grad_norm": 0.0019978652708232403, |
|
"learning_rate": 1.3072776280323451e-06, |
|
"loss": 0.0, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 88.53, |
|
"grad_norm": 0.0016022637719288468, |
|
"learning_rate": 1.2904312668463611e-06, |
|
"loss": 0.0, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 88.68, |
|
"grad_norm": 0.0016824258491396904, |
|
"learning_rate": 1.2735849056603775e-06, |
|
"loss": 0.0, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 88.82, |
|
"grad_norm": 0.0017654020339250565, |
|
"learning_rate": 1.2567385444743937e-06, |
|
"loss": 0.0, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 88.97, |
|
"grad_norm": 0.001638589659705758, |
|
"learning_rate": 1.2398921832884097e-06, |
|
"loss": 0.0, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 89.12, |
|
"grad_norm": 0.001573295914568007, |
|
"learning_rate": 1.223045822102426e-06, |
|
"loss": 0.0, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 89.26, |
|
"grad_norm": 0.0014727225061506033, |
|
"learning_rate": 1.2061994609164422e-06, |
|
"loss": 0.0, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 89.41, |
|
"grad_norm": 0.0016162623651325703, |
|
"learning_rate": 1.1893530997304584e-06, |
|
"loss": 0.0, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 89.56, |
|
"grad_norm": 0.0016934397863224149, |
|
"learning_rate": 1.1725067385444746e-06, |
|
"loss": 0.0, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 89.71, |
|
"grad_norm": 0.0017905846470966935, |
|
"learning_rate": 1.1556603773584908e-06, |
|
"loss": 0.0, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 89.85, |
|
"grad_norm": 0.001646560151129961, |
|
"learning_rate": 1.1388140161725068e-06, |
|
"loss": 0.0, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"grad_norm": 0.001669664983637631, |
|
"learning_rate": 1.121967654986523e-06, |
|
"loss": 0.0, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 90.15, |
|
"grad_norm": 0.0015495212282985449, |
|
"learning_rate": 1.1051212938005392e-06, |
|
"loss": 0.0, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 90.29, |
|
"grad_norm": 0.0015938329743221402, |
|
"learning_rate": 1.0882749326145554e-06, |
|
"loss": 0.0, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 90.44, |
|
"grad_norm": 0.0019517322070896626, |
|
"learning_rate": 1.0714285714285714e-06, |
|
"loss": 0.0, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 90.59, |
|
"grad_norm": 0.0015430712373927236, |
|
"learning_rate": 1.0545822102425878e-06, |
|
"loss": 0.0, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 90.74, |
|
"grad_norm": 0.0016941017238423228, |
|
"learning_rate": 1.037735849056604e-06, |
|
"loss": 0.0, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 90.88, |
|
"grad_norm": 0.001742173102684319, |
|
"learning_rate": 1.02088948787062e-06, |
|
"loss": 0.0, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 91.03, |
|
"grad_norm": 0.0018419664120301604, |
|
"learning_rate": 1.0040431266846362e-06, |
|
"loss": 0.0, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 91.18, |
|
"grad_norm": 0.0017677543219178915, |
|
"learning_rate": 9.871967654986524e-07, |
|
"loss": 0.0, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 91.32, |
|
"grad_norm": 0.0016228326130658388, |
|
"learning_rate": 9.703504043126686e-07, |
|
"loss": 0.0, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 91.47, |
|
"grad_norm": 0.0014178988058120012, |
|
"learning_rate": 9.535040431266847e-07, |
|
"loss": 0.0, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 91.62, |
|
"grad_norm": 0.0019479967886582017, |
|
"learning_rate": 9.366576819407008e-07, |
|
"loss": 0.0, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 91.76, |
|
"grad_norm": 0.0013647832674905658, |
|
"learning_rate": 9.19811320754717e-07, |
|
"loss": 0.0, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 91.91, |
|
"grad_norm": 0.001629057340323925, |
|
"learning_rate": 9.029649595687333e-07, |
|
"loss": 0.0, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 92.06, |
|
"grad_norm": 0.0017082348931580782, |
|
"learning_rate": 8.861185983827494e-07, |
|
"loss": 0.0, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 92.21, |
|
"grad_norm": 0.0017187732737511396, |
|
"learning_rate": 8.692722371967656e-07, |
|
"loss": 0.0, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 92.35, |
|
"grad_norm": 0.0015693089226260781, |
|
"learning_rate": 8.524258760107817e-07, |
|
"loss": 0.0, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 92.5, |
|
"grad_norm": 0.0016504173399880528, |
|
"learning_rate": 8.355795148247979e-07, |
|
"loss": 0.0, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 92.65, |
|
"grad_norm": 0.0014378475025296211, |
|
"learning_rate": 8.18733153638814e-07, |
|
"loss": 0.0, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 92.79, |
|
"grad_norm": 0.0018091071397066116, |
|
"learning_rate": 8.018867924528302e-07, |
|
"loss": 0.0, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 92.94, |
|
"grad_norm": 0.0016386961797252297, |
|
"learning_rate": 7.850404312668463e-07, |
|
"loss": 0.0, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 93.09, |
|
"grad_norm": 0.0017092525959014893, |
|
"learning_rate": 7.681940700808626e-07, |
|
"loss": 0.0, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 93.24, |
|
"grad_norm": 0.0015886749606579542, |
|
"learning_rate": 7.513477088948788e-07, |
|
"loss": 0.0, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 93.38, |
|
"grad_norm": 0.0018031731015071273, |
|
"learning_rate": 7.345013477088949e-07, |
|
"loss": 0.0, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 93.53, |
|
"grad_norm": 0.0015261101070791483, |
|
"learning_rate": 7.176549865229111e-07, |
|
"loss": 0.0, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 93.68, |
|
"grad_norm": 0.001621345872990787, |
|
"learning_rate": 7.008086253369272e-07, |
|
"loss": 0.0, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 93.82, |
|
"grad_norm": 0.0018881525611504912, |
|
"learning_rate": 6.839622641509434e-07, |
|
"loss": 0.0, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 93.97, |
|
"grad_norm": 0.0014266808284446597, |
|
"learning_rate": 6.671159029649596e-07, |
|
"loss": 0.0, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 94.12, |
|
"grad_norm": 0.0017150483326986432, |
|
"learning_rate": 6.502695417789757e-07, |
|
"loss": 0.0, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 94.26, |
|
"grad_norm": 0.001493273302912712, |
|
"learning_rate": 6.33423180592992e-07, |
|
"loss": 0.0, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 94.41, |
|
"grad_norm": 0.0016202511033043265, |
|
"learning_rate": 6.165768194070082e-07, |
|
"loss": 0.0, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 94.56, |
|
"grad_norm": 0.001579651259817183, |
|
"learning_rate": 5.997304582210243e-07, |
|
"loss": 0.0, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 94.71, |
|
"grad_norm": 0.0014305037911981344, |
|
"learning_rate": 5.828840970350405e-07, |
|
"loss": 0.0, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 94.85, |
|
"grad_norm": 0.0015384092694148421, |
|
"learning_rate": 5.660377358490567e-07, |
|
"loss": 0.0, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"grad_norm": 0.0018785832216963172, |
|
"learning_rate": 5.491913746630729e-07, |
|
"loss": 0.0, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 95.15, |
|
"grad_norm": 0.001387153286486864, |
|
"learning_rate": 5.32345013477089e-07, |
|
"loss": 0.0, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 95.29, |
|
"grad_norm": 0.0013170434394851327, |
|
"learning_rate": 5.154986522911052e-07, |
|
"loss": 0.0, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 95.44, |
|
"grad_norm": 0.001672849990427494, |
|
"learning_rate": 4.986522911051214e-07, |
|
"loss": 0.0, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 95.59, |
|
"grad_norm": 0.0014924348797649145, |
|
"learning_rate": 4.818059299191375e-07, |
|
"loss": 0.0, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 95.59, |
|
"eval_loss": 0.5511065125465393, |
|
"eval_runtime": 393.4846, |
|
"eval_samples_per_second": 1.916, |
|
"eval_steps_per_second": 0.02, |
|
"eval_wer": 39.565217391304344, |
|
"step": 6500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 6784, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 100, |
|
"save_steps": 500, |
|
"total_flos": 1.87169276012544e+20, |
|
"train_batch_size": 100, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |