{
  "best_metric": 0.7016984883693075,
  "best_model_checkpoint": "wav2vec2-base-mirst500/checkpoint-6520",
  "epoch": 4.999616711383672,
  "global_step": 6520,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.6012269938650306e-07, "loss": 2.0823, "step": 10 },
    { "epoch": 0.02, "learning_rate": 9.202453987730061e-07, "loss": 2.0814, "step": 20 },
    { "epoch": 0.02, "learning_rate": 1.3803680981595093e-06, "loss": 2.0803, "step": 30 },
    { "epoch": 0.03, "learning_rate": 1.8404907975460122e-06, "loss": 2.0788, "step": 40 },
    { "epoch": 0.04, "learning_rate": 2.3006134969325154e-06, "loss": 2.0776, "step": 50 },
    { "epoch": 0.05, "learning_rate": 2.7607361963190186e-06, "loss": 2.0731, "step": 60 },
    { "epoch": 0.05, "learning_rate": 3.2208588957055217e-06, "loss": 2.0728, "step": 70 },
    { "epoch": 0.06, "learning_rate": 3.6809815950920245e-06, "loss": 2.0645, "step": 80 },
    { "epoch": 0.07, "learning_rate": 4.141104294478528e-06, "loss": 2.0645, "step": 90 },
    { "epoch": 0.08, "learning_rate": 4.601226993865031e-06, "loss": 2.0514, "step": 100 },
    { "epoch": 0.08, "learning_rate": 5.061349693251534e-06, "loss": 2.0403, "step": 110 },
    { "epoch": 0.09, "learning_rate": 5.521472392638037e-06, "loss": 2.0268, "step": 120 },
    { "epoch": 0.1, "learning_rate": 5.981595092024539e-06, "loss": 2.0036, "step": 130 },
    { "epoch": 0.11, "learning_rate": 6.4417177914110434e-06, "loss": 2.0021, "step": 140 },
    { "epoch": 0.11, "learning_rate": 6.901840490797547e-06, "loss": 1.9849, "step": 150 },
    { "epoch": 0.12, "learning_rate": 7.361963190184049e-06, "loss": 1.9574, "step": 160 },
    { "epoch": 0.13, "learning_rate": 7.822085889570552e-06, "loss": 1.9671, "step": 170 },
    { "epoch": 0.14, "learning_rate": 8.282208588957055e-06, "loss": 1.9527, "step": 180 },
    { "epoch": 0.15, "learning_rate": 8.742331288343558e-06, "loss": 1.9551, "step": 190 },
    { "epoch": 0.15, "learning_rate": 9.202453987730062e-06, "loss": 1.9561, "step": 200 },
    { "epoch": 0.16, "learning_rate": 9.662576687116563e-06, "loss": 1.9289, "step": 210 },
    { "epoch": 0.17, "learning_rate": 1.0122699386503068e-05, "loss": 1.9388, "step": 220 },
    { "epoch": 0.18, "learning_rate": 1.0582822085889571e-05, "loss": 1.9122, "step": 230 },
    { "epoch": 0.18, "learning_rate": 1.1042944785276074e-05, "loss": 1.8831, "step": 240 },
    { "epoch": 0.19, "learning_rate": 1.1503067484662577e-05, "loss": 1.9166, "step": 250 },
    { "epoch": 0.2, "learning_rate": 1.1963190184049079e-05, "loss": 1.8841, "step": 260 },
    { "epoch": 0.21, "learning_rate": 1.2423312883435584e-05, "loss": 1.8977, "step": 270 },
    { "epoch": 0.21, "learning_rate": 1.2883435582822087e-05, "loss": 1.9011, "step": 280 },
    { "epoch": 0.22, "learning_rate": 1.3343558282208588e-05, "loss": 1.8778, "step": 290 },
    { "epoch": 0.23, "learning_rate": 1.3803680981595093e-05, "loss": 1.8782, "step": 300 },
    { "epoch": 0.24, "learning_rate": 1.4263803680981595e-05, "loss": 1.8568, "step": 310 },
    { "epoch": 0.25, "learning_rate": 1.4723926380368098e-05, "loss": 1.8845, "step": 320 },
    { "epoch": 0.25, "learning_rate": 1.5184049079754601e-05, "loss": 1.852, "step": 330 },
    { "epoch": 0.26, "learning_rate": 1.5598159509202456e-05, "loss": 1.8479, "step": 340 },
    { "epoch": 0.27, "learning_rate": 1.6058282208588957e-05, "loss": 1.8345, "step": 350 },
    { "epoch": 0.28, "learning_rate": 1.6518404907975462e-05, "loss": 1.8518, "step": 360 },
    { "epoch": 0.28, "learning_rate": 1.6978527607361963e-05, "loss": 1.8653, "step": 370 },
    { "epoch": 0.29, "learning_rate": 1.7438650306748465e-05, "loss": 1.837, "step": 380 },
    { "epoch": 0.3, "learning_rate": 1.789877300613497e-05, "loss": 1.8058, "step": 390 },
    { "epoch": 0.31, "learning_rate": 1.8358895705521475e-05, "loss": 1.8301, "step": 400 },
    { "epoch": 0.31, "learning_rate": 1.8819018404907976e-05, "loss": 1.8287, "step": 410 },
    { "epoch": 0.32, "learning_rate": 1.9279141104294478e-05, "loss": 1.7848, "step": 420 },
    { "epoch": 0.33, "learning_rate": 1.9739263803680982e-05, "loss": 1.7821, "step": 430 },
    { "epoch": 0.34, "learning_rate": 2.0199386503067484e-05, "loss": 1.7985, "step": 440 },
    { "epoch": 0.34, "learning_rate": 2.065950920245399e-05, "loss": 1.762, "step": 450 },
    { "epoch": 0.35, "learning_rate": 2.1119631901840494e-05, "loss": 1.7812, "step": 460 },
    { "epoch": 0.36, "learning_rate": 2.1579754601226992e-05, "loss": 1.7677, "step": 470 },
    { "epoch": 0.37, "learning_rate": 2.2039877300613497e-05, "loss": 1.7733, "step": 480 },
    { "epoch": 0.38, "learning_rate": 2.25e-05, "loss": 1.7699, "step": 490 },
    { "epoch": 0.38, "learning_rate": 2.2960122699386503e-05, "loss": 1.7429, "step": 500 },
    { "epoch": 0.39, "learning_rate": 2.3420245398773008e-05, "loss": 1.6911, "step": 510 },
    { "epoch": 0.4, "learning_rate": 2.388036809815951e-05, "loss": 1.6975, "step": 520 },
    { "epoch": 0.41, "learning_rate": 2.434049079754601e-05, "loss": 1.7233, "step": 530 },
    { "epoch": 0.41, "learning_rate": 2.4800613496932516e-05, "loss": 1.6838, "step": 540 },
    { "epoch": 0.42, "learning_rate": 2.526073619631902e-05, "loss": 1.6709, "step": 550 },
    { "epoch": 0.43, "learning_rate": 2.5720858895705522e-05, "loss": 1.7078, "step": 560 },
    { "epoch": 0.44, "learning_rate": 2.6180981595092023e-05, "loss": 1.6882, "step": 570 },
    { "epoch": 0.44, "learning_rate": 2.6641104294478528e-05, "loss": 1.6386, "step": 580 },
    { "epoch": 0.45, "learning_rate": 2.7101226993865033e-05, "loss": 1.6315, "step": 590 },
    { "epoch": 0.46, "learning_rate": 2.7561349693251535e-05, "loss": 1.5773, "step": 600 },
    { "epoch": 0.47, "learning_rate": 2.802147239263804e-05, "loss": 1.6123, "step": 610 },
    { "epoch": 0.48, "learning_rate": 2.848159509202454e-05, "loss": 1.6244, "step": 620 },
    { "epoch": 0.48, "learning_rate": 2.8941717791411042e-05, "loss": 1.5473, "step": 630 },
    { "epoch": 0.49, "learning_rate": 2.9401840490797547e-05, "loss": 1.5417, "step": 640 },
    { "epoch": 0.5, "learning_rate": 2.9861963190184052e-05, "loss": 1.5088, "step": 650 },
    { "epoch": 0.51, "learning_rate": 2.9964212678936606e-05, "loss": 1.51, "step": 660 },
    { "epoch": 0.51, "learning_rate": 2.9913087934560325e-05, "loss": 1.5141, "step": 670 },
    { "epoch": 0.52, "learning_rate": 2.9861963190184052e-05, "loss": 1.4878, "step": 680 },
    { "epoch": 0.53, "learning_rate": 2.9810838445807772e-05, "loss": 1.4722, "step": 690 },
    { "epoch": 0.54, "learning_rate": 2.9759713701431495e-05, "loss": 1.4969, "step": 700 },
    { "epoch": 0.54, "learning_rate": 2.9708588957055215e-05, "loss": 1.4691, "step": 710 },
    { "epoch": 0.55, "learning_rate": 2.9657464212678938e-05, "loss": 1.4537, "step": 720 },
    { "epoch": 0.56, "learning_rate": 2.9606339468302658e-05, "loss": 1.4395, "step": 730 },
    { "epoch": 0.57, "learning_rate": 2.955521472392638e-05, "loss": 1.4219, "step": 740 },
    { "epoch": 0.57, "learning_rate": 2.9504089979550104e-05, "loss": 1.421, "step": 750 },
    { "epoch": 0.58, "learning_rate": 2.9452965235173824e-05, "loss": 1.4472, "step": 760 },
    { "epoch": 0.59, "learning_rate": 2.9401840490797547e-05, "loss": 1.404, "step": 770 },
    { "epoch": 0.6, "learning_rate": 2.9350715746421267e-05, "loss": 1.3762, "step": 780 },
    { "epoch": 0.61, "learning_rate": 2.929959100204499e-05, "loss": 1.3499, "step": 790 },
    { "epoch": 0.61, "learning_rate": 2.924846625766871e-05, "loss": 1.387, "step": 800 },
    { "epoch": 0.62, "learning_rate": 2.9197341513292437e-05, "loss": 1.3872, "step": 810 },
    { "epoch": 0.63, "learning_rate": 2.9146216768916156e-05, "loss": 1.3594, "step": 820 },
    { "epoch": 0.64, "learning_rate": 2.909509202453988e-05, "loss": 1.3386, "step": 830 },
    { "epoch": 0.64, "learning_rate": 2.90439672801636e-05, "loss": 1.362, "step": 840 },
    { "epoch": 0.65, "learning_rate": 2.8992842535787323e-05, "loss": 1.4005, "step": 850 },
    { "epoch": 0.66, "learning_rate": 2.8941717791411042e-05, "loss": 1.2967, "step": 860 },
    { "epoch": 0.67, "learning_rate": 2.8890593047034765e-05, "loss": 1.3309, "step": 870 },
    { "epoch": 0.67, "learning_rate": 2.883946830265849e-05, "loss": 1.3563, "step": 880 },
    { "epoch": 0.68, "learning_rate": 2.8788343558282212e-05, "loss": 1.3469, "step": 890 },
    { "epoch": 0.69, "learning_rate": 2.873721881390593e-05, "loss": 1.3561, "step": 900 },
    { "epoch": 0.7, "learning_rate": 2.8686094069529655e-05, "loss": 1.3207, "step": 910 },
    { "epoch": 0.71, "learning_rate": 2.8634969325153375e-05, "loss": 1.327, "step": 920 },
    { "epoch": 0.71, "learning_rate": 2.8583844580777094e-05, "loss": 1.2817, "step": 930 },
    { "epoch": 0.72, "learning_rate": 2.8532719836400818e-05, "loss": 1.2823, "step": 940 },
    { "epoch": 0.73, "learning_rate": 2.848159509202454e-05, "loss": 1.2915, "step": 950 },
    { "epoch": 0.74, "learning_rate": 2.8430470347648264e-05, "loss": 1.2361, "step": 960 },
    { "epoch": 0.74, "learning_rate": 2.8379345603271984e-05, "loss": 1.2647, "step": 970 },
    { "epoch": 0.75, "learning_rate": 2.8328220858895707e-05, "loss": 1.2677, "step": 980 },
    { "epoch": 0.76, "learning_rate": 2.8277096114519427e-05, "loss": 1.2611, "step": 990 },
    { "epoch": 0.77, "learning_rate": 2.822597137014315e-05, "loss": 1.293, "step": 1000 },
    { "epoch": 0.77, "learning_rate": 2.817484662576687e-05, "loss": 1.2216, "step": 1010 },
    { "epoch": 0.78, "learning_rate": 2.8123721881390596e-05, "loss": 1.3012, "step": 1020 },
    { "epoch": 0.79, "learning_rate": 2.8072597137014316e-05, "loss": 1.2432, "step": 1030 },
    { "epoch": 0.8, "learning_rate": 2.802147239263804e-05, "loss": 1.2935, "step": 1040 },
    { "epoch": 0.8, "learning_rate": 2.797034764826176e-05, "loss": 1.2375, "step": 1050 },
    { "epoch": 0.81, "learning_rate": 2.7919222903885482e-05, "loss": 1.2617, "step": 1060 },
    { "epoch": 0.82, "learning_rate": 2.7868098159509202e-05, "loss": 1.2519, "step": 1070 },
    { "epoch": 0.83, "learning_rate": 2.7816973415132922e-05, "loss": 1.1971, "step": 1080 },
    { "epoch": 0.84, "learning_rate": 2.776584867075665e-05, "loss": 1.2598, "step": 1090 },
    { "epoch": 0.84, "learning_rate": 2.771472392638037e-05, "loss": 1.2187, "step": 1100 },
    { "epoch": 0.85, "learning_rate": 2.766359918200409e-05, "loss": 1.2202, "step": 1110 },
    { "epoch": 0.86, "learning_rate": 2.761247443762781e-05, "loss": 1.2007, "step": 1120 },
    { "epoch": 0.87, "learning_rate": 2.7561349693251535e-05, "loss": 1.2117, "step": 1130 },
    { "epoch": 0.87, "learning_rate": 2.7510224948875254e-05, "loss": 1.2165, "step": 1140 },
    { "epoch": 0.88, "learning_rate": 2.7459100204498977e-05, "loss": 1.2141, "step": 1150 },
    { "epoch": 0.89, "learning_rate": 2.74079754601227e-05, "loss": 1.2257, "step": 1160 },
    { "epoch": 0.9, "learning_rate": 2.7356850715746424e-05, "loss": 1.1844, "step": 1170 },
    { "epoch": 0.9, "learning_rate": 2.7305725971370144e-05, "loss": 1.229, "step": 1180 },
    { "epoch": 0.91, "learning_rate": 2.7254601226993867e-05, "loss": 1.2189, "step": 1190 },
    { "epoch": 0.92, "learning_rate": 2.7203476482617587e-05, "loss": 1.2027, "step": 1200 },
    { "epoch": 0.93, "learning_rate": 2.715235173824131e-05, "loss": 1.2362, "step": 1210 },
    { "epoch": 0.94, "learning_rate": 2.7101226993865033e-05, "loss": 1.1787, "step": 1220 },
    { "epoch": 0.94, "learning_rate": 2.7050102249488753e-05, "loss": 1.2087, "step": 1230 },
    { "epoch": 0.95, "learning_rate": 2.6998977505112476e-05, "loss": 1.1646, "step": 1240 },
    { "epoch": 0.96, "learning_rate": 2.6947852760736196e-05, "loss": 1.2548, "step": 1250 },
    { "epoch": 0.97, "learning_rate": 2.689672801635992e-05, "loss": 1.2269, "step": 1260 },
    { "epoch": 0.97, "learning_rate": 2.684560327198364e-05, "loss": 1.1421, "step": 1270 },
    { "epoch": 0.98, "learning_rate": 2.6794478527607362e-05, "loss": 1.2349, "step": 1280 },
    { "epoch": 0.99, "learning_rate": 2.6743353783231085e-05, "loss": 1.1553, "step": 1290 },
    { "epoch": 1.0, "learning_rate": 2.669222903885481e-05, "loss": 1.1999, "step": 1300 },
    { "epoch": 1.0, "eval_accuracy": 0.5877392616726158, "eval_loss": 1.102908730506897, "eval_runtime": 301.1056, "eval_samples_per_second": 138.632, "eval_steps_per_second": 69.318, "step": 1304 },
    { "epoch": 1.0, "learning_rate": 2.6641104294478528e-05, "loss": 1.2812, "step": 1310 },
    { "epoch": 1.01, "learning_rate": 2.658997955010225e-05, "loss": 1.1977, "step": 1320 },
    { "epoch": 1.02, "learning_rate": 2.653885480572597e-05, "loss": 1.1985, "step": 1330 },
    { "epoch": 1.03, "learning_rate": 2.6487730061349694e-05, "loss": 1.1709, "step": 1340 },
    { "epoch": 1.04, "learning_rate": 2.6441717791411043e-05, "loss": 1.2057, "step": 1350 },
    { "epoch": 1.04, "learning_rate": 2.6390593047034766e-05, "loss": 1.1377, "step": 1360 },
    { "epoch": 1.05, "learning_rate": 2.6339468302658486e-05, "loss": 1.1278, "step": 1370 },
    { "epoch": 1.06, "learning_rate": 2.628834355828221e-05, "loss": 1.1997, "step": 1380 },
    { "epoch": 1.07, "learning_rate": 2.623721881390593e-05, "loss": 1.121, "step": 1390 },
    { "epoch": 1.07, "learning_rate": 2.6186094069529655e-05, "loss": 1.2115, "step": 1400 },
    { "epoch": 1.08, "learning_rate": 2.6134969325153375e-05, "loss": 1.18, "step": 1410 },
    { "epoch": 1.09, "learning_rate": 2.6083844580777098e-05, "loss": 1.1797, "step": 1420 },
    { "epoch": 1.1, "learning_rate": 2.6032719836400818e-05, "loss": 1.1343, "step": 1430 },
    { "epoch": 1.1, "learning_rate": 2.598159509202454e-05, "loss": 1.1437, "step": 1440 },
    { "epoch": 1.11, "learning_rate": 2.593047034764826e-05, "loss": 1.1184, "step": 1450 },
    { "epoch": 1.12, "learning_rate": 2.5879345603271984e-05, "loss": 1.1675, "step": 1460 },
    { "epoch": 1.13, "learning_rate": 2.5828220858895707e-05, "loss": 1.171, "step": 1470 },
    { "epoch": 1.13, "learning_rate": 2.577709611451943e-05, "loss": 1.1481, "step": 1480 },
    { "epoch": 1.14, "learning_rate": 2.572597137014315e-05, "loss": 1.1237, "step": 1490 },
    { "epoch": 1.15, "learning_rate": 2.5674846625766873e-05, "loss": 1.1696, "step": 1500 },
    { "epoch": 1.16, "learning_rate": 2.5623721881390593e-05, "loss": 1.1376, "step": 1510 },
    { "epoch": 1.17, "learning_rate": 2.5572597137014313e-05, "loss": 1.126, "step": 1520 },
    { "epoch": 1.17, "learning_rate": 2.5521472392638036e-05, "loss": 1.1429, "step": 1530 },
    { "epoch": 1.18, "learning_rate": 2.547034764826176e-05, "loss": 1.1271, "step": 1540 },
    { "epoch": 1.19, "learning_rate": 2.5419222903885483e-05, "loss": 1.1538, "step": 1550 },
    { "epoch": 1.2, "learning_rate": 2.5368098159509202e-05, "loss": 1.1844, "step": 1560 },
    { "epoch": 1.2, "learning_rate": 2.5316973415132926e-05, "loss": 1.1682, "step": 1570 },
    { "epoch": 1.21, "learning_rate": 2.5265848670756645e-05, "loss": 1.1432, "step": 1580 },
    { "epoch": 1.22, "learning_rate": 2.521472392638037e-05, "loss": 1.1812, "step": 1590 },
    { "epoch": 1.23, "learning_rate": 2.516359918200409e-05, "loss": 1.1812, "step": 1600 },
    { "epoch": 1.23, "learning_rate": 2.5112474437627815e-05, "loss": 1.1207, "step": 1610 },
    { "epoch": 1.24, "learning_rate": 2.5061349693251535e-05, "loss": 1.105, "step": 1620 },
    { "epoch": 1.25, "learning_rate": 2.5010224948875258e-05, "loss": 1.1223, "step": 1630 },
    { "epoch": 1.26, "learning_rate": 2.4959100204498978e-05, "loss": 1.0973, "step": 1640 },
    { "epoch": 1.27, "learning_rate": 2.49079754601227e-05, "loss": 1.131, "step": 1650 },
    { "epoch": 1.27, "learning_rate": 2.485685071574642e-05, "loss": 1.1277, "step": 1660 },
    { "epoch": 1.28, "learning_rate": 2.4805725971370144e-05, "loss": 1.0796, "step": 1670 },
    { "epoch": 1.29, "learning_rate": 2.4754601226993867e-05, "loss": 1.1054, "step": 1680 },
    { "epoch": 1.3, "learning_rate": 2.4703476482617587e-05, "loss": 1.1159, "step": 1690 },
    { "epoch": 1.3, "learning_rate": 2.465235173824131e-05, "loss": 1.0675, "step": 1700 },
    { "epoch": 1.31, "learning_rate": 2.460122699386503e-05, "loss": 1.1283, "step": 1710 },
    { "epoch": 1.32, "learning_rate": 2.4550102249488753e-05, "loss": 1.1507, "step": 1720 },
    { "epoch": 1.33, "learning_rate": 2.4498977505112473e-05, "loss": 1.1587, "step": 1730 },
    { "epoch": 1.33, "learning_rate": 2.44478527607362e-05, "loss": 1.1495, "step": 1740 },
    { "epoch": 1.34, "learning_rate": 2.439672801635992e-05, "loss": 1.1109, "step": 1750 },
    { "epoch": 1.35, "learning_rate": 2.4345603271983642e-05, "loss": 1.124, "step": 1760 },
    { "epoch": 1.36, "learning_rate": 2.4294478527607362e-05, "loss": 1.1369, "step": 1770 },
    { "epoch": 1.36, "learning_rate": 2.4243353783231085e-05, "loss": 1.1066, "step": 1780 },
    { "epoch": 1.37, "learning_rate": 2.4192229038854805e-05, "loss": 1.1119, "step": 1790 },
    { "epoch": 1.38, "learning_rate": 2.414110429447853e-05, "loss": 1.0885, "step": 1800 },
    { "epoch": 1.39, "learning_rate": 2.408997955010225e-05, "loss": 1.1023, "step": 1810 },
    { "epoch": 1.4, "learning_rate": 2.403885480572597e-05, "loss": 1.1624, "step": 1820 },
    { "epoch": 1.4, "learning_rate": 2.3987730061349695e-05, "loss": 1.089, "step": 1830 },
    { "epoch": 1.41, "learning_rate": 2.3936605316973414e-05, "loss": 1.1186, "step": 1840 },
    { "epoch": 1.42, "learning_rate": 2.3885480572597138e-05, "loss": 1.0953, "step": 1850 },
    { "epoch": 1.43, "learning_rate": 2.3834355828220857e-05, "loss": 1.1036, "step": 1860 },
    { "epoch": 1.43, "learning_rate": 2.378323108384458e-05, "loss": 1.0797, "step": 1870 },
    { "epoch": 1.44, "learning_rate": 2.3732106339468304e-05, "loss": 1.1168, "step": 1880 },
    { "epoch": 1.45, "learning_rate": 2.3680981595092027e-05, "loss": 1.1152, "step": 1890 },
    { "epoch": 1.46, "learning_rate": 2.3629856850715747e-05, "loss": 1.0968, "step": 1900 },
    { "epoch": 1.46, "learning_rate": 2.357873210633947e-05, "loss": 1.1493, "step": 1910 },
    { "epoch": 1.47, "learning_rate": 2.352760736196319e-05, "loss": 1.0944, "step": 1920 },
    { "epoch": 1.48, "learning_rate": 2.3476482617586913e-05, "loss": 1.0999, "step": 1930 },
    { "epoch": 1.49, "learning_rate": 2.3425357873210633e-05, "loss": 1.072, "step": 1940 },
    { "epoch": 1.5, "learning_rate": 2.337423312883436e-05, "loss": 1.1158, "step": 1950 },
    { "epoch": 1.5, "learning_rate": 2.332310838445808e-05, "loss": 1.082, "step": 1960 },
    { "epoch": 1.51, "learning_rate": 2.32719836400818e-05, "loss": 1.058, "step": 1970 },
    { "epoch": 1.52, "learning_rate": 2.3220858895705522e-05, "loss": 1.1094, "step": 1980 },
    { "epoch": 1.53, "learning_rate": 2.3169734151329242e-05, "loss": 1.0677, "step": 1990 },
    { "epoch": 1.53, "learning_rate": 2.3118609406952965e-05, "loss": 1.0579, "step": 2000 },
    { "epoch": 1.54, "learning_rate": 2.3067484662576688e-05, "loss": 1.0428, "step": 2010 },
    { "epoch": 1.55, "learning_rate": 2.301635991820041e-05, "loss": 1.0322, "step": 2020 },
    { "epoch": 1.56, "learning_rate": 2.296523517382413e-05, "loss": 1.083, "step": 2030 },
    { "epoch": 1.56, "learning_rate": 2.2914110429447854e-05, "loss": 1.0952, "step": 2040 },
    { "epoch": 1.57, "learning_rate": 2.2862985685071574e-05, "loss": 1.0667, "step": 2050 },
    { "epoch": 1.58, "learning_rate": 2.2811860940695297e-05, "loss": 1.0549, "step": 2060 },
    { "epoch": 1.59, "learning_rate": 2.2760736196319017e-05, "loss": 1.0916, "step": 2070 },
    { "epoch": 1.59, "learning_rate": 2.2709611451942744e-05, "loss": 1.0978, "step": 2080 },
    { "epoch": 1.6, "learning_rate": 2.2658486707566464e-05, "loss": 1.058, "step": 2090 },
    { "epoch": 1.61, "learning_rate": 2.2607361963190187e-05, "loss": 1.1053, "step": 2100 },
    { "epoch": 1.62, "learning_rate": 2.2556237218813907e-05, "loss": 1.1061, "step": 2110 },
    { "epoch": 1.63, "learning_rate": 2.2505112474437626e-05, "loss": 1.0919, "step": 2120 },
    { "epoch": 1.63, "learning_rate": 2.245398773006135e-05, "loss": 1.0499, "step": 2130 },
    { "epoch": 1.64, "learning_rate": 2.240286298568507e-05, "loss": 1.0577, "step": 2140 },
    { "epoch": 1.65, "learning_rate": 2.2351738241308796e-05, "loss": 1.0188, "step": 2150 },
    { "epoch": 1.66, "learning_rate": 2.2300613496932516e-05, "loss": 1.0922, "step": 2160 },
    { "epoch": 1.66, "learning_rate": 2.224948875255624e-05, "loss": 1.0659, "step": 2170 },
    { "epoch": 1.67, "learning_rate": 2.219836400817996e-05, "loss": 1.0656, "step": 2180 },
    { "epoch": 1.68, "learning_rate": 2.2147239263803682e-05, "loss": 1.0858, "step": 2190 },
    { "epoch": 1.69, "learning_rate": 2.2096114519427402e-05, "loss": 1.0432, "step": 2200 },
    { "epoch": 1.69, "learning_rate": 2.2044989775051125e-05, "loss": 1.0386, "step": 2210 },
    { "epoch": 1.7, "learning_rate": 2.1993865030674848e-05, "loss": 1.0843, "step": 2220 },
    { "epoch": 1.71, "learning_rate": 2.194274028629857e-05, "loss": 1.1272, "step": 2230 },
    { "epoch": 1.72, "learning_rate": 2.189161554192229e-05, "loss": 1.0712, "step": 2240 },
    { "epoch": 1.73, "learning_rate": 2.1840490797546014e-05, "loss": 0.9976, "step": 2250 },
    { "epoch": 1.73, "learning_rate": 2.1789366053169734e-05, "loss": 1.0925, "step": 2260 },
    { "epoch": 1.74, "learning_rate": 2.1738241308793454e-05, "loss": 1.0628, "step": 2270 },
    { "epoch": 1.75, "learning_rate": 2.1687116564417177e-05, "loss": 1.0742, "step": 2280 },
    { "epoch": 1.76, "learning_rate": 2.16359918200409e-05, "loss": 1.058, "step": 2290 },
    { "epoch": 1.76, "learning_rate": 2.1584867075664623e-05, "loss": 1.033, "step": 2300 },
    { "epoch": 1.77, "learning_rate": 2.1533742331288343e-05, "loss": 1.0682, "step": 2310 },
    { "epoch": 1.78, "learning_rate": 2.1482617586912066e-05, "loss": 1.1028, "step": 2320 },
    { "epoch": 1.79, "learning_rate": 2.1431492842535786e-05, "loss": 1.0428, "step": 2330 },
    { "epoch": 1.79, "learning_rate": 2.138036809815951e-05, "loss": 1.0432, "step": 2340 },
    { "epoch": 1.8, "learning_rate": 2.1329243353783233e-05, "loss": 1.0666, "step": 2350 },
    { "epoch": 1.81, "learning_rate": 2.1278118609406956e-05, "loss": 1.0205, "step": 2360 },
    { "epoch": 1.82, "learning_rate": 2.1226993865030676e-05, "loss": 1.0398, "step": 2370 },
    { "epoch": 1.82, "learning_rate": 2.11758691206544e-05, "loss": 1.0386, "step": 2380 },
    { "epoch": 1.83, "learning_rate": 2.112474437627812e-05, "loss": 1.0642, "step": 2390 },
    { "epoch": 1.84, "learning_rate": 2.1073619631901842e-05, "loss": 1.0635, "step": 2400 },
    { "epoch": 1.85, "learning_rate": 2.102249488752556e-05, "loss": 1.0366, "step": 2410 },
    { "epoch": 1.86, "learning_rate": 2.0971370143149285e-05, "loss": 1.0517, "step": 2420 },
    { "epoch": 1.86, "learning_rate": 2.0920245398773008e-05, "loss": 1.0518, "step": 2430 },
    { "epoch": 1.87, "learning_rate": 2.0869120654396728e-05, "loss": 1.0715, "step": 2440 },
    { "epoch": 1.88, "learning_rate": 2.081799591002045e-05, "loss": 1.0363, "step": 2450 },
    { "epoch": 1.89, "learning_rate": 2.076687116564417e-05, "loss": 1.0384, "step": 2460 },
    { "epoch": 1.89, "learning_rate": 2.0715746421267894e-05, "loss": 1.0585, "step": 2470 },
    { "epoch": 1.9, "learning_rate": 2.0664621676891614e-05, "loss": 1.0972, "step": 2480 },
    { "epoch": 1.91, "learning_rate": 2.061349693251534e-05, "loss": 1.0248, "step": 2490 },
    { "epoch": 1.92, "learning_rate": 2.056237218813906e-05, "loss": 1.0279, "step": 2500 },
    { "epoch": 1.92, "learning_rate": 2.0511247443762783e-05, "loss": 1.0053, "step": 2510 },
    { "epoch": 1.93, "learning_rate": 2.0460122699386503e-05, "loss": 1.0685, "step": 2520 },
    { "epoch": 1.94, "learning_rate": 2.0408997955010226e-05, "loss": 1.0059, "step": 2530 },
    { "epoch": 1.95, "learning_rate": 2.0357873210633946e-05, "loss": 1.029, "step": 2540 },
    { "epoch": 1.96, "learning_rate": 2.030674846625767e-05, "loss": 1.0916, "step": 2550 },
    { "epoch": 1.96, "learning_rate": 2.0255623721881392e-05, "loss": 1.0296, "step": 2560 },
    { "epoch": 1.97, "learning_rate": 2.0204498977505112e-05, "loss": 1.0179, "step": 2570 },
    { "epoch": 1.98, "learning_rate": 2.0153374233128835e-05, "loss": 1.0126, "step": 2580 },
    { "epoch": 1.99, "learning_rate": 2.0102249488752555e-05, "loss": 1.0458, "step": 2590 },
    { "epoch": 1.99, "learning_rate": 2.005112474437628e-05, "loss": 1.0779, "step": 2600 },
    { "epoch": 2.0, "eval_accuracy": 0.6555350597705004, "eval_loss": 0.9455426335334778, "eval_runtime": 303.4096, "eval_samples_per_second": 137.58, "eval_steps_per_second": 68.792, "step": 2608 },
    { "epoch": 2.0, "learning_rate": 1.9999999999999998e-05, "loss": 1.1476, "step": 2610 },
    { "epoch": 2.01, "learning_rate": 1.994887525562372e-05, "loss": 1.0322, "step": 2620 },
    { "epoch": 2.02, "learning_rate": 1.9897750511247445e-05, "loss": 1.0394, "step": 2630 },
    { "epoch": 2.02, "learning_rate": 1.9846625766871168e-05, "loss": 1.0319, "step": 2640 },
    { "epoch": 2.03, "learning_rate": 1.9795501022494888e-05, "loss": 1.0061, "step": 2650 },
    { "epoch": 2.04, "learning_rate": 1.974437627811861e-05, "loss": 1.0337, "step": 2660 },
    { "epoch": 2.05, "learning_rate": 1.969325153374233e-05, "loss": 1.0778, "step": 2670 },
    { "epoch": 2.06, "learning_rate": 1.9642126789366054e-05, "loss": 1.0107, "step": 2680 },
    { "epoch": 2.06, "learning_rate": 1.9591002044989777e-05, "loss": 1.0841, "step": 2690 },
    { "epoch": 2.07, "learning_rate": 1.95398773006135e-05, "loss": 1.0172, "step": 2700 },
    { "epoch": 2.08, "learning_rate": 1.948875255623722e-05, "loss": 1.0077, "step": 2710 },
    { "epoch": 2.09, "learning_rate": 1.943762781186094e-05, "loss": 1.0349, "step": 2720 },
    { "epoch": 2.09, "learning_rate": 1.9386503067484663e-05, "loss": 0.9735, "step": 2730 },
    { "epoch": 2.1, "learning_rate": 1.9335378323108383e-05, "loss": 1.0829, "step": 2740 },
    { "epoch": 2.11, "learning_rate": 1.9284253578732106e-05, "loss": 1.0084, "step": 2750 },
    { "epoch": 2.12, "learning_rate": 1.923312883435583e-05, "loss": 1.1453, "step": 2760 },
    { "epoch": 2.12, "learning_rate": 1.9182004089979552e-05, "loss": 1.0449, "step": 2770 },
    { "epoch": 2.13, "learning_rate": 1.9130879345603272e-05, "loss": 1.0324, "step": 2780 },
    { "epoch": 2.14, "learning_rate": 1.9079754601226995e-05, "loss": 1.0473, "step": 2790 },
    { "epoch": 2.15, "learning_rate": 1.9028629856850715e-05, "loss": 1.0417, "step": 2800 },
    { "epoch": 2.15, "learning_rate": 1.8977505112474438e-05, "loss": 1.0275, "step": 2810 },
    { "epoch": 2.16, "learning_rate": 1.8926380368098158e-05, "loss": 0.9963, "step": 2820 },
    { "epoch": 2.17, "learning_rate": 1.8875255623721885e-05, "loss": 1.0089, "step": 2830 },
    { "epoch": 2.18, "learning_rate": 1.8824130879345604e-05, "loss": 1.0995, "step": 2840 },
    { "epoch": 2.19, "learning_rate": 1.8773006134969328e-05, "loss": 1.0087, "step": 2850 },
    { "epoch": 2.19, "learning_rate": 1.8721881390593047e-05, "loss": 1.0147, "step": 2860 },
    { "epoch": 2.2, "learning_rate": 1.8670756646216767e-05, "loss": 1.0342, "step": 2870 },
    { "epoch": 2.21, "learning_rate": 1.861963190184049e-05, "loss": 1.0367, "step": 2880 },
    { "epoch": 2.22, "learning_rate": 1.856850715746421e-05, "loss": 1.0202, "step": 2890 },
    { "epoch": 2.22, "learning_rate": 1.8517382413087937e-05, "loss": 1.0607, "step": 2900 },
    { "epoch": 2.23, "learning_rate": 1.8466257668711657e-05, "loss": 1.0357, "step": 2910 },
    { "epoch": 2.24, "learning_rate": 1.841513292433538e-05, "loss": 1.0233, "step": 2920 },
    { "epoch": 2.25, "learning_rate": 1.83640081799591e-05, "loss": 1.0346, "step": 2930 },
    { "epoch": 2.25, "learning_rate": 1.8312883435582823e-05, "loss": 1.03, "step": 2940 },
    { "epoch": 2.26, "learning_rate": 1.8261758691206543e-05, "loss": 0.9792, "step": 2950 },
    { "epoch": 2.27, "learning_rate": 1.8210633946830266e-05, "loss": 1.0475, "step": 2960 },
    { "epoch": 2.28, "learning_rate": 1.815950920245399e-05, "loss": 1.0535, "step": 2970 },
    { "epoch": 2.29, "learning_rate": 1.8108384458077712e-05, "loss": 1.0164, "step": 2980 },
    { "epoch": 2.29, "learning_rate": 1.8057259713701432e-05, "loss": 1.0234, "step": 2990 },
    { "epoch": 2.3, "learning_rate": 1.8006134969325155e-05, "loss": 1.0178, "step": 3000 },
    { "epoch": 2.31, "learning_rate": 1.7955010224948875e-05, "loss": 1.0093, "step": 3010 },
    { "epoch": 2.32, "learning_rate": 1.7903885480572595e-05, "loss": 1.0081, "step": 3020 },
    { "epoch": 2.32, "learning_rate": 1.785276073619632e-05, "loss": 0.9826, "step": 3030 },
    { "epoch": 2.33, "learning_rate": 1.780163599182004e-05, "loss": 0.964, "step": 3040 },
    { "epoch": 2.34, "learning_rate": 1.7750511247443764e-05, "loss": 0.9796, "step": 3050 },
    { "epoch": 2.35, "learning_rate": 1.7699386503067484e-05, "loss": 1.0112, "step": 3060 },
    { "epoch": 2.35, "learning_rate": 1.7648261758691207e-05, "loss": 0.9958, "step": 3070 },
    { "epoch": 2.36, "learning_rate": 1.7597137014314927e-05, "loss": 1.0626, "step": 3080 },
    { "epoch": 2.37, "learning_rate": 1.754601226993865e-05, "loss": 0.949, "step": 3090 },
    { "epoch": 2.38, "learning_rate": 1.7494887525562373e-05, "loss": 0.9996, "step": 3100 },
    { "epoch": 2.38, "learning_rate": 1.7443762781186097e-05, "loss": 0.923, "step": 3110 },
    { "epoch": 2.39, "learning_rate": 1.7392638036809816e-05, "loss": 1.0195, "step": 3120 },
    { "epoch": 2.4, "learning_rate": 1.734151329243354e-05, "loss": 1.0198, "step": 3130 },
    { "epoch": 2.41, "learning_rate": 1.729038854805726e-05, "loss": 1.0267, "step": 3140 },
    { "epoch": 2.42, "learning_rate": 1.7239263803680983e-05, "loss": 0.9649, "step": 3150 },
    { "epoch": 2.42, "learning_rate": 1.7188139059304702e-05, "loss": 1.0171, "step": 3160 },
    { "epoch": 2.43, "learning_rate": 1.7137014314928426e-05, "loss": 1.0343, "step": 3170 },
    { "epoch": 2.44, "learning_rate": 1.708588957055215e-05, "loss": 1.0254, "step": 3180 },
    { "epoch": 2.45, "learning_rate": 1.703476482617587e-05, "loss": 1.0452, "step": 3190 },
    { "epoch": 2.45, "learning_rate": 1.6983640081799592e-05, "loss": 1.0527, "step": 3200 },
    { "epoch": 2.46, "learning_rate": 1.693251533742331e-05, "loss": 0.9567, "step": 3210 },
    { "epoch": 2.47, "learning_rate": 1.6881390593047035e-05, "loss": 0.9941, "step": 3220 },
    { "epoch": 2.48, "learning_rate": 1.6830265848670755e-05, "loss": 1.0011, "step": 3230 },
    { "epoch": 2.48, "learning_rate": 1.677914110429448e-05, "loss": 0.9983, "step": 3240 },
    { "epoch": 2.49, "learning_rate": 1.67280163599182e-05, "loss": 1.0388, "step": 3250 },
    { "epoch": 2.5, "learning_rate": 1.6676891615541924e-05, "loss": 0.9829, "step": 3260 },
    { "epoch": 2.51, "learning_rate": 1.6625766871165644e-05, "loss": 1.047, "step": 3270 },
    { "epoch": 2.52, "learning_rate": 1.6574642126789367e-05, "loss": 0.9815, "step": 3280 },
    { "epoch": 2.52, "learning_rate": 1.6523517382413087e-05, "loss": 0.9917, "step": 3290 },
    { "epoch": 2.53, "learning_rate": 1.647239263803681e-05, "loss": 1.0505, "step": 3300 },
    { "epoch": 2.54, "learning_rate": 1.6421267893660533e-05, "loss": 0.9786, "step": 3310 },
    { "epoch": 2.55, "learning_rate": 1.6370143149284253e-05, "loss": 0.9499, "step": 3320 },
    { "epoch": 2.55, "learning_rate": 1.6319018404907976e-05, "loss": 1.0274, "step": 3330 },
    { "epoch": 2.56, "learning_rate": 1.6267893660531696e-05, "loss": 1.0012, "step": 3340 },
    { "epoch": 2.57, "learning_rate": 1.621676891615542e-05, "loss": 1.0759, "step": 3350 },
    { "epoch": 2.58, "learning_rate": 1.616564417177914e-05, "loss": 1.009, "step": 3360 },
    { "epoch": 2.58, "learning_rate": 1.6114519427402862e-05, "loss": 0.9257, "step": 3370 },
    { "epoch": 2.59, "learning_rate": 1.6063394683026586e-05, "loss": 1.0042, "step": 3380 },
    { "epoch": 2.6, "learning_rate": 1.601226993865031e-05, "loss": 1.0145, "step": 3390 },
    { "epoch": 2.61, "learning_rate": 1.596114519427403e-05, "loss": 0.9835, "step": 3400 },
    { "epoch": 2.61, "learning_rate": 1.591002044989775e-05, "loss": 1.0112, "step": 3410 },
    { "epoch": 2.62, "learning_rate": 1.585889570552147e-05, "loss": 1.0209, "step": 3420 },
    { "epoch": 2.63, "learning_rate": 1.5807770961145195e-05, "loss": 1.1021, "step": 3430 },
    { "epoch": 2.64, "learning_rate": 1.5756646216768918e-05, "loss": 0.9853, "step": 3440 },
    { "epoch": 2.65, "learning_rate": 1.570552147239264e-05, "loss": 0.9676, "step": 3450 },
    { "epoch": 2.65, "learning_rate": 1.565439672801636e-05, "loss": 1.0247, "step": 3460 },
    { "epoch": 2.66, "learning_rate": 1.560327198364008e-05, "loss": 0.9681, "step": 3470 },
    { "epoch": 2.67, "learning_rate": 1.5552147239263804e-05, "loss": 0.9989, "step": 3480 },
    { "epoch": 2.68, "learning_rate": 1.5501022494887524e-05, "loss": 1.0103, "step": 3490 },
    { "epoch": 2.68, "learning_rate": 1.5449897750511247e-05, "loss": 0.9726, "step": 3500 },
    { "epoch": 2.69, "learning_rate": 1.539877300613497e-05, "loss": 0.9575, "step": 3510 },
    { "epoch": 2.7, "learning_rate": 1.5347648261758693e-05, "loss": 1.0127, "step": 3520 },
    { "epoch": 2.71, "learning_rate": 1.5296523517382413e-05, "loss": 0.9866, "step": 3530 },
    { "epoch": 2.71, "learning_rate": 1.5245398773006136e-05, "loss": 0.9727, "step": 3540 },
    { "epoch": 2.72, "learning_rate": 1.5194274028629856e-05, "loss": 1.0087, "step": 3550 },
    { "epoch": 2.73, "learning_rate": 1.5143149284253581e-05, "loss": 1.0048, "step": 3560 },
    { "epoch": 2.74, "learning_rate": 1.50920245398773e-05, "loss": 1.0397, "step": 3570 },
    { "epoch": 2.75, "learning_rate": 1.5040899795501024e-05, "loss": 0.9903, "step": 3580 },
    { "epoch": 2.75, "learning_rate": 1.4989775051124744e-05, "loss": 1.0429, "step": 3590 },
    { "epoch": 2.76, "learning_rate": 1.4938650306748467e-05, "loss": 1.0008, "step": 3600 },
    { "epoch": 2.77, "learning_rate": 1.4887525562372188e-05, "loss": 0.9799, "step": 3610 },
    { "epoch": 2.78, "learning_rate": 1.483640081799591e-05, "loss": 0.9668, "step": 3620 },
    { "epoch": 2.78, "learning_rate": 1.4785276073619633e-05, "loss": 1.0046, "step": 3630 },
    { "epoch": 2.79, "learning_rate": 1.4734151329243355e-05, "loss": 1.0146, "step": 3640 },
    { "epoch": 2.8, "learning_rate": 1.4683026584867076e-05, "loss": 0.9645, "step": 3650 },
    { "epoch": 2.81, "learning_rate": 1.46319018404908e-05, "loss": 1.0449, "step": 3660 },
    { "epoch": 2.81, "learning_rate": 1.458077709611452e-05, "loss": 1.0, "step": 3670 },
    { "epoch": 2.82, "learning_rate": 1.4529652351738242e-05, "loss": 0.9965, "step": 3680 },
    { "epoch": 2.83, "learning_rate": 1.4478527607361964e-05, "loss": 1.0086, "step": 3690 },
    { "epoch": 2.84, "learning_rate": 1.4427402862985685e-05, "loss": 1.0889, "step": 3700 },
    { "epoch": 2.84, "learning_rate": 1.4376278118609407e-05, "loss": 1.0154, "step": 3710 },
    { "epoch": 2.85, "learning_rate": 1.4325153374233128e-05, "loss": 1.017, "step": 3720 },
    { "epoch": 2.86, "learning_rate": 1.4274028629856851e-05, "loss": 0.9491, "step": 3730 },
    { "epoch": 2.87, "learning_rate": 1.4222903885480573e-05, "loss": 0.9901, "step": 3740 },
    { "epoch": 2.88, "learning_rate": 1.4171779141104294e-05, "loss": 1.0308, "step": 3750 },
    { "epoch": 2.88, "learning_rate": 1.4120654396728016e-05, "loss": 0.9649, "step": 3760 },
    { "epoch": 2.89, "learning_rate": 1.4069529652351739e-05, "loss": 0.9903, "step": 3770 },
    { "epoch": 2.9, "learning_rate": 1.401840490797546e-05, "loss": 0.9283, "step": 3780 },
    { "epoch": 2.91, "learning_rate": 1.3967280163599182e-05, "loss": 0.9714, "step": 3790 },
    { "epoch": 2.91, "learning_rate": 1.3916155419222905e-05, "loss": 0.9989, "step": 3800 },
    { "epoch": 2.92, "learning_rate": 1.3865030674846627e-05, "loss": 0.9711, "step": 3810 },
    { "epoch": 2.93, "learning_rate": 1.3813905930470348e-05, "loss": 1.0136, "step": 3820 },
    { "epoch": 2.94, "learning_rate": 1.3762781186094071e-05, "loss": 0.9706, "step": 3830 },
    { "epoch": 2.94, "learning_rate": 1.3711656441717793e-05, "loss": 1.0298, "step": 3840 },
    { "epoch": 2.95, "learning_rate": 1.3660531697341513e-05, "loss": 0.9527, "step": 3850 },
    { "epoch": 2.96, "learning_rate": 1.3609406952965234e-05, "loss": 0.9867, "step": 3860 },
    { "epoch": 2.97, "learning_rate": 1.3558282208588957e-05, "loss": 0.9672, "step": 3870 },
    { "epoch": 2.98, "learning_rate": 1.3507157464212679e-05, "loss": 0.9654, "step": 3880 },
    { "epoch": 2.98, "learning_rate": 1.34560327198364e-05, "loss": 0.9646, "step": 3890 },
    { "epoch": 2.99, "learning_rate": 1.3404907975460124e-05, "loss": 0.9597, "step": 3900 },
    { "epoch": 3.0, "learning_rate": 1.3353783231083845e-05, "loss": 0.9775, "step": 3910 },
    { "epoch": 3.0, "eval_accuracy": 0.6522530723714156, "eval_loss": 0.9669816493988037, "eval_runtime": 294.3351, "eval_samples_per_second": 141.821, "eval_steps_per_second": 70.912, "step": 3912 },
    { "epoch": 3.01, "learning_rate": 1.3302658486707567e-05, "loss": 1.0613, "step": 3920 },
    { "epoch": 3.01, "learning_rate": 1.3251533742331288e-05, "loss": 0.9874, "step": 3930 },
    { "epoch": 3.02, "learning_rate": 1.3200408997955011e-05, "loss": 0.9708, "step": 3940 },
    { "epoch": 3.03, "learning_rate": 1.3149284253578733e-05, "loss": 0.9617, "step": 3950 },
    { "epoch": 3.04, "learning_rate": 1.3098159509202454e-05, "loss": 0.968, "step": 3960 },
    { "epoch": 3.04, "learning_rate": 1.3047034764826177e-05, "loss": 0.9683, "step": 3970 },
    { "epoch": 3.05, "learning_rate": 1.2995910020449899e-05, "loss": 0.9496, "step": 3980 },
    { "epoch": 3.06, "learning_rate": 1.294478527607362e-05, "loss": 0.919, "step": 3990 },
    { "epoch": 3.07, "learning_rate": 1.2893660531697342e-05, "loss": 0.9563, "step": 4000 },
    { "epoch": 3.08, "learning_rate": 1.2842535787321063e-05, "loss": 0.9115, "step": 4010 },
    { "epoch": 3.08, "learning_rate": 1.2791411042944785e-05, "loss": 0.9078, "step": 4020 },
    { "epoch": 3.09, "learning_rate": 1.2740286298568506e-05, "loss": 0.9824, "step": 4030 },
    { "epoch": 3.1, "learning_rate": 1.268916155419223e-05, "loss": 0.9953, "step": 4040 },
    { "epoch": 3.11, "learning_rate": 1.2638036809815951e-05, "loss": 1.004, "step": 4050 },
    { "epoch": 3.11, "learning_rate": 1.2586912065439673e-05, "loss": 0.9191, "step": 4060 },
    { "epoch": 3.12, "learning_rate": 1.2535787321063396e-05, "loss": 0.9923, "step": 4070 },
    { "epoch": 3.13, "learning_rate": 1.2484662576687117e-05, "loss": 0.9668, "step": 4080 },
    { "epoch": 3.14, "learning_rate": 1.2433537832310839e-05, "loss": 1.0262, "step": 4090 },
    { "epoch": 3.14, "learning_rate": 1.238241308793456e-05, "loss": 1.0177, "step": 4100 },
    { "epoch": 3.15, "learning_rate": 1.2331288343558283e-05, "loss": 0.9461, "step": 4110 },
    { "epoch": 3.16, "learning_rate": 1.2280163599182005e-05, "loss": 1.0468, "step": 4120 },
    { "epoch": 3.17, "learning_rate": 1.2229038854805726e-05, "loss": 0.9567, "step": 4130 },
    { "epoch": 3.17, "learning_rate": 1.217791411042945e-05, "loss": 0.936, "step": 4140 },
    { "epoch": 3.18, "learning_rate": 1.212678936605317e-05, "loss": 0.9586, "step": 4150 },
    { "epoch": 3.19, "learning_rate": 1.2075664621676891e-05, "loss": 0.964, "step": 4160 },
    { "epoch": 3.2, "learning_rate": 1.2024539877300614e-05, "loss": 0.9221, "step": 4170 },
    { "epoch": 3.21, "learning_rate": 1.1973415132924336e-05, "loss": 0.9604, "step": 4180 },
    { "epoch": 3.21, "learning_rate": 1.1922290388548057e-05, "loss": 1.0227, "step": 4190 },
    { "epoch": 3.22, "learning_rate": 1.1871165644171779e-05, "loss": 0.9817, "step": 4200 },
    { "epoch": 3.23, "learning_rate": 1.1820040899795502e-05, "loss": 0.9773, "step": 4210 },
    { "epoch": 3.24, "learning_rate": 1.1768916155419223e-05, "loss": 0.9468, "step": 4220 },
    { "epoch": 3.24, "learning_rate": 1.1717791411042945e-05, "loss": 0.9356, "step": 4230 },
    { "epoch": 3.25, "learning_rate": 1.1666666666666668e-05, "loss": 0.9818, "step": 4240 },
    { "epoch": 3.26, "learning_rate": 1.161554192229039e-05, "loss": 0.9984, "step": 4250 },
    { "epoch": 3.27, "learning_rate": 1.1564417177914111e-05, "loss": 1.0297, "step": 4260 },
    { "epoch": 3.27, "learning_rate": 1.1513292433537832e-05, "loss": 0.9418, "step": 4270 },
    { "epoch": 3.28, "learning_rate": 1.1462167689161556e-05, "loss": 0.9486, "step": 4280 },
    { "epoch": 3.29, "learning_rate": 1.1411042944785277e-05, "loss": 0.9456, "step": 4290 },
    { "epoch": 3.3, "learning_rate": 1.1359918200408997e-05, "loss": 0.9821, "step": 4300 },
    { "epoch": 3.31, "learning_rate": 1.130879345603272e-05, "loss": 0.9672, "step": 4310 },
    { "epoch": 3.31, "learning_rate": 1.1257668711656442e-05, "loss": 0.9769, "step": 4320 },
    { "epoch": 3.32, "learning_rate": 1.1206543967280163e-05, "loss": 0.9806, "step": 4330 },
    { "epoch": 3.33, "learning_rate": 1.1155419222903885e-05, "loss": 0.9755, "step": 4340 },
    { "epoch": 3.34, "learning_rate": 1.1104294478527608e-05, "loss": 0.9019, "step": 4350 },
    { "epoch": 3.34, "learning_rate": 1.105316973415133e-05, "loss": 0.9812, "step": 4360 },
    { "epoch": 3.35, "learning_rate": 1.100204498977505e-05, "loss": 0.9808, "step": 4370 },
    { "epoch": 3.36, "learning_rate": 1.0950920245398774e-05, "loss": 1.0226, "step": 4380 },
    { "epoch": 3.37, "learning_rate": 1.0899795501022495e-05, "loss": 0.9444, "step": 4390 },
    { "epoch": 3.37, "learning_rate": 1.0848670756646217e-05, "loss": 0.9383, "step": 4400 },
    { "epoch": 3.38, "learning_rate": 1.079754601226994e-05, "loss": 0.8953, "step": 4410 },
    { "epoch": 3.39, "learning_rate": 1.0746421267893662e-05, "loss": 0.9296, "step": 4420 },
    { "epoch": 3.4, "learning_rate": 1.0695296523517383e-05, "loss": 0.967, "step": 4430 },
    { "epoch": 3.4, "learning_rate": 1.0644171779141105e-05, "loss": 0.9711, "step": 4440 },
    { "epoch": 3.41, "learning_rate": 1.0593047034764828e-05, "loss": 0.9028, "step": 4450 },
    { "epoch": 3.42, "learning_rate": 1.0541922290388548e-05, "loss": 0.972, "step": 4460 },
    { "epoch": 3.43, "learning_rate": 1.0490797546012269e-05, "loss": 0.9936, "step": 4470 },
    { "epoch": 3.44, "learning_rate": 1.0439672801635992e-05, "loss": 1.0213, "step": 4480 },
    { "epoch": 3.44, "learning_rate": 1.0388548057259714e-05, "loss": 0.9247, "step": 4490 },
    { "epoch": 3.45, "learning_rate": 1.0337423312883435e-05, "loss": 0.9832, "step": 4500 },
    { "epoch": 3.46, "learning_rate": 1.0286298568507157e-05, "loss": 0.9604, "step": 4510 },
    { "epoch": 3.47, "learning_rate": 1.023517382413088e-05, "loss": 0.9799, "step": 4520 },
    { "epoch": 3.47, "learning_rate": 1.0184049079754601e-05, "loss": 0.9318, "step": 4530 },
    { "epoch": 3.48, "learning_rate": 1.0132924335378323e-05, "loss": 0.9727, "step": 4540 },
    { "epoch": 3.49, "learning_rate": 1.0081799591002046e-05, "loss": 0.9929, "step": 4550 },
    { "epoch": 3.5, "learning_rate": 1.0030674846625768e-05, "loss": 0.9488, "step": 4560 },
    { "epoch": 3.5, "learning_rate": 9.979550102249489e-06, "loss": 0.97, "step": 4570 },
    { "epoch": 3.51, "learning_rate": 9.928425357873212e-06, "loss": 0.953, "step": 4580 },
    { "epoch": 3.52, "learning_rate": 9.877300613496934e-06, "loss": 0.9396, "step": 4590 },
    { "epoch": 3.53, "learning_rate": 9.826175869120655e-06, "loss": 1.0, "step": 4600 },
    { "epoch": 3.54, "learning_rate": 9.775051124744375e-06, "loss": 0.9872, "step": 4610 },
    { "epoch": 3.54, "learning_rate": 9.723926380368098e-06, "loss": 0.9254, "step": 4620 },
    { "epoch": 3.55, "learning_rate": 9.67280163599182e-06, "loss": 0.989, "step": 4630 },
    { "epoch": 3.56, "learning_rate": 9.621676891615541e-06, "loss": 0.8786, "step": 4640 },
    { "epoch": 3.57, "learning_rate": 9.570552147239264e-06, "loss": 0.9236, "step": 4650 },
    { "epoch": 3.57, "learning_rate": 9.519427402862986e-06, "loss": 0.9162, "step": 4660 },
    { "epoch": 3.58, "learning_rate": 9.468302658486707e-06, "loss": 0.9444, "step": 4670 },
    { "epoch": 3.59, "learning_rate": 9.417177914110429e-06, "loss": 0.9698, "step": 4680 },
    { "epoch": 3.6, "learning_rate": 9.366053169734152e-06, "loss": 1.0498, "step": 4690 },
    { "epoch": 3.6, "learning_rate": 9.314928425357874e-06, "loss": 0.9529, "step": 4700 },
    { "epoch": 3.61, "learning_rate": 9.263803680981595e-06, "loss": 0.9147, "step": 4710 },
    { "epoch": 3.62, "learning_rate": 9.212678936605318e-06, "loss": 0.9755, "step": 4720 },
    { "epoch": 3.63, "learning_rate": 9.16155419222904e-06, "loss": 1.007, "step": 4730 },
    { "epoch": 3.63, "learning_rate": 9.110429447852761e-06, "loss": 0.9338, "step": 4740 },
    { "epoch": 3.64, "learning_rate": 9.059304703476484e-06, "loss": 0.9356, "step": 4750 },
    { "epoch": 3.65, "learning_rate": 9.008179959100204e-06, "loss": 0.9711, "step": 4760 },
    { "epoch": 3.66, "learning_rate": 8.957055214723926e-06, "loss": 0.9662, "step": 4770 },
    { "epoch": 3.67, "learning_rate": 8.905930470347647e-06, "loss": 0.9794, "step": 4780 },
    { "epoch": 3.67, "learning_rate": 8.85480572597137e-06, "loss": 1.0016, "step": 4790 },
    { "epoch": 3.68, "learning_rate": 8.803680981595092e-06, "loss": 0.9433, "step": 4800 },
    { "epoch": 3.69, "learning_rate": 8.752556237218813e-06, "loss": 0.9584, "step": 4810 },
    { "epoch": 3.7, "learning_rate": 8.701431492842537e-06, "loss": 0.9664, "step": 4820 },
    { "epoch": 3.7, "learning_rate": 8.650306748466258e-06, "loss": 0.9164, "step": 4830 },
    { "epoch": 3.71, "learning_rate": 8.59918200408998e-06, "loss": 0.9198, "step": 4840 },
    { "epoch": 3.72, "learning_rate": 8.548057259713701e-06, "loss": 0.9766, "step": 4850 },
    { "epoch": 3.73, "learning_rate": 8.496932515337424e-06, "loss": 0.9515, "step": 4860 },
    { "epoch": 3.73, "learning_rate": 8.445807770961146e-06, "loss": 0.9538, "step": 4870 },
    { "epoch": 3.74, "learning_rate": 8.394683026584867e-06, "loss": 0.9635, "step": 4880 },
    { "epoch": 3.75, "learning_rate": 8.34355828220859e-06, "loss": 0.9385, "step": 4890 },
    { "epoch": 3.76, "learning_rate": 8.292433537832312e-06, "loss": 0.9669, "step": 4900 },
    { "epoch": 3.77, "learning_rate": 8.241308793456032e-06, "loss": 0.9672, "step": 4910 },
    { "epoch": 3.77, "learning_rate": 8.190184049079755e-06, "loss": 0.9436, "step": 4920 },
    { "epoch": 3.78, "learning_rate": 8.139059304703476e-06, "loss": 0.936, "step": 4930 },
    { "epoch": 3.79, "learning_rate": 8.087934560327198e-06, "loss": 0.999, "step": 4940 },
    { "epoch": 3.8, "learning_rate": 8.03680981595092e-06, "loss": 0.9724, "step": 4950 },
    { "epoch": 3.8, "learning_rate": 7.985685071574643e-06, "loss": 0.9303, "step": 4960 },
    { "epoch": 3.81, "learning_rate": 7.934560327198364e-06, "loss": 0.9705, "step": 4970 },
    { "epoch": 3.82, "learning_rate": 7.883435582822086e-06, "loss": 0.9082, "step": 4980 },
    { "epoch": 3.83, "learning_rate": 7.832310838445809e-06, "loss": 0.9417, "step": 4990 },
    { "epoch": 3.83, "learning_rate": 7.78118609406953e-06, "loss": 0.9132, "step": 5000 },
    { "epoch": 3.84, "learning_rate": 7.730061349693252e-06, "loss": 0.9844, "step": 5010 },
    { "epoch": 3.85, "learning_rate": 7.678936605316973e-06, "loss": 0.9299, "step": 5020 },
    { "epoch": 3.86, "learning_rate": 7.6278118609406965e-06, "loss": 0.9278, "step": 5030 },
    { "epoch": 3.86, "learning_rate": 7.576687116564418e-06, "loss": 0.9335, "step": 5040 },
    { "epoch": 3.87, "learning_rate": 7.52556237218814e-06, "loss": 0.9042, "step": 5050 },
    { "epoch": 3.88, "learning_rate": 7.474437627811861e-06, "loss": 0.971, "step": 5060 },
    { "epoch": 3.89, "learning_rate": 7.423312883435583e-06, "loss": 0.9237, "step": 5070 },
    { "epoch": 3.9, "learning_rate": 7.372188139059305e-06, "loss": 0.9598, "step": 5080 },
    { "epoch": 3.9, "learning_rate": 7.321063394683027e-06, "loss": 0.9941, "step": 5090 },
    { "epoch": 3.91, "learning_rate": 7.269938650306749e-06, "loss": 0.9467, "step": 5100 },
    { "epoch": 3.92, "learning_rate": 7.21881390593047e-06, "loss": 0.9411, "step": 5110 },
    { "epoch": 3.93, "learning_rate": 7.1676891615541924e-06, "loss": 0.9672, "step": 5120 },
    { "epoch": 3.93, "learning_rate": 7.116564417177914e-06, "loss": 0.8993, "step": 5130 },
    { "epoch": 3.94, "learning_rate": 7.065439672801636e-06, "loss": 0.9833, "step": 5140 },
    { "epoch": 3.95, "learning_rate": 7.014314928425359e-06, "loss": 0.9743, "step": 5150 },
    { "epoch": 3.96, "learning_rate": 6.96319018404908e-06, "loss": 0.9413, "step": 5160 },
    { "epoch": 3.96, "learning_rate": 6.912065439672802e-06, "loss": 0.9357, "step": 5170 },
    { "epoch": 3.97, "learning_rate": 6.860940695296523e-06, "loss": 0.9368, "step": 5180 },
    { "epoch": 3.98, "learning_rate": 6.8098159509202454e-06, "loss": 0.9175, "step": 5190 },
    { "epoch": 3.99, "learning_rate": 6.758691206543968e-06, "loss": 0.9567, "step": 5200 },
    { "epoch": 4.0, "learning_rate": 6.707566462167689e-06, "loss": 0.9542, "step": 5210 },
    { "epoch": 4.0, "eval_accuracy": 0.6945835229858899, "eval_loss": 0.8809502124786377, "eval_runtime": 297.753, "eval_samples_per_second": 140.193, "eval_steps_per_second": 70.098, "step": 5216 },
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 6.656441717791412e-06, |
|
"loss": 0.9247, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 6.605316973415133e-06, |
|
"loss": 0.9581, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 6.5541922290388555e-06, |
|
"loss": 0.9375, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 6.503067484662576e-06, |
|
"loss": 0.9146, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 6.4519427402862984e-06, |
|
"loss": 0.9437, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 6.400817995910021e-06, |
|
"loss": 0.9554, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 6.349693251533742e-06, |
|
"loss": 0.9519, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 6.298568507157465e-06, |
|
"loss": 0.9291, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 6.247443762781186e-06, |
|
"loss": 0.8958, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 6.1963190184049085e-06, |
|
"loss": 0.9188, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 6.14519427402863e-06, |
|
"loss": 0.949, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 6.0940695296523515e-06, |
|
"loss": 0.9866, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 6.042944785276074e-06, |
|
"loss": 0.9078, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 5.991820040899795e-06, |
|
"loss": 0.9243, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 5.940695296523518e-06, |
|
"loss": 0.8902, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 5.88957055214724e-06, |
|
"loss": 0.9754, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 5.8384458077709615e-06, |
|
"loss": 0.8929, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 5.787321063394684e-06, |
|
"loss": 0.9311, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 5.7361963190184045e-06, |
|
"loss": 0.9267, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 5.685071574642127e-06, |
|
"loss": 0.9586, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 5.633946830265848e-06, |
|
"loss": 0.9446, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 5.582822085889571e-06, |
|
"loss": 0.9085, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 5.531697341513293e-06, |
|
"loss": 0.9376, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 5.4805725971370145e-06, |
|
"loss": 0.9498, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 5.429447852760737e-06, |
|
"loss": 0.8981, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 5.378323108384458e-06, |
|
"loss": 0.961, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 5.32719836400818e-06, |
|
"loss": 0.9169, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 5.276073619631902e-06, |
|
"loss": 0.9219, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 5.224948875255624e-06, |
|
"loss": 0.9491, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 5.173824130879346e-06, |
|
"loss": 0.9504, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 5.1226993865030675e-06, |
|
"loss": 0.9376, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 5.07157464212679e-06, |
|
"loss": 0.9051, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 5.020449897750511e-06, |
|
"loss": 0.9578, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.969325153374233e-06, |
|
"loss": 0.9037, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.918200408997955e-06, |
|
"loss": 0.8804, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.867075664621677e-06, |
|
"loss": 0.9226, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 4.815950920245399e-06, |
|
"loss": 0.8782, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.7648261758691205e-06, |
|
"loss": 0.8848, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.713701431492843e-06, |
|
"loss": 0.929, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 4.662576687116565e-06, |
|
"loss": 0.9304, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.611451942740287e-06, |
|
"loss": 0.8859, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.560327198364008e-06, |
|
"loss": 0.9106, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.50920245398773e-06, |
|
"loss": 0.9991, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.458077709611452e-06, |
|
"loss": 0.9229, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.406952965235174e-06, |
|
"loss": 0.9479, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.355828220858896e-06, |
|
"loss": 0.9028, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.304703476482618e-06, |
|
"loss": 0.9395, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.25357873210634e-06, |
|
"loss": 0.9212, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.202453987730061e-06, |
|
"loss": 0.9599, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.151329243353783e-06, |
|
"loss": 0.9109, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 4.100204498977505e-06, |
|
"loss": 0.9477, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 4.049079754601227e-06, |
|
"loss": 0.8823, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.997955010224949e-06, |
|
"loss": 0.9318, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.946830265848671e-06, |
|
"loss": 0.8762, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.895705521472393e-06, |
|
"loss": 0.935, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.844580777096115e-06, |
|
"loss": 0.9151, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.793456032719836e-06, |
|
"loss": 0.9244, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.7423312883435584e-06, |
|
"loss": 0.9223, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.6912065439672803e-06, |
|
"loss": 0.8812, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.6400817995910022e-06, |
|
"loss": 0.9402, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.588957055214724e-06, |
|
"loss": 0.9159, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.537832310838446e-06, |
|
"loss": 0.9543, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.4867075664621676e-06, |
|
"loss": 0.9197, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.4355828220858895e-06, |
|
"loss": 0.945, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.384458077709612e-06, |
|
"loss": 0.9491, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.8835, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 3.2822085889570552e-06, |
|
"loss": 0.877, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.231083844580777e-06, |
|
"loss": 0.9487, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.179959100204499e-06, |
|
"loss": 0.9424, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.1288343558282206e-06, |
|
"loss": 0.9083, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.077709611451943e-06, |
|
"loss": 0.9283, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.026584867075665e-06, |
|
"loss": 0.9977, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.9754601226993867e-06, |
|
"loss": 0.9019, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.9243353783231082e-06, |
|
"loss": 0.9302, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.87321063394683e-06, |
|
"loss": 0.9267, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.8220858895705525e-06, |
|
"loss": 0.9545, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.7709611451942744e-06, |
|
"loss": 0.9349, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.724948875255624e-06, |
|
"loss": 0.8813, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.6738241308793458e-06, |
|
"loss": 0.9202, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.6226993865030673e-06, |
|
"loss": 0.9854, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.571574642126789e-06, |
|
"loss": 0.9226, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.5204498977505115e-06, |
|
"loss": 0.9729, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.4693251533742334e-06, |
|
"loss": 0.9218, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.418200408997955e-06, |
|
"loss": 0.917, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.367075664621677e-06, |
|
"loss": 0.9602, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.3159509202453988e-06, |
|
"loss": 0.8968, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.264826175869121e-06, |
|
"loss": 0.9104, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.2137014314928426e-06, |
|
"loss": 0.8855, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.1625766871165645e-06, |
|
"loss": 0.9322, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.1114519427402864e-06, |
|
"loss": 0.9361, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.060327198364008e-06, |
|
"loss": 0.917, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.00920245398773e-06, |
|
"loss": 0.9389, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.958077709611452e-06, |
|
"loss": 0.9851, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.9069529652351741e-06, |
|
"loss": 0.9481, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.8558282208588958e-06, |
|
"loss": 0.8991, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.8047034764826175e-06, |
|
"loss": 0.9207, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.7535787321063397e-06, |
|
"loss": 0.9402, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.7024539877300614e-06, |
|
"loss": 0.9713, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.6513292433537833e-06, |
|
"loss": 0.9374, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.6002044989775052e-06, |
|
"loss": 0.9352, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.5490797546012271e-06, |
|
"loss": 0.9246, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.4979550102249488e-06, |
|
"loss": 0.8953, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.446830265848671e-06, |
|
"loss": 0.9044, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.3957055214723927e-06, |
|
"loss": 0.927, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.3445807770961146e-06, |
|
"loss": 0.926, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.2934560327198365e-06, |
|
"loss": 0.9082, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.2423312883435582e-06, |
|
"loss": 0.9158, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.1912065439672801e-06, |
|
"loss": 0.9389, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.140081799591002e-06, |
|
"loss": 0.9288, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.088957055214724e-06, |
|
"loss": 0.93, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.0378323108384457e-06, |
|
"loss": 0.8904, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 9.867075664621678e-07, |
|
"loss": 0.9087, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 9.355828220858896e-07, |
|
"loss": 0.9703, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 8.844580777096115e-07, |
|
"loss": 0.8408, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 0.8819, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 7.822085889570551e-07, |
|
"loss": 0.8903, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 7.310838445807771e-07, |
|
"loss": 0.9052, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 6.79959100204499e-07, |
|
"loss": 0.9522, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 6.288343558282209e-07, |
|
"loss": 0.9269, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 5.777096114519427e-07, |
|
"loss": 0.9148, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 5.265848670756646e-07, |
|
"loss": 0.9424, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.754601226993865e-07, |
|
"loss": 0.9658, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 4.243353783231084e-07, |
|
"loss": 0.9388, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.732106339468303e-07, |
|
"loss": 0.9308, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.220858895705522e-07, |
|
"loss": 0.9122, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.7096114519427406e-07, |
|
"loss": 0.8978, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.1983640081799592e-07, |
|
"loss": 0.9449, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.6871165644171781e-07, |
|
"loss": 0.9018, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.1758691206543969e-07, |
|
"loss": 0.9552, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 6.646216768916156e-08, |
|
"loss": 0.9161, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.533742331288344e-08, |
|
"loss": 0.9403, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.7016984883693075, |
|
"eval_loss": 0.8677756190299988, |
|
"eval_runtime": 303.2543, |
|
"eval_samples_per_second": 137.65, |
|
"eval_steps_per_second": 68.827, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 6520, |
|
"total_flos": 1.1511308168038711e+19, |
|
"train_loss": 1.11384382686732, |
|
"train_runtime": 4746.5723, |
|
"train_samples_per_second": 175.884, |
|
"train_steps_per_second": 1.374 |
|
} |
|
], |
|
"max_steps": 6520, |
|
"num_train_epochs": 5, |
|
"total_flos": 1.1511308168038711e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|