{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.999680409076383,
  "global_step": 3910,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 5.115089514066497e-07,
      "loss": 2.3876,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0230179028132994e-06,
      "loss": 2.382,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.534526854219949e-06,
      "loss": 2.3645,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.0460358056265987e-06,
      "loss": 2.3333,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.5575447570332483e-06,
      "loss": 2.2959,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.069053708439898e-06,
      "loss": 2.2527,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.5805626598465474e-06,
      "loss": 2.2021,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.092071611253197e-06,
      "loss": 2.1613,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.603580562659847e-06,
      "loss": 2.0852,
      "step": 90
    },
    {
      "epoch": 0.26,
      "learning_rate": 5.1150895140664966e-06,
      "loss": 2.0602,
      "step": 100
    },
    {
      "epoch": 0.28,
      "learning_rate": 5.626598465473146e-06,
      "loss": 2.0174,
      "step": 110
    },
    {
      "epoch": 0.31,
      "learning_rate": 6.138107416879796e-06,
      "loss": 1.9743,
      "step": 120
    },
    {
      "epoch": 0.33,
      "learning_rate": 6.649616368286445e-06,
      "loss": 1.8913,
      "step": 130
    },
    {
      "epoch": 0.36,
      "learning_rate": 7.161125319693095e-06,
      "loss": 1.8595,
      "step": 140
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.672634271099745e-06,
      "loss": 1.7506,
      "step": 150
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.184143222506395e-06,
      "loss": 1.6762,
      "step": 160
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.695652173913044e-06,
      "loss": 1.6315,
      "step": 170
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.207161125319694e-06,
      "loss": 1.5075,
      "step": 180
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.718670076726344e-06,
      "loss": 1.522,
      "step": 190
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.0230179028132993e-05,
      "loss": 1.4587,
      "step": 200
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.0741687979539643e-05,
      "loss": 1.4112,
      "step": 210
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.1253196930946292e-05,
      "loss": 1.4088,
      "step": 220
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 1.3279,
      "step": 230
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.2276214833759591e-05,
      "loss": 1.3437,
      "step": 240
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.2787723785166241e-05,
      "loss": 1.312,
      "step": 250
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.329923273657289e-05,
      "loss": 1.2475,
      "step": 260
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.381074168797954e-05,
      "loss": 1.2143,
      "step": 270
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.432225063938619e-05,
      "loss": 1.2112,
      "step": 280
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.483375959079284e-05,
      "loss": 1.1612,
      "step": 290
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.534526854219949e-05,
      "loss": 1.1555,
      "step": 300
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.585677749360614e-05,
      "loss": 1.1404,
      "step": 310
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.636828644501279e-05,
      "loss": 1.1411,
      "step": 320
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.687979539641944e-05,
      "loss": 1.1204,
      "step": 330
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.739130434782609e-05,
      "loss": 1.0784,
      "step": 340
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.790281329923274e-05,
      "loss": 1.0616,
      "step": 350
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.8414322250639388e-05,
      "loss": 1.0479,
      "step": 360
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.8925831202046038e-05,
      "loss": 1.087,
      "step": 370
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.9437340153452687e-05,
      "loss": 1.0054,
      "step": 380
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.9948849104859337e-05,
      "loss": 1.0543,
      "step": 390
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.9948849104859337e-05,
      "loss": 0.9918,
      "step": 400
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.9892014776925264e-05,
      "loss": 0.9886,
      "step": 410
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.983518044899119e-05,
      "loss": 0.9545,
      "step": 420
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.9778346121057118e-05,
      "loss": 0.9254,
      "step": 430
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.972151179312305e-05,
      "loss": 0.9518,
      "step": 440
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.9664677465188975e-05,
      "loss": 0.938,
      "step": 450
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.9607843137254903e-05,
      "loss": 0.9437,
      "step": 460
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.955100880932083e-05,
      "loss": 0.9041,
      "step": 470
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.949417448138676e-05,
      "loss": 0.9089,
      "step": 480
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.9437340153452687e-05,
      "loss": 0.8944,
      "step": 490
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.9380505825518614e-05,
      "loss": 0.9,
      "step": 500
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.932367149758454e-05,
      "loss": 0.9285,
      "step": 510
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.926683716965047e-05,
      "loss": 0.9065,
      "step": 520
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.92100028417164e-05,
      "loss": 0.8607,
      "step": 530
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.9153168513782326e-05,
      "loss": 0.8576,
      "step": 540
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.9096334185848253e-05,
      "loss": 0.8888,
      "step": 550
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.903949985791418e-05,
      "loss": 0.8894,
      "step": 560
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.898266552998011e-05,
      "loss": 0.8637,
      "step": 570
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.8925831202046038e-05,
      "loss": 0.8725,
      "step": 580
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.8868996874111965e-05,
      "loss": 0.852,
      "step": 590
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.881216254617789e-05,
      "loss": 0.8559,
      "step": 600
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.875532821824382e-05,
      "loss": 0.8671,
      "step": 610
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.869849389030975e-05,
      "loss": 0.8656,
      "step": 620
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.8641659562375676e-05,
      "loss": 0.8336,
      "step": 630
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.8584825234441603e-05,
      "loss": 0.8274,
      "step": 640
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.852799090650753e-05,
      "loss": 0.8555,
      "step": 650
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.847115657857346e-05,
      "loss": 0.8158,
      "step": 660
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.8414322250639388e-05,
      "loss": 0.8182,
      "step": 670
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.8357487922705315e-05,
      "loss": 0.8143,
      "step": 680
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.8300653594771242e-05,
      "loss": 0.8632,
      "step": 690
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.824381926683717e-05,
      "loss": 0.8461,
      "step": 700
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.81869849389031e-05,
      "loss": 0.8144,
      "step": 710
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.8130150610969027e-05,
      "loss": 0.834,
      "step": 720
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8073316283034954e-05,
      "loss": 0.8729,
      "step": 730
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.801648195510088e-05,
      "loss": 0.8187,
      "step": 740
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.795964762716681e-05,
      "loss": 0.827,
      "step": 750
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.790281329923274e-05,
      "loss": 0.8571,
      "step": 760
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7845978971298665e-05,
      "loss": 0.8204,
      "step": 770
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.7789144643364593e-05,
      "loss": 0.8573,
      "step": 780
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.773231031543052e-05,
      "loss": 0.7526,
      "step": 790
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.767547598749645e-05,
      "loss": 0.7175,
      "step": 800
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.7618641659562377e-05,
      "loss": 0.7448,
      "step": 810
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.7561807331628304e-05,
      "loss": 0.7375,
      "step": 820
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.750497300369423e-05,
      "loss": 0.7215,
      "step": 830
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.7448138675760162e-05,
      "loss": 0.7259,
      "step": 840
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.7564,
      "step": 850
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.7334470019892016e-05,
      "loss": 0.7001,
      "step": 860
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.7277635691957943e-05,
      "loss": 0.7136,
      "step": 870
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.722080136402387e-05,
      "loss": 0.7273,
      "step": 880
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.71639670360898e-05,
      "loss": 0.7035,
      "step": 890
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.7107132708155728e-05,
      "loss": 0.7125,
      "step": 900
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.7050298380221655e-05,
      "loss": 0.73,
      "step": 910
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.6993464052287582e-05,
      "loss": 0.7052,
      "step": 920
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.6936629724353512e-05,
      "loss": 0.7362,
      "step": 930
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.687979539641944e-05,
      "loss": 0.7649,
      "step": 940
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.6822961068485366e-05,
      "loss": 0.7001,
      "step": 950
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6766126740551293e-05,
      "loss": 0.7006,
      "step": 960
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.670929241261722e-05,
      "loss": 0.7253,
      "step": 970
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.665245808468315e-05,
      "loss": 0.7042,
      "step": 980
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.6595623756749078e-05,
      "loss": 0.7072,
      "step": 990
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.6538789428815005e-05,
      "loss": 0.7279,
      "step": 1000
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.6481955100880932e-05,
      "loss": 0.7273,
      "step": 1010
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.6425120772946863e-05,
      "loss": 0.7148,
      "step": 1020
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.636828644501279e-05,
      "loss": 0.7136,
      "step": 1030
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.6311452117078717e-05,
      "loss": 0.6997,
      "step": 1040
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.6254617789144644e-05,
      "loss": 0.7115,
      "step": 1050
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.619778346121057e-05,
      "loss": 0.6905,
      "step": 1060
    },
    {
      "epoch": 2.74,
      "learning_rate": 1.61409491332765e-05,
      "loss": 0.725,
      "step": 1070
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.608411480534243e-05,
      "loss": 0.6954,
      "step": 1080
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.6027280477408356e-05,
      "loss": 0.7409,
      "step": 1090
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.5970446149474283e-05,
      "loss": 0.7275,
      "step": 1100
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.591361182154021e-05,
      "loss": 0.7164,
      "step": 1110
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.585677749360614e-05,
      "loss": 0.7317,
      "step": 1120
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.5799943165672067e-05,
      "loss": 0.7129,
      "step": 1130
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.5743108837737994e-05,
      "loss": 0.6981,
      "step": 1140
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.568627450980392e-05,
      "loss": 0.6901,
      "step": 1150
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.5629440181869852e-05,
      "loss": 0.7029,
      "step": 1160
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.557260585393578e-05,
      "loss": 0.7217,
      "step": 1170
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.5515771526001706e-05,
      "loss": 0.6391,
      "step": 1180
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.5458937198067633e-05,
      "loss": 0.6264,
      "step": 1190
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.540210287013356e-05,
      "loss": 0.6258,
      "step": 1200
    },
    {
      "epoch": 3.09,
      "learning_rate": 1.534526854219949e-05,
      "loss": 0.6432,
      "step": 1210
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.5288434214265418e-05,
      "loss": 0.6276,
      "step": 1220
    },
    {
      "epoch": 3.15,
      "learning_rate": 1.5231599886331345e-05,
      "loss": 0.6057,
      "step": 1230
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.5174765558397273e-05,
      "loss": 0.6249,
      "step": 1240
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.51179312304632e-05,
      "loss": 0.6112,
      "step": 1250
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.506109690252913e-05,
      "loss": 0.6232,
      "step": 1260
    },
    {
      "epoch": 3.25,
      "learning_rate": 1.5004262574595056e-05,
      "loss": 0.6441,
      "step": 1270
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.4947428246660985e-05,
      "loss": 0.6036,
      "step": 1280
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.4890593918726912e-05,
      "loss": 0.6509,
      "step": 1290
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.483375959079284e-05,
      "loss": 0.611,
      "step": 1300
    },
    {
      "epoch": 3.35,
      "learning_rate": 1.4776925262858768e-05,
      "loss": 0.6206,
      "step": 1310
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.4720090934924695e-05,
      "loss": 0.6064,
      "step": 1320
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.4663256606990624e-05,
      "loss": 0.6353,
      "step": 1330
    },
    {
      "epoch": 3.43,
      "learning_rate": 1.4606422279056551e-05,
      "loss": 0.6394,
      "step": 1340
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.454958795112248e-05,
      "loss": 0.6163,
      "step": 1350
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.4492753623188407e-05,
      "loss": 0.614,
      "step": 1360
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.4435919295254336e-05,
      "loss": 0.6185,
      "step": 1370
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.4379084967320263e-05,
      "loss": 0.6439,
      "step": 1380
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.432225063938619e-05,
      "loss": 0.6156,
      "step": 1390
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.4265416311452118e-05,
      "loss": 0.6665,
      "step": 1400
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.4208581983518046e-05,
      "loss": 0.6015,
      "step": 1410
    },
    {
      "epoch": 3.63,
      "learning_rate": 1.4151747655583974e-05,
      "loss": 0.5993,
      "step": 1420
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.4094913327649901e-05,
      "loss": 0.6466,
      "step": 1430
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.403807899971583e-05,
      "loss": 0.6505,
      "step": 1440
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.3981244671781757e-05,
      "loss": 0.6328,
      "step": 1450
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.3924410343847686e-05,
      "loss": 0.6456,
      "step": 1460
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.3867576015913613e-05,
      "loss": 0.6334,
      "step": 1470
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.381074168797954e-05,
      "loss": 0.5947,
      "step": 1480
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.3753907360045469e-05,
      "loss": 0.6485,
      "step": 1490
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.3697073032111396e-05,
      "loss": 0.6559,
      "step": 1500
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.3640238704177325e-05,
      "loss": 0.6182,
      "step": 1510
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.3583404376243252e-05,
      "loss": 0.6184,
      "step": 1520
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.352657004830918e-05,
      "loss": 0.6376,
      "step": 1530
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.3469735720375108e-05,
      "loss": 0.6126,
      "step": 1540
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.3412901392441036e-05,
      "loss": 0.6122,
      "step": 1550
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.3356067064506963e-05,
      "loss": 0.6305,
      "step": 1560
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.329923273657289e-05,
      "loss": 0.5854,
      "step": 1570
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.324239840863882e-05,
      "loss": 0.5463,
      "step": 1580
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.3185564080704746e-05,
      "loss": 0.5565,
      "step": 1590
    },
    {
      "epoch": 4.09,
      "learning_rate": 1.3128729752770675e-05,
      "loss": 0.5883,
      "step": 1600
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.3071895424836602e-05,
      "loss": 0.5709,
      "step": 1610
    },
    {
      "epoch": 4.14,
      "learning_rate": 1.3015061096902531e-05,
      "loss": 0.5554,
      "step": 1620
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.2958226768968458e-05,
      "loss": 0.541,
      "step": 1630
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.2901392441034387e-05,
      "loss": 0.5633,
      "step": 1640
    },
    {
      "epoch": 4.22,
      "learning_rate": 1.2844558113100314e-05,
      "loss": 0.53,
      "step": 1650
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.2787723785166241e-05,
      "loss": 0.5695,
      "step": 1660
    },
    {
      "epoch": 4.27,
      "learning_rate": 1.273088945723217e-05,
      "loss": 0.5689,
      "step": 1670
    },
    {
      "epoch": 4.3,
      "learning_rate": 1.2674055129298097e-05,
      "loss": 0.561,
      "step": 1680
    },
    {
      "epoch": 4.32,
      "learning_rate": 1.2617220801364026e-05,
      "loss": 0.5441,
      "step": 1690
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.2560386473429953e-05,
      "loss": 0.5052,
      "step": 1700
    },
    {
      "epoch": 4.37,
      "learning_rate": 1.2503552145495881e-05,
      "loss": 0.5565,
      "step": 1710
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.2446717817561809e-05,
      "loss": 0.569,
      "step": 1720
    },
    {
      "epoch": 4.42,
      "learning_rate": 1.2389883489627737e-05,
      "loss": 0.5383,
      "step": 1730
    },
    {
      "epoch": 4.45,
      "learning_rate": 1.2333049161693664e-05,
      "loss": 0.5603,
      "step": 1740
    },
    {
      "epoch": 4.48,
      "learning_rate": 1.2276214833759591e-05,
      "loss": 0.5318,
      "step": 1750
    },
    {
      "epoch": 4.5,
      "learning_rate": 1.221938050582552e-05,
      "loss": 0.5463,
      "step": 1760
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.2162546177891447e-05,
      "loss": 0.5893,
      "step": 1770
    },
    {
      "epoch": 4.55,
      "learning_rate": 1.2105711849957376e-05,
      "loss": 0.584,
      "step": 1780
    },
    {
      "epoch": 4.58,
      "learning_rate": 1.2048877522023303e-05,
      "loss": 0.5615,
      "step": 1790
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.1992043194089232e-05,
      "loss": 0.5359,
      "step": 1800
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.1935208866155159e-05,
      "loss": 0.5391,
      "step": 1810
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.1878374538221088e-05,
      "loss": 0.588,
      "step": 1820
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.1821540210287015e-05,
      "loss": 0.5374,
      "step": 1830
    },
    {
      "epoch": 4.71,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.5946,
      "step": 1840
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.170787155441887e-05,
      "loss": 0.5641,
      "step": 1850
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.1651037226484798e-05,
      "loss": 0.5724,
      "step": 1860
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.1594202898550726e-05,
      "loss": 0.5327,
      "step": 1870
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.1537368570616654e-05,
      "loss": 0.5543,
      "step": 1880
    },
    {
      "epoch": 4.83,
      "learning_rate": 1.1480534242682582e-05,
      "loss": 0.5694,
      "step": 1890
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.142369991474851e-05,
      "loss": 0.5487,
      "step": 1900
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.1366865586814438e-05,
      "loss": 0.5794,
      "step": 1910
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.1310031258880365e-05,
      "loss": 0.5981,
      "step": 1920
    },
    {
      "epoch": 4.94,
      "learning_rate": 1.1253196930946292e-05,
      "loss": 0.5559,
      "step": 1930
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.1196362603012221e-05,
      "loss": 0.5132,
      "step": 1940
    },
    {
      "epoch": 4.99,
      "learning_rate": 1.1139528275078148e-05,
      "loss": 0.5294,
      "step": 1950
    },
    {
      "epoch": 5.01,
      "learning_rate": 1.1082693947144077e-05,
      "loss": 0.5457,
      "step": 1960
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.1025859619210004e-05,
      "loss": 0.5082,
      "step": 1970
    },
    {
      "epoch": 5.06,
      "learning_rate": 1.0969025291275933e-05,
      "loss": 0.4839,
      "step": 1980
    },
    {
      "epoch": 5.09,
      "learning_rate": 1.091219096334186e-05,
      "loss": 0.5042,
      "step": 1990
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.0855356635407789e-05,
      "loss": 0.4862,
      "step": 2000
    },
    {
      "epoch": 5.14,
      "learning_rate": 1.0798522307473716e-05,
      "loss": 0.5559,
      "step": 2010
    },
    {
      "epoch": 5.17,
      "learning_rate": 1.0741687979539643e-05,
      "loss": 0.5107,
      "step": 2020
    },
    {
      "epoch": 5.19,
      "learning_rate": 1.0684853651605571e-05,
      "loss": 0.4919,
      "step": 2030
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.0628019323671499e-05,
      "loss": 0.5198,
      "step": 2040
    },
    {
      "epoch": 5.24,
      "learning_rate": 1.0571184995737427e-05,
      "loss": 0.4967,
      "step": 2050
    },
    {
      "epoch": 5.27,
      "learning_rate": 1.0514350667803354e-05,
      "loss": 0.4895,
      "step": 2060
    },
    {
      "epoch": 5.29,
      "learning_rate": 1.0457516339869283e-05,
      "loss": 0.492,
      "step": 2070
    },
    {
      "epoch": 5.32,
      "learning_rate": 1.040068201193521e-05,
      "loss": 0.494,
      "step": 2080
    },
    {
      "epoch": 5.35,
      "learning_rate": 1.0343847684001139e-05,
      "loss": 0.4864,
      "step": 2090
    },
    {
      "epoch": 5.37,
      "learning_rate": 1.0287013356067066e-05,
      "loss": 0.5043,
      "step": 2100
    },
    {
      "epoch": 5.4,
      "learning_rate": 1.0230179028132993e-05,
      "loss": 0.5088,
      "step": 2110
    },
    {
      "epoch": 5.42,
      "learning_rate": 1.0173344700198922e-05,
      "loss": 0.5035,
      "step": 2120
    },
    {
      "epoch": 5.45,
      "learning_rate": 1.0116510372264849e-05,
      "loss": 0.5052,
      "step": 2130
    },
    {
      "epoch": 5.47,
      "learning_rate": 1.0059676044330778e-05,
      "loss": 0.5096,
      "step": 2140
    },
    {
      "epoch": 5.5,
      "learning_rate": 1.0002841716396705e-05,
      "loss": 0.4916,
      "step": 2150
    },
    {
      "epoch": 5.52,
      "learning_rate": 9.946007388462632e-06,
      "loss": 0.4942,
      "step": 2160
    },
    {
      "epoch": 5.55,
      "learning_rate": 9.889173060528559e-06,
      "loss": 0.4957,
      "step": 2170
    },
    {
      "epoch": 5.58,
      "learning_rate": 9.832338732594488e-06,
      "loss": 0.5078,
      "step": 2180
    },
    {
      "epoch": 5.6,
      "learning_rate": 9.775504404660415e-06,
      "loss": 0.4841,
      "step": 2190
    },
    {
      "epoch": 5.63,
      "learning_rate": 9.718670076726344e-06,
      "loss": 0.4971,
      "step": 2200
    },
    {
      "epoch": 5.65,
      "learning_rate": 9.66183574879227e-06,
      "loss": 0.485,
      "step": 2210
    },
    {
      "epoch": 5.68,
      "learning_rate": 9.6050014208582e-06,
      "loss": 0.4804,
      "step": 2220
    },
    {
      "epoch": 5.7,
      "learning_rate": 9.548167092924126e-06,
      "loss": 0.4811,
      "step": 2230
    },
    {
      "epoch": 5.73,
      "learning_rate": 9.491332764990055e-06,
      "loss": 0.5065,
      "step": 2240
    },
    {
      "epoch": 5.75,
      "learning_rate": 9.434498437055982e-06,
      "loss": 0.4964,
      "step": 2250
    },
    {
      "epoch": 5.78,
      "learning_rate": 9.37766410912191e-06,
      "loss": 0.5003,
      "step": 2260
    },
    {
      "epoch": 5.81,
      "learning_rate": 9.320829781187838e-06,
      "loss": 0.4964,
      "step": 2270
    },
    {
      "epoch": 5.83,
      "learning_rate": 9.263995453253765e-06,
      "loss": 0.5077,
      "step": 2280
    },
    {
      "epoch": 5.86,
      "learning_rate": 9.207161125319694e-06,
      "loss": 0.5197,
      "step": 2290
    },
    {
      "epoch": 5.88,
      "learning_rate": 9.150326797385621e-06,
      "loss": 0.5071,
      "step": 2300
    },
    {
      "epoch": 5.91,
      "learning_rate": 9.09349246945155e-06,
      "loss": 0.4853,
      "step": 2310
    },
    {
      "epoch": 5.93,
      "learning_rate": 9.036658141517477e-06,
      "loss": 0.4799,
      "step": 2320
    },
    {
      "epoch": 5.96,
      "learning_rate": 8.979823813583406e-06,
      "loss": 0.4857,
      "step": 2330
    },
    {
      "epoch": 5.98,
      "learning_rate": 8.922989485649333e-06,
      "loss": 0.4689,
      "step": 2340
    },
    {
      "epoch": 6.01,
      "learning_rate": 8.86615515771526e-06,
      "loss": 0.5046,
      "step": 2350
    },
    {
      "epoch": 6.04,
      "learning_rate": 8.809320829781189e-06,
      "loss": 0.4501,
      "step": 2360
    },
    {
      "epoch": 6.06,
      "learning_rate": 8.752486501847116e-06,
      "loss": 0.4295,
      "step": 2370
    },
    {
      "epoch": 6.09,
      "learning_rate": 8.695652173913044e-06,
      "loss": 0.4513,
      "step": 2380
    },
    {
      "epoch": 6.11,
      "learning_rate": 8.638817845978971e-06,
      "loss": 0.4742,
      "step": 2390
    },
    {
      "epoch": 6.14,
      "learning_rate": 8.5819835180449e-06,
      "loss": 0.4412,
      "step": 2400
    },
    {
      "epoch": 6.16,
      "learning_rate": 8.525149190110827e-06,
      "loss": 0.4546,
      "step": 2410
    },
    {
      "epoch": 6.19,
      "learning_rate": 8.468314862176756e-06,
      "loss": 0.4547,
      "step": 2420
    },
    {
      "epoch": 6.21,
      "learning_rate": 8.411480534242683e-06,
      "loss": 0.4286,
      "step": 2430
    },
    {
      "epoch": 6.24,
      "learning_rate": 8.35464620630861e-06,
      "loss": 0.4475,
      "step": 2440
    },
    {
      "epoch": 6.27,
      "learning_rate": 8.297811878374539e-06,
      "loss": 0.4666,
      "step": 2450
    },
    {
      "epoch": 6.29,
      "learning_rate": 8.240977550440466e-06,
      "loss": 0.4564,
      "step": 2460
    },
    {
      "epoch": 6.32,
      "learning_rate": 8.184143222506395e-06,
      "loss": 0.4504,
      "step": 2470
    },
    {
      "epoch": 6.34,
      "learning_rate": 8.127308894572322e-06,
      "loss": 0.4422,
      "step": 2480
    },
    {
      "epoch": 6.37,
      "learning_rate": 8.07047456663825e-06,
      "loss": 0.4548,
      "step": 2490
    },
    {
      "epoch": 6.39,
      "learning_rate": 8.013640238704178e-06,
      "loss": 0.457,
      "step": 2500
    },
    {
      "epoch": 6.42,
      "learning_rate": 7.956805910770105e-06,
      "loss": 0.4474,
      "step": 2510
    },
    {
      "epoch": 6.44,
      "learning_rate": 7.899971582836034e-06,
      "loss": 0.4462,
      "step": 2520
    },
    {
      "epoch": 6.47,
      "learning_rate": 7.84313725490196e-06,
      "loss": 0.4246,
      "step": 2530
    },
    {
      "epoch": 6.5,
      "learning_rate": 7.78630292696789e-06,
      "loss": 0.4488,
      "step": 2540
    },
    {
      "epoch": 6.52,
      "learning_rate": 7.729468599033817e-06,
      "loss": 0.4616,
      "step": 2550
    },
    {
      "epoch": 6.55,
      "learning_rate": 7.672634271099745e-06,
      "loss": 0.4273,
      "step": 2560
    },
    {
      "epoch": 6.57,
      "learning_rate": 7.615799943165672e-06,
      "loss": 0.4671,
      "step": 2570
    },
    {
      "epoch": 6.6,
      "learning_rate": 7.5589656152316e-06,
      "loss": 0.4738,
      "step": 2580
    },
    {
      "epoch": 6.62,
      "learning_rate": 7.502131287297528e-06,
      "loss": 0.4563,
      "step": 2590
    },
    {
      "epoch": 6.65,
      "learning_rate": 7.445296959363456e-06,
      "loss": 0.4773,
      "step": 2600
    },
    {
      "epoch": 6.67,
      "learning_rate": 7.388462631429384e-06,
      "loss": 0.4678,
      "step": 2610
    },
    {
      "epoch": 6.7,
      "learning_rate": 7.331628303495312e-06,
      "loss": 0.4758,
      "step": 2620
    },
    {
      "epoch": 6.73,
      "learning_rate": 7.27479397556124e-06,
      "loss": 0.4357,
      "step": 2630
    },
    {
      "epoch": 6.75,
      "learning_rate": 7.217959647627168e-06,
      "loss": 0.4732,
      "step": 2640
    },
    {
      "epoch": 6.78,
      "learning_rate": 7.161125319693095e-06,
      "loss": 0.4376,
      "step": 2650
    },
    {
      "epoch": 6.8,
      "learning_rate": 7.104290991759023e-06,
      "loss": 0.4503,
      "step": 2660
    },
    {
      "epoch": 6.83,
      "learning_rate": 7.047456663824951e-06,
      "loss": 0.452,
      "step": 2670
    },
    {
      "epoch": 6.85,
      "learning_rate": 6.990622335890879e-06,
      "loss": 0.4345,
      "step": 2680
    },
    {
      "epoch": 6.88,
      "learning_rate": 6.9337880079568065e-06,
      "loss": 0.4928,
      "step": 2690
    },
    {
      "epoch": 6.91,
      "learning_rate": 6.8769536800227344e-06,
      "loss": 0.4375,
      "step": 2700
    },
    {
      "epoch": 6.93,
      "learning_rate": 6.820119352088662e-06,
      "loss": 0.4412,
      "step": 2710
    },
    {
      "epoch": 6.96,
      "learning_rate": 6.76328502415459e-06,
      "loss": 0.4186,
      "step": 2720
    },
    {
      "epoch": 6.98,
      "learning_rate": 6.706450696220518e-06,
      "loss": 0.433,
      "step": 2730
    },
    {
      "epoch": 7.01,
      "learning_rate": 6.649616368286445e-06,
      "loss": 0.4575,
      "step": 2740
    },
    {
      "epoch": 7.03,
      "learning_rate": 6.592782040352373e-06,
      "loss": 0.4553,
      "step": 2750
    },
    {
      "epoch": 7.06,
      "learning_rate": 6.535947712418301e-06,
      "loss": 0.4123,
      "step": 2760
    },
    {
      "epoch": 7.08,
      "learning_rate": 6.479113384484229e-06,
      "loss": 0.4148,
      "step": 2770
    },
    {
      "epoch": 7.11,
      "learning_rate": 6.422279056550157e-06,
      "loss": 0.4094,
      "step": 2780
    },
    {
      "epoch": 7.14,
      "learning_rate": 6.365444728616085e-06,
      "loss": 0.402,
      "step": 2790
    },
    {
      "epoch": 7.16,
      "learning_rate": 6.308610400682013e-06,
      "loss": 0.4316,
      "step": 2800
    },
    {
      "epoch": 7.19,
      "learning_rate": 6.251776072747941e-06,
      "loss": 0.4182,
      "step": 2810
    },
    {
      "epoch": 7.21,
      "learning_rate": 6.194941744813869e-06,
      "loss": 0.4226,
      "step": 2820
    },
    {
      "epoch": 7.24,
      "learning_rate": 6.138107416879796e-06,
      "loss": 0.4167,
      "step": 2830
    },
    {
      "epoch": 7.26,
      "learning_rate": 6.081273088945724e-06,
      "loss": 0.3813,
      "step": 2840
    },
    {
      "epoch": 7.29,
      "learning_rate": 6.0244387610116515e-06,
      "loss": 0.4131,
      "step": 2850
    },
    {
      "epoch": 7.31,
      "learning_rate": 5.9676044330775795e-06,
      "loss": 0.4336,
      "step": 2860
    },
    {
      "epoch": 7.34,
      "learning_rate": 5.910770105143507e-06,
      "loss": 0.401,
      "step": 2870
    },
    {
      "epoch": 7.37,
      "learning_rate": 5.853935777209435e-06,
      "loss": 0.4051,
      "step": 2880
    },
    {
      "epoch": 7.39,
      "learning_rate": 5.797101449275363e-06,
      "loss": 0.4045,
      "step": 2890
    },
    {
      "epoch": 7.42,
      "learning_rate": 5.740267121341291e-06,
      "loss": 0.4039,
      "step": 2900
    },
    {
      "epoch": 7.44,
      "learning_rate": 5.683432793407219e-06,
      "loss": 0.4158,
      "step": 2910
    },
    {
      "epoch": 7.47,
      "learning_rate": 5.626598465473146e-06,
      "loss": 0.3805,
      "step": 2920
    },
    {
      "epoch": 7.49,
      "learning_rate": 5.569764137539074e-06,
      "loss": 0.4288,
      "step": 2930
    },
    {
      "epoch": 7.52,
      "learning_rate": 5.512929809605002e-06,
      "loss": 0.3905,
      "step": 2940
    },
    {
      "epoch": 7.54,
      "learning_rate": 5.45609548167093e-06,
      "loss": 0.4227,
      "step": 2950
    },
    {
      "epoch": 7.57,
      "learning_rate": 5.399261153736858e-06,
      "loss": 0.437,
      "step": 2960
    },
    {
      "epoch": 7.6,
      "learning_rate": 5.342426825802786e-06,
      "loss": 0.4289,
      "step": 2970
    },
    {
      "epoch": 7.62,
      "learning_rate": 5.285592497868714e-06,
      "loss": 0.4034,
      "step": 2980
    },
    {
      "epoch": 7.65,
      "learning_rate": 5.2287581699346416e-06,
      "loss": 0.4394,
      "step": 2990
    },
    {
      "epoch": 7.67,
      "learning_rate": 5.1719238420005695e-06,
      "loss": 0.4292,
      "step": 3000
    },
    {
      "epoch": 7.7,
      "learning_rate": 5.1150895140664966e-06,
      "loss": 0.4219,
      "step": 3010
    },
    {
      "epoch": 7.72,
      "learning_rate": 5.0582551861324245e-06,
      "loss": 0.4181,
      "step": 3020
    },
    {
      "epoch": 7.75,
      "learning_rate": 5.001420858198352e-06,
      "loss": 0.3983,
      "step": 3030
    },
    {
      "epoch": 7.77,
      "learning_rate": 4.9445865302642795e-06,
      "loss": 0.4167,
      "step": 3040
    },
    {
      "epoch": 7.8,
      "learning_rate": 4.887752202330207e-06,
      "loss": 0.3953,
      "step": 3050
    },
    {
      "epoch": 7.83,
      "learning_rate": 4.830917874396135e-06,
      "loss": 0.4081,
      "step": 3060
    },
    {
      "epoch": 7.85,
      "learning_rate": 4.774083546462063e-06,
      "loss": 0.3817,
      "step": 3070
    },
    {
      "epoch": 7.88,
      "learning_rate": 4.717249218527991e-06,
      "loss": 0.4229,
      "step": 3080
    },
    {
      "epoch": 7.9,
      "learning_rate": 4.660414890593919e-06,
      "loss": 0.41,
      "step": 3090
    },
    {
      "epoch": 7.93,
      "learning_rate": 4.603580562659847e-06,
      "loss": 0.3895,
      "step": 3100
    },
    {
      "epoch": 7.95,
      "learning_rate": 4.546746234725775e-06,
      "loss": 0.4021,
      "step": 3110
    },
    {
      "epoch": 7.98,
      "learning_rate": 4.489911906791703e-06,
      "loss": 0.419,
      "step": 3120
    },
    {
      "epoch": 8.01,
      "learning_rate": 4.43307757885763e-06,
      "loss": 0.4308,
      "step": 3130
    },
    {
      "epoch": 8.03,
      "learning_rate": 4.376243250923558e-06,
      "loss": 0.3788,
      "step": 3140
    },
    {
      "epoch": 8.06,
      "learning_rate": 4.319408922989486e-06,
      "loss": 0.3693,
      "step": 3150
    },
    {
      "epoch": 8.08,
      "learning_rate": 4.262574595055414e-06,
      "loss": 0.3889,
      "step": 3160
    },
    {
      "epoch": 8.11,
      "learning_rate": 4.205740267121342e-06,
      "loss": 0.4061,
      "step": 3170
    },
    {
      "epoch": 8.13,
      "learning_rate": 4.1489059391872695e-06,
      "loss": 0.3893,
      "step": 3180
    },
    {
      "epoch": 8.16,
      "learning_rate": 4.092071611253197e-06,
      "loss": 0.3829,
      "step": 3190
    },
    {
      "epoch": 8.18,
      "learning_rate": 4.035237283319125e-06,
      "loss": 0.3624,
      "step": 3200
    },
    {
      "epoch": 8.21,
      "learning_rate": 3.978402955385052e-06,
      "loss": 0.3947,
      "step": 3210
    },
    {
      "epoch": 8.24,
      "learning_rate": 3.92156862745098e-06,
      "loss": 0.4006,
      "step": 3220
    },
    {
      "epoch": 8.26,
      "learning_rate": 3.864734299516908e-06,
      "loss": 0.3909,
      "step": 3230
    },
    {
      "epoch": 8.29,
      "learning_rate": 3.807899971582836e-06,
      "loss": 0.3744,
      "step": 3240
    },
    {
      "epoch": 8.31,
      "learning_rate": 3.751065643648764e-06,
      "loss": 0.4052,
      "step": 3250
    },
    {
      "epoch": 8.34,
      "learning_rate": 3.694231315714692e-06,
      "loss": 0.3801,
      "step": 3260
    },
    {
      "epoch": 8.36,
      "learning_rate": 3.63739698778062e-06,
      "loss": 0.3822,
      "step": 3270
    },
    {
      "epoch": 8.39,
      "learning_rate": 3.5805626598465474e-06,
      "loss": 0.3856,
      "step": 3280
    },
    {
      "epoch": 8.41,
      "learning_rate": 3.5237283319124753e-06,
      "loss": 0.3644,
      "step": 3290
    },
    {
      "epoch": 8.44,
      "learning_rate": 3.4668940039784033e-06,
      "loss": 0.3844,
      "step": 3300
    },
    {
      "epoch": 8.47,
      "learning_rate": 3.410059676044331e-06,
      "loss": 0.4009,
      "step": 3310
    },
    {
      "epoch": 8.49,
      "learning_rate": 3.353225348110259e-06,
      "loss": 0.3886,
      "step": 3320
    },
    {
      "epoch": 8.52,
      "learning_rate": 3.2963910201761866e-06,
      "loss": 0.3711,
      "step": 3330
    },
    {
      "epoch": 8.54,
      "learning_rate": 3.2395566922421145e-06,
      "loss": 0.3844,
      "step": 3340
    },
    {
      "epoch": 8.57,
      "learning_rate": 3.1827223643080424e-06,
      "loss": 0.3861,
      "step": 3350
    },
    {
      "epoch": 8.59,
      "learning_rate": 3.1258880363739704e-06,
      "loss": 0.3874,
      "step": 3360
    },
    {
      "epoch": 8.62,
      "learning_rate": 3.069053708439898e-06,
      "loss": 0.3617,
      "step": 3370
    },
    {
      "epoch": 8.64,
      "learning_rate": 3.0122193805058258e-06,
      "loss": 0.37,
      "step": 3380
    },
    {
      "epoch": 8.67,
      "learning_rate": 2.9553850525717537e-06,
      "loss": 0.3907,
      "step": 3390
    },
    {
      "epoch": 8.7,
      "learning_rate": 2.8985507246376816e-06,
      "loss": 0.3958,
      "step": 3400
    },
    {
      "epoch": 8.72,
      "learning_rate": 2.8417163967036095e-06,
      "loss": 0.3868,
      "step": 3410
    },
    {
      "epoch": 8.75,
      "learning_rate": 2.784882068769537e-06,
      "loss": 0.3755,
      "step": 3420
    },
    {
      "epoch": 8.77,
      "learning_rate": 2.728047740835465e-06,
      "loss": 0.371,
      "step": 3430
    },
    {
      "epoch": 8.8,
      "learning_rate": 2.671213412901393e-06,
      "loss": 0.3793,
      "step": 3440
    },
    {
      "epoch": 8.82,
      "learning_rate": 2.6143790849673208e-06,
      "loss": 0.3588,
      "step": 3450
    },
    {
      "epoch": 8.85,
      "learning_rate": 2.5575447570332483e-06,
      "loss": 0.3868,
      "step": 3460
    },
    {
      "epoch": 8.87,
      "learning_rate": 2.500710429099176e-06,
      "loss": 0.3826,
      "step": 3470
    },
    {
      "epoch": 8.9,
      "learning_rate": 2.4438761011651037e-06,
      "loss": 0.3868,
      "step": 3480
    },
    {
      "epoch": 8.93,
      "learning_rate": 2.3870417732310316e-06,
      "loss": 0.4038,
      "step": 3490
    },
    {
      "epoch": 8.95,
      "learning_rate": 2.3302074452969595e-06,
      "loss": 0.3527,
      "step": 3500
    },
    {
      "epoch": 8.98,
      "learning_rate": 2.2733731173628875e-06,
      "loss": 0.3766,
      "step": 3510
    },
    {
      "epoch": 9.0,
      "learning_rate": 2.216538789428815e-06,
      "loss": 0.3853,
      "step": 3520
    },
    {
      "epoch": 9.03,
      "learning_rate": 2.159704461494743e-06,
      "loss": 0.36,
      "step": 3530
    },
    {
      "epoch": 9.05,
      "learning_rate": 2.102870133560671e-06,
      "loss": 0.3456,
      "step": 3540
    },
    {
      "epoch": 9.08,
      "learning_rate": 2.0460358056265987e-06,
      "loss": 0.3509,
      "step": 3550
    },
    {
      "epoch": 9.1,
      "learning_rate": 1.989201477692526e-06,
      "loss": 0.3565,
      "step": 3560
    },
    {
      "epoch": 9.13,
      "learning_rate": 1.932367149758454e-06,
      "loss": 0.3911,
      "step": 3570
    },
    {
      "epoch": 9.16,
      "learning_rate": 1.875532821824382e-06,
      "loss": 0.3853,
      "step": 3580
    },
    {
      "epoch": 9.18,
      "learning_rate": 1.81869849389031e-06,
      "loss": 0.3816,
      "step": 3590
    },
    {
      "epoch": 9.21,
      "learning_rate": 1.7618641659562377e-06,
      "loss": 0.3679,
      "step": 3600
    },
    {
      "epoch": 9.23,
      "learning_rate": 1.7050298380221656e-06,
      "loss": 0.365,
      "step": 3610
    },
    {
      "epoch": 9.26,
      "learning_rate": 1.6481955100880933e-06,
      "loss": 0.346,
      "step": 3620
    },
    {
      "epoch": 9.28,
      "learning_rate": 1.5913611821540212e-06,
      "loss": 0.3598,
      "step": 3630
    },
    {
      "epoch": 9.31,
      "learning_rate": 1.534526854219949e-06,
      "loss": 0.3428,
      "step": 3640
    },
    {
      "epoch": 9.33,
      "learning_rate": 1.4776925262858768e-06,
      "loss": 0.3959,
      "step": 3650
    },
    {
      "epoch": 9.36,
      "learning_rate": 1.4208581983518048e-06,
      "loss": 0.3707,
      "step": 3660
    },
    {
      "epoch": 9.39,
      "learning_rate": 1.3640238704177325e-06,
      "loss": 0.3541,
      "step": 3670
    },
    {
      "epoch": 9.41,
      "learning_rate": 1.3071895424836604e-06,
      "loss": 0.3691,
      "step": 3680
    },
    {
      "epoch": 9.44,
      "learning_rate": 1.250355214549588e-06,
      "loss": 0.3736,
      "step": 3690
    },
    {
      "epoch": 9.46,
      "learning_rate": 1.1935208866155158e-06,
      "loss": 0.3496,
      "step": 3700
    },
    {
      "epoch": 9.49,
      "learning_rate": 1.1366865586814437e-06,
      "loss": 0.3369,
      "step": 3710
    },
    {
      "epoch": 9.51,
      "learning_rate": 1.0798522307473714e-06,
      "loss": 0.3959,
      "step": 3720
    },
    {
      "epoch": 9.54,
      "learning_rate": 1.0230179028132994e-06,
      "loss": 0.3604,
      "step": 3730
    },
    {
      "epoch": 9.57,
      "learning_rate": 9.66183574879227e-07,
      "loss": 0.3569,
      "step": 3740
    },
    {
      "epoch": 9.59,
      "learning_rate": 9.09349246945155e-07,
      "loss": 0.3648,
      "step": 3750
    },
    {
      "epoch": 9.62,
      "learning_rate": 8.525149190110828e-07,
      "loss": 0.354,
      "step": 3760
    },
    {
      "epoch": 9.64,
      "learning_rate": 7.956805910770106e-07,
      "loss": 0.3331,
      "step": 3770
    },
    {
      "epoch": 9.67,
      "learning_rate": 7.388462631429384e-07,
      "loss": 0.384,
      "step": 3780
    },
    {
      "epoch": 9.69,
      "learning_rate": 6.820119352088662e-07,
      "loss": 0.3496,
      "step": 3790
    },
    {
      "epoch": 9.72,
      "learning_rate": 6.25177607274794e-07,
      "loss": 0.3779,
      "step": 3800
    },
    {
      "epoch": 9.74,
      "learning_rate": 5.683432793407219e-07,
      "loss": 0.3726,
      "step": 3810
    },
    {
      "epoch": 9.77,
      "learning_rate": 5.115089514066497e-07,
      "loss": 0.3931,
      "step": 3820
    },
    {
      "epoch": 9.8,
      "learning_rate": 4.546746234725775e-07,
      "loss": 0.3714,
      "step": 3830
    },
    {
      "epoch": 9.82,
      "learning_rate": 3.978402955385053e-07,
      "loss": 0.3703,
      "step": 3840
    },
    {
      "epoch": 9.85,
      "learning_rate": 3.410059676044331e-07,
      "loss": 0.3559,
      "step": 3850
    },
    {
      "epoch": 9.87,
      "learning_rate": 2.8417163967036093e-07,
      "loss": 0.3656,
      "step": 3860
    },
    {
      "epoch": 9.9,
      "learning_rate": 2.2733731173628875e-07,
      "loss": 0.3534,
      "step": 3870
    },
    {
      "epoch": 9.92,
      "learning_rate": 1.7050298380221656e-07,
      "loss": 0.3769,
      "step": 3880
    },
    {
      "epoch": 9.95,
      "learning_rate": 1.1366865586814437e-07,
      "loss": 0.3442,
      "step": 3890
    },
    {
      "epoch": 9.97,
      "learning_rate": 5.6834327934072186e-08,
      "loss": 0.3558,
      "step": 3900
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0,
      "loss": 0.3395,
      "step": 3910
    },
    {
      "epoch": 10.0,
      "step": 3910,
      "total_flos": 1.0186138340339622e+17,
      "train_loss": 0.6484309974533823,
      "train_runtime": 10912.4775,
      "train_samples_per_second": 91.732,
      "train_steps_per_second": 0.358
    }
  ],
  "max_steps": 3910,
  "num_train_epochs": 10,
  "total_flos": 1.0186138340339622e+17,
  "trial_name": null,
  "trial_params": null
}