{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.6457290244272493,
  "global_step": 1016,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.4193548387096776e-06,
      "loss": 3.4647,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.838709677419355e-06,
      "loss": 2.4108,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.258064516129032e-06,
      "loss": 1.5747,
      "step": 15
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.67741935483871e-06,
      "loss": 1.5005,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.2096774193548387e-05,
      "loss": 1.4694,
      "step": 25
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.4516129032258065e-05,
      "loss": 1.4399,
      "step": 30
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.6935483870967744e-05,
      "loss": 1.4215,
      "step": 35
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.935483870967742e-05,
      "loss": 1.4526,
      "step": 40
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.1774193548387097e-05,
      "loss": 1.4458,
      "step": 45
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.4193548387096773e-05,
      "loss": 1.4237,
      "step": 50
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.6612903225806453e-05,
      "loss": 1.4339,
      "step": 55
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.903225806451613e-05,
      "loss": 1.4294,
      "step": 60
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.999951499529191e-05,
      "loss": 1.4968,
      "step": 65
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.9996551191211948e-05,
      "loss": 1.4261,
      "step": 70
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.9990893561853812e-05,
      "loss": 1.4371,
      "step": 75
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9982543123495507e-05,
      "loss": 1.4412,
      "step": 80
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.9971501376123366e-05,
      "loss": 1.4638,
      "step": 85
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.9957770303162634e-05,
      "loss": 1.4498,
      "step": 90
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.9941352371121173e-05,
      "loss": 1.4393,
      "step": 95
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.992225052914641e-05,
      "loss": 1.4291,
      "step": 100
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.990046820849558e-05,
      "loss": 1.4587,
      "step": 105
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9876009321919372e-05,
      "loss": 1.4272,
      "step": 110
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.9848878262959076e-05,
      "loss": 1.4622,
      "step": 115
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.981907990515739e-05,
      "loss": 1.4863,
      "step": 120
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.9786619601182965e-05,
      "loss": 1.4274,
      "step": 125
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.975150318186892e-05,
      "loss": 1.4382,
      "step": 130
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.9713736955165456e-05,
      "loss": 1.4413,
      "step": 135
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.9673327705006745e-05,
      "loss": 1.431,
      "step": 140
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.963028269009235e-05,
      "loss": 1.4659,
      "step": 145
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.9584609642583337e-05,
      "loss": 1.4426,
      "step": 150
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.9536316766713357e-05,
      "loss": 1.4733,
      "step": 155
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.9485412737314923e-05,
      "loss": 1.4595,
      "step": 160
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.9431906698261136e-05,
      "loss": 1.4845,
      "step": 165
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.9375808260823192e-05,
      "loss": 1.5219,
      "step": 170
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.931712750194392e-05,
      "loss": 1.532,
      "step": 175
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.9255874962427638e-05,
      "loss": 1.4632,
      "step": 180
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.9192061645046724e-05,
      "loss": 1.5057,
      "step": 185
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.9125699012565204e-05,
      "loss": 1.5023,
      "step": 190
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.9056798985679688e-05,
      "loss": 1.4423,
      "step": 195
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.8985373940878053e-05,
      "loss": 1.4968,
      "step": 200
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.8911436708216276e-05,
      "loss": 1.4767,
      "step": 205
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.883500056901376e-05,
      "loss": 1.4968,
      "step": 210
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.875607925346762e-05,
      "loss": 1.4842,
      "step": 215
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.867468693818634e-05,
      "loss": 1.4921,
      "step": 220
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.859083824364323e-05,
      "loss": 1.4969,
      "step": 225
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.8504548231550143e-05,
      "loss": 1.4423,
      "step": 230
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.8415832402151956e-05,
      "loss": 1.5165,
      "step": 235
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.832470669144227e-05,
      "loss": 1.4715,
      "step": 240
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.8231187468300836e-05,
      "loss": 1.4747,
      "step": 245
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.8135291531553192e-05,
      "loss": 1.466,
      "step": 250
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.8037036106953134e-05,
      "loss": 1.4609,
      "step": 255
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.793643884408843e-05,
      "loss": 1.4649,
      "step": 260
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.7833517813210437e-05,
      "loss": 1.4923,
      "step": 265
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.7728291501988173e-05,
      "loss": 1.4968,
      "step": 270
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7620778812187338e-05,
      "loss": 1.4545,
      "step": 275
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7510999056275038e-05,
      "loss": 1.4791,
      "step": 280
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.739897195395067e-05,
      "loss": 1.4849,
      "step": 285
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.728471762860369e-05,
      "loss": 1.4492,
      "step": 290
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.716825660369885e-05,
      "loss": 1.5052,
      "step": 295
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.704960979908957e-05,
      "loss": 1.4701,
      "step": 300
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.6928798527260127e-05,
      "loss": 1.48,
      "step": 305
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.680584448949729e-05,
      "loss": 1.5158,
      "step": 310
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.6680769771992136e-05,
      "loss": 1.5047,
      "step": 315
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.6553596841872682e-05,
      "loss": 1.5023,
      "step": 320
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.6424348543168177e-05,
      "loss": 1.4932,
      "step": 325
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.6293048092705586e-05,
      "loss": 1.5241,
      "step": 330
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.6159719075939196e-05,
      "loss": 1.4969,
      "step": 335
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.602438544271395e-05,
      "loss": 1.4983,
      "step": 340
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.5887071502963338e-05,
      "loss": 1.4801,
      "step": 345
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.574780192234264e-05,
      "loss": 1.4595,
      "step": 350
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.5606601717798212e-05,
      "loss": 1.4901,
      "step": 355
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.5463496253073726e-05,
      "loss": 1.4939,
      "step": 360
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.531851123415406e-05,
      "loss": 1.4791,
      "step": 365
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.5171672704647785e-05,
      "loss": 1.448,
      "step": 370
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.502300704110891e-05,
      "loss": 1.4857,
      "step": 375
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.4872540948298913e-05,
      "loss": 1.4829,
      "step": 380
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.472030145438974e-05,
      "loss": 1.4786,
      "step": 385
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.4566315906108772e-05,
      "loss": 1.4118,
      "step": 390
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.4410611963826522e-05,
      "loss": 1.429,
      "step": 395
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.4253217596588036e-05,
      "loss": 1.4719,
      "step": 400
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.4094161077088784e-05,
      "loss": 1.5054,
      "step": 405
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.3933470976596088e-05,
      "loss": 1.4493,
      "step": 410
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.3771176159816846e-05,
      "loss": 1.4957,
      "step": 415
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.360730577971259e-05,
      "loss": 1.4764,
      "step": 420
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.3441889272262742e-05,
      "loss": 1.5056,
      "step": 425
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.3274956351177037e-05,
      "loss": 1.4732,
      "step": 430
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.3106537002558074e-05,
      "loss": 1.5047,
      "step": 435
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.293666147951491e-05,
      "loss": 1.5098,
      "step": 440
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.2765360296728697e-05,
      "loss": 1.504,
      "step": 445
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.259266422497137e-05,
      "loss": 1.4742,
      "step": 450
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.2418604285578273e-05,
      "loss": 1.482,
      "step": 455
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.2243211744875818e-05,
      "loss": 1.4702,
      "step": 460
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.2066518108565137e-05,
      "loss": 1.4839,
      "step": 465
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.18885551160627e-05,
      "loss": 1.4942,
      "step": 470
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.1709354734798998e-05,
      "loss": 1.4497,
      "step": 475
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.152894915447624e-05,
      "loss": 1.4927,
      "step": 480
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.134737078128611e-05,
      "loss": 1.4794,
      "step": 485
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.1164652232088674e-05,
      "loss": 1.5034,
      "step": 490
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.0980826328553416e-05,
      "loss": 1.5157,
      "step": 495
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.0795926091263504e-05,
      "loss": 1.4681,
      "step": 500
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.0609984733784287e-05,
      "loss": 1.4732,
      "step": 505
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.042303565669719e-05,
      "loss": 1.5047,
      "step": 510
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.0235112441599948e-05,
      "loss": 1.5093,
      "step": 515
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.0046248845074373e-05,
      "loss": 1.4515,
      "step": 520
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.9856478792622666e-05,
      "loss": 1.5051,
      "step": 525
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.9665836372573397e-05,
      "loss": 1.5073,
      "step": 530
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.947435582995821e-05,
      "loss": 1.4952,
      "step": 535
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.928207156036043e-05,
      "loss": 1.4308,
      "step": 540
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.9089018103736568e-05,
      "loss": 1.4588,
      "step": 545
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.8895230138211942e-05,
      "loss": 1.5477,
      "step": 550
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.870074247385144e-05,
      "loss": 1.4979,
      "step": 555
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.8505590046406615e-05,
      "loss": 1.4487,
      "step": 560
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.8309807911040186e-05,
      "loss": 1.4671,
      "step": 565
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.8113431236029078e-05,
      "loss": 1.4486,
      "step": 570
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.7916495296447162e-05,
      "loss": 1.483,
      "step": 575
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.771903546782883e-05,
      "loss": 1.4896,
      "step": 580
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.7521087219814454e-05,
      "loss": 1.5259,
      "step": 585
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.7322686109779032e-05,
      "loss": 1.4845,
      "step": 590
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7123867776445e-05,
      "loss": 1.4866,
      "step": 595
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.692466793348047e-05,
      "loss": 1.4968,
      "step": 600
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.6725122363084004e-05,
      "loss": 1.4582,
      "step": 605
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.6525266909557046e-05,
      "loss": 1.4605,
      "step": 610
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.6325137472865262e-05,
      "loss": 1.4391,
      "step": 615
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.6259799003601074,
      "eval_runtime": 967.3528,
      "eval_samples_per_second": 8.998,
      "eval_steps_per_second": 2.249,
      "step": 617
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.6124770002189804e-05,
      "loss": 1.5992,
      "step": 620
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5924200489469782e-05,
      "loss": 1.1688,
      "step": 625
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.572346496293706e-05,
      "loss": 1.1778,
      "step": 630
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5522599480644496e-05,
      "loss": 1.1652,
      "step": 635
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.532164012398886e-05,
      "loss": 1.1344,
      "step": 640
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5120622991229545e-05,
      "loss": 1.1474,
      "step": 645
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.4919584191004244e-05,
      "loss": 1.1457,
      "step": 650
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.471855983584276e-05,
      "loss": 1.1441,
      "step": 655
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.4517586035680145e-05,
      "loss": 1.1546,
      "step": 660
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.431669889137027e-05,
      "loss": 1.1526,
      "step": 665
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4115934488201047e-05,
      "loss": 1.1778,
      "step": 670
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.3915328889412434e-05,
      "loss": 1.1468,
      "step": 675
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.3714918129718418e-05,
      "loss": 1.1367,
      "step": 680
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.3514738208834112e-05,
      "loss": 1.1972,
      "step": 685
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.331482508500912e-05,
      "loss": 1.1701,
      "step": 690
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.31152146685684e-05,
      "loss": 1.1911,
      "step": 695
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.2915942815461677e-05,
      "loss": 1.1758,
      "step": 700
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.2717045320822658e-05,
      "loss": 1.1486,
      "step": 705
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2518557912539185e-05,
      "loss": 1.1502,
      "step": 710
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.232051624483541e-05,
      "loss": 1.1459,
      "step": 715
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.2122955891867278e-05,
      "loss": 1.1546,
      "step": 720
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.1925912341332324e-05,
      "loss": 1.165,
      "step": 725
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.1729420988095042e-05,
      "loss": 1.1548,
      "step": 730
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.1533517127828926e-05,
      "loss": 1.1454,
      "step": 735
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.1338235950676305e-05,
      "loss": 1.19,
      "step": 740
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.1143612534927153e-05,
      "loss": 1.1475,
      "step": 745
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.0949681840717997e-05,
      "loss": 1.1754,
      "step": 750
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.0756478703752036e-05,
      "loss": 1.1041,
      "step": 755
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.0564037829041609e-05,
      "loss": 1.1465,
      "step": 760
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.037239378467416e-05,
      "loss": 1.1704,
      "step": 765
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.0181580995602766e-05,
      "loss": 1.1716,
      "step": 770
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.991633737462405e-06,
      "loss": 1.1902,
      "step": 775
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.802586130413045e-06,
      "loss": 1.1342,
      "step": 780
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.614472133010623e-06,
      "loss": 1.1848,
      "step": 785
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.42732553610712e-06,
      "loss": 1.1652,
      "step": 790
    },
    {
      "epoch": 1.29,
      "learning_rate": 9.241179956780689e-06,
      "loss": 1.1832,
      "step": 795
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.056068832297041e-06,
      "loss": 1.1935,
      "step": 800
    },
    {
      "epoch": 1.3,
      "learning_rate": 8.872025414103135e-06,
      "loss": 1.1714,
      "step": 805
    },
    {
      "epoch": 1.31,
      "learning_rate": 8.689082761854213e-06,
      "loss": 1.1525,
      "step": 810
    },
    {
      "epoch": 1.32,
      "learning_rate": 8.507273737475307e-06,
      "loss": 1.1701,
      "step": 815
    },
    {
      "epoch": 1.33,
      "learning_rate": 8.326630999258286e-06,
      "loss": 1.1761,
      "step": 820
    },
    {
      "epoch": 1.34,
      "learning_rate": 8.14718699599542e-06,
      "loss": 1.1323,
      "step": 825
    },
    {
      "epoch": 1.34,
      "learning_rate": 7.968973961150653e-06,
      "loss": 1.1768,
      "step": 830
    },
    {
      "epoch": 1.35,
      "learning_rate": 7.792023907069486e-06,
      "loss": 1.1487,
      "step": 835
    },
    {
      "epoch": 1.36,
      "learning_rate": 7.616368619228645e-06,
      "loss": 1.1445,
      "step": 840
    },
    {
      "epoch": 1.37,
      "learning_rate": 7.442039650526419e-06,
      "loss": 1.1348,
      "step": 845
    },
    {
      "epoch": 1.38,
      "learning_rate": 7.2690683156148705e-06,
      "loss": 1.1478,
      "step": 850
    },
    {
      "epoch": 1.39,
      "learning_rate": 7.097485685274776e-06,
      "loss": 1.1625,
      "step": 855
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.927322580834376e-06,
      "loss": 1.1331,
      "step": 860
    },
    {
      "epoch": 1.4,
      "learning_rate": 6.758609568632982e-06,
      "loss": 1.1608,
      "step": 865
    },
    {
      "epoch": 1.41,
      "learning_rate": 6.591376954530345e-06,
      "loss": 1.1349,
      "step": 870
    },
    {
      "epoch": 1.42,
      "learning_rate": 6.4256547784628e-06,
      "loss": 1.1569,
      "step": 875
    },
    {
      "epoch": 1.43,
      "learning_rate": 6.261472809047244e-06,
      "loss": 1.152,
      "step": 880
    },
    {
      "epoch": 1.43,
      "learning_rate": 6.098860538233769e-06,
      "loss": 1.1498,
      "step": 885
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.937847176008072e-06,
      "loss": 1.1802,
      "step": 890
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.778461645144438e-06,
      "loss": 1.1717,
      "step": 895
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.6207325760103845e-06,
      "loss": 1.152,
      "step": 900
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.464688301423782e-06,
      "loss": 1.1414,
      "step": 905
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.310356851563427e-06,
      "loss": 1.154,
      "step": 910
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.1577659489340255e-06,
      "loss": 1.1353,
      "step": 915
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.00694300338638e-06,
      "loss": 1.1783,
      "step": 920
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.857915107193783e-06,
      "loss": 1.14,
      "step": 925
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.710709030185422e-06,
      "loss": 1.1755,
      "step": 930
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.565351214937748e-06,
      "loss": 1.1121,
      "step": 935
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.421867772024601e-06,
      "loss": 1.171,
      "step": 940
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.280284475326948e-06,
      "loss": 1.1517,
      "step": 945
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.140626757403176e-06,
      "loss": 1.1694,
      "step": 950
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.002919704920607e-06,
      "loss": 1.1465,
      "step": 955
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.8671880541492236e-06,
      "loss": 1.179,
      "step": 960
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.7334561865182694e-06,
      "loss": 1.1498,
      "step": 965
    },
    {
      "epoch": 1.57,
      "learning_rate": 3.6017481242366503e-06,
      "loss": 1.1438,
      "step": 970
    },
    {
      "epoch": 1.58,
      "learning_rate": 3.472087525977823e-06,
      "loss": 1.1647,
      "step": 975
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.3444976826299754e-06,
      "loss": 1.1475,
      "step": 980
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.219001513112329e-06,
      "loss": 1.15,
      "step": 985
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.0956215602581933e-06,
      "loss": 1.1613,
      "step": 990
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.974379986765622e-06,
      "loss": 1.1672,
      "step": 995
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.855298571216316e-06,
      "loss": 1.1702,
      "step": 1000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.738398704163561e-06,
      "loss": 1.1634,
      "step": 1005
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.6237013842898533e-06,
      "loss": 1.1756,
      "step": 1010
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.511227214634887e-06,
      "loss": 1.1075,
      "step": 1015
    }
  ],
  "max_steps": 1234,
  "num_train_epochs": 2,
  "total_flos": 4.827958566725878e+17,
  "trial_name": null,
  "trial_params": null
}