|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.7708119218910586, |
|
"eval_steps": 500, |
|
"global_step": 1500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9999979150098865e-05, |
|
"loss": 4.0418, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999989444743508e-05, |
|
"loss": 3.8361, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999974458911041e-05, |
|
"loss": 3.8737, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9999529575515425e-05, |
|
"loss": 3.806, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9999249407210515e-05, |
|
"loss": 3.4125, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9998904084925845e-05, |
|
"loss": 3.6489, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999849360956141e-05, |
|
"loss": 3.4719, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999811831976564e-05, |
|
"loss": 3.5265, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999759057166764e-05, |
|
"loss": 3.6889, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9996997673913204e-05, |
|
"loss": 3.5104, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999633962804756e-05, |
|
"loss": 3.5778, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999561643578575e-05, |
|
"loss": 3.3353, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999482809901257e-05, |
|
"loss": 3.3511, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9993974619782625e-05, |
|
"loss": 3.6355, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999305600032027e-05, |
|
"loss": 3.3915, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999207224301965e-05, |
|
"loss": 3.4926, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999102335044467e-05, |
|
"loss": 3.4918, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9989909325328996e-05, |
|
"loss": 3.3984, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.998873017057605e-05, |
|
"loss": 3.4989, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.998748588925897e-05, |
|
"loss": 3.6368, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9986176484620665e-05, |
|
"loss": 3.4412, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.998480196007375e-05, |
|
"loss": 3.4708, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9983362319200554e-05, |
|
"loss": 3.37, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.998185756575313e-05, |
|
"loss": 3.4524, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9980287703653225e-05, |
|
"loss": 3.3863, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997865273699227e-05, |
|
"loss": 3.4414, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997729789123732e-05, |
|
"loss": 3.3335, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997554574721849e-05, |
|
"loss": 3.231, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997372851099727e-05, |
|
"loss": 3.5962, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.997184618730983e-05, |
|
"loss": 3.2795, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996989878106192e-05, |
|
"loss": 3.5384, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996788629732897e-05, |
|
"loss": 3.3274, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996580874135597e-05, |
|
"loss": 3.2292, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.996366611855753e-05, |
|
"loss": 3.4829, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.996145843451785e-05, |
|
"loss": 3.1998, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9959185694990655e-05, |
|
"loss": 3.3757, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.995684790589926e-05, |
|
"loss": 3.1001, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.99544450733365e-05, |
|
"loss": 3.2301, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.995197720356471e-05, |
|
"loss": 3.3187, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.994944430301576e-05, |
|
"loss": 3.527, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994684637829098e-05, |
|
"loss": 3.3808, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994418343616119e-05, |
|
"loss": 3.2406, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994145548356664e-05, |
|
"loss": 3.3463, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.993866252761702e-05, |
|
"loss": 3.2972, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.993580457559145e-05, |
|
"loss": 3.4135, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9932881634938424e-05, |
|
"loss": 3.4025, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.992989371327581e-05, |
|
"loss": 3.662, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.992684081839086e-05, |
|
"loss": 3.4279, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.9923722958240124e-05, |
|
"loss": 3.3075, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.992054014094951e-05, |
|
"loss": 3.2826, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991729237481417e-05, |
|
"loss": 3.4992, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991397966829856e-05, |
|
"loss": 3.2421, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.991060203003639e-05, |
|
"loss": 3.2615, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9907159468830585e-05, |
|
"loss": 3.3299, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.990365199365327e-05, |
|
"loss": 3.3117, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9900079613645754e-05, |
|
"loss": 3.4729, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9896442338118524e-05, |
|
"loss": 3.2521, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.989274017655117e-05, |
|
"loss": 3.261, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9888973138592414e-05, |
|
"loss": 3.4403, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.988514123406004e-05, |
|
"loss": 3.3408, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9881244472940914e-05, |
|
"loss": 3.3208, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9877282865390904e-05, |
|
"loss": 3.6889, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9873256421734915e-05, |
|
"loss": 3.1537, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.986916515246681e-05, |
|
"loss": 3.1837, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.986500906824941e-05, |
|
"loss": 3.23, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9860788179914456e-05, |
|
"loss": 3.4343, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.985650249846258e-05, |
|
"loss": 3.3317, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9852152035063294e-05, |
|
"loss": 3.155, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.984773680105493e-05, |
|
"loss": 3.3527, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.984415798673684e-05, |
|
"loss": 3.3783, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.9839626194743575e-05, |
|
"loss": 3.2567, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.983502966478656e-05, |
|
"loss": 3.1278, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9830368408845425e-05, |
|
"loss": 3.2344, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.982564243906852e-05, |
|
"loss": 3.3458, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9820851767772844e-05, |
|
"loss": 3.1467, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.981599640744402e-05, |
|
"loss": 3.217, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.981107637073628e-05, |
|
"loss": 3.4011, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.98060916704724e-05, |
|
"loss": 3.4098, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9801042319643686e-05, |
|
"loss": 3.3332, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.979592833140996e-05, |
|
"loss": 3.4042, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.979074971909948e-05, |
|
"loss": 3.3127, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.978550649620894e-05, |
|
"loss": 3.1726, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.978019867640342e-05, |
|
"loss": 3.3029, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.977482627351637e-05, |
|
"loss": 3.3519, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.976938930154954e-05, |
|
"loss": 3.2507, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.976388777467297e-05, |
|
"loss": 3.1322, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.9758321707224954e-05, |
|
"loss": 3.1306, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.975269111371197e-05, |
|
"loss": 3.189, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.974699600880869e-05, |
|
"loss": 3.2598, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.974123640735791e-05, |
|
"loss": 3.3483, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.973541232437052e-05, |
|
"loss": 3.4678, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.972952377502545e-05, |
|
"loss": 2.9668, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9723570774669687e-05, |
|
"loss": 3.372, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.971755333881814e-05, |
|
"loss": 3.2808, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.971147148315367e-05, |
|
"loss": 3.2235, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.9705325223527057e-05, |
|
"loss": 3.0441, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.96991145759569e-05, |
|
"loss": 3.4724, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.969283955662963e-05, |
|
"loss": 3.389, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.968650018189943e-05, |
|
"loss": 3.3817, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.968009646828822e-05, |
|
"loss": 3.2597, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.967362843248559e-05, |
|
"loss": 3.253, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.966709609134878e-05, |
|
"loss": 3.1903, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.9660499461902645e-05, |
|
"loss": 3.1286, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.965383856133953e-05, |
|
"loss": 3.292, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.964711340701935e-05, |
|
"loss": 3.4217, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.964032401646944e-05, |
|
"loss": 3.2587, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.963347040738457e-05, |
|
"loss": 3.3325, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.962655259762687e-05, |
|
"loss": 3.3475, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.961957060522581e-05, |
|
"loss": 3.2882, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.961252444837809e-05, |
|
"loss": 3.3631, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.9605414145447696e-05, |
|
"loss": 3.2589, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.959823971496574e-05, |
|
"loss": 3.4855, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.959100117563052e-05, |
|
"loss": 3.1354, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.958369854630737e-05, |
|
"loss": 3.3037, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.957633184602867e-05, |
|
"loss": 3.1023, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.956890109399381e-05, |
|
"loss": 3.1933, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.956140630956908e-05, |
|
"loss": 3.3994, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.955384751228767e-05, |
|
"loss": 3.1178, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.954622472184961e-05, |
|
"loss": 3.2513, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9538537958121686e-05, |
|
"loss": 3.5002, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.953078724113744e-05, |
|
"loss": 3.1867, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9522972591097074e-05, |
|
"loss": 3.3883, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9515094028367425e-05, |
|
"loss": 3.3708, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.950715157348191e-05, |
|
"loss": 3.2425, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9499145247140434e-05, |
|
"loss": 3.1046, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9491075070209385e-05, |
|
"loss": 3.3236, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9482941063721576e-05, |
|
"loss": 3.4957, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.947474324887614e-05, |
|
"loss": 3.2225, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9466481647038554e-05, |
|
"loss": 3.5314, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9458156279740484e-05, |
|
"loss": 3.1028, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.944976716867984e-05, |
|
"loss": 3.2322, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.9441314335720616e-05, |
|
"loss": 3.1332, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.943279780289292e-05, |
|
"loss": 3.2028, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.9424217592392854e-05, |
|
"loss": 3.2408, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.9415573726582465e-05, |
|
"loss": 3.0121, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.940686622798974e-05, |
|
"loss": 3.2182, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.93980951193085e-05, |
|
"loss": 3.4794, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.93892604233983e-05, |
|
"loss": 3.3226, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.938036216328448e-05, |
|
"loss": 3.1917, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.9371400362158016e-05, |
|
"loss": 3.2176, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.936237504337549e-05, |
|
"loss": 2.933, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.935328623045902e-05, |
|
"loss": 3.134, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.934413394709621e-05, |
|
"loss": 2.983, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.933491821714009e-05, |
|
"loss": 3.4337, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.9325639064609045e-05, |
|
"loss": 3.4699, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.931629651368673e-05, |
|
"loss": 3.4081, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.930689058872208e-05, |
|
"loss": 3.2174, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.929742131422913e-05, |
|
"loss": 3.4953, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.92878887148871e-05, |
|
"loss": 3.217, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.927829281554017e-05, |
|
"loss": 3.2689, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.926863364119756e-05, |
|
"loss": 3.2994, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.9258911217033355e-05, |
|
"loss": 3.3868, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.924912556838651e-05, |
|
"loss": 3.3169, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.923927672076075e-05, |
|
"loss": 3.0466, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.92293646998245e-05, |
|
"loss": 3.0857, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.921938953141084e-05, |
|
"loss": 3.5001, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.920935124151744e-05, |
|
"loss": 3.1633, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.919924985630647e-05, |
|
"loss": 3.4816, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.9189085402104516e-05, |
|
"loss": 3.2032, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.917885790540258e-05, |
|
"loss": 3.164, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.916856739285593e-05, |
|
"loss": 3.3047, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.91582138912841e-05, |
|
"loss": 3.2366, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.9147797427670766e-05, |
|
"loss": 3.3105, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.91373180291637e-05, |
|
"loss": 3.4871, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.912677572307473e-05, |
|
"loss": 3.0442, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.911617053687957e-05, |
|
"loss": 3.1934, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.910550249821787e-05, |
|
"loss": 3.27, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.9094771634893075e-05, |
|
"loss": 3.3252, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.908397797487237e-05, |
|
"loss": 2.9852, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.907312154628658e-05, |
|
"loss": 3.1331, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.906220237743014e-05, |
|
"loss": 3.1444, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.905122049676101e-05, |
|
"loss": 3.3702, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.904017593290056e-05, |
|
"loss": 3.0531, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.902906871463355e-05, |
|
"loss": 3.2007, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.901789887090804e-05, |
|
"loss": 3.0837, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.900666643083528e-05, |
|
"loss": 3.2268, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.8995371423689684e-05, |
|
"loss": 2.9249, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.898401387890873e-05, |
|
"loss": 3.2058, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.897259382609285e-05, |
|
"loss": 3.0971, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.8961111295005446e-05, |
|
"loss": 3.2227, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.894956631557269e-05, |
|
"loss": 3.2907, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.893795891788357e-05, |
|
"loss": 3.1382, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.892628913218969e-05, |
|
"loss": 2.8996, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.8914556988905315e-05, |
|
"loss": 2.9693, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.890276251860717e-05, |
|
"loss": 3.0134, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.889090575203445e-05, |
|
"loss": 2.9696, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.887898672008871e-05, |
|
"loss": 3.3306, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.8867005453833774e-05, |
|
"loss": 3.2199, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.885496198449567e-05, |
|
"loss": 3.2795, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.8842856343462535e-05, |
|
"loss": 3.3482, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.883068856228454e-05, |
|
"loss": 3.322, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.881845867267382e-05, |
|
"loss": 3.0443, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.8806166706504375e-05, |
|
"loss": 3.1826, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.879381269581197e-05, |
|
"loss": 3.3732, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.878139667279411e-05, |
|
"loss": 3.3457, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.876891866980988e-05, |
|
"loss": 3.182, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.875637871937994e-05, |
|
"loss": 3.2416, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.874377685418635e-05, |
|
"loss": 3.2411, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.873111310707259e-05, |
|
"loss": 3.2836, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8718387511043385e-05, |
|
"loss": 3.1773, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.870560009926465e-05, |
|
"loss": 3.1723, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.8692750905063424e-05, |
|
"loss": 3.1535, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.867983996192776e-05, |
|
"loss": 2.977, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.866686730350663e-05, |
|
"loss": 3.1236, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.865383296360987e-05, |
|
"loss": 3.4016, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.864073697620807e-05, |
|
"loss": 3.1547, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.862757937543248e-05, |
|
"loss": 3.143, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.861436019557492e-05, |
|
"loss": 3.1989, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.860107947108772e-05, |
|
"loss": 3.0308, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.858773723658361e-05, |
|
"loss": 3.409, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.8574333526835605e-05, |
|
"loss": 3.2011, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.856086837677697e-05, |
|
"loss": 3.1954, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.854734182150107e-05, |
|
"loss": 3.1366, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.853375389626133e-05, |
|
"loss": 3.3789, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.85201046364711e-05, |
|
"loss": 3.1337, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.85063940777036e-05, |
|
"loss": 3.2278, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.849262225569181e-05, |
|
"loss": 3.1339, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.847878920632834e-05, |
|
"loss": 2.954, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.846489496566542e-05, |
|
"loss": 3.0878, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.845093956991475e-05, |
|
"loss": 3.2329, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.843692305544738e-05, |
|
"loss": 3.2245, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.84228454587937e-05, |
|
"loss": 3.2865, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.8408706816643254e-05, |
|
"loss": 3.1569, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.83945071658447e-05, |
|
"loss": 3.2279, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.838024654340571e-05, |
|
"loss": 3.0554, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.836592498649283e-05, |
|
"loss": 3.3924, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.835154253243147e-05, |
|
"loss": 3.0695, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.8337099218705695e-05, |
|
"loss": 3.2666, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.832259508295822e-05, |
|
"loss": 3.135, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.830803016299027e-05, |
|
"loss": 2.947, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.829340449676147e-05, |
|
"loss": 3.13, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.82787181223898e-05, |
|
"loss": 3.038, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.826397107815142e-05, |
|
"loss": 3.0761, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.824916340248064e-05, |
|
"loss": 3.1496, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.823429513396977e-05, |
|
"loss": 2.8758, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.821936631136906e-05, |
|
"loss": 3.218, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.820437697358656e-05, |
|
"loss": 3.0933, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.8189327159688035e-05, |
|
"loss": 3.2388, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.817421690889689e-05, |
|
"loss": 3.1914, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.815904626059401e-05, |
|
"loss": 3.2259, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.814381525431771e-05, |
|
"loss": 3.1867, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.8128523929763626e-05, |
|
"loss": 3.1134, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.811317232678456e-05, |
|
"loss": 3.3846, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8097760485390444e-05, |
|
"loss": 3.2507, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8082288445748194e-05, |
|
"loss": 3.0992, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.806675624818162e-05, |
|
"loss": 3.0715, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8051163933171316e-05, |
|
"loss": 3.2028, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.8035511541354547e-05, |
|
"loss": 3.2714, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.801979911352519e-05, |
|
"loss": 3.1238, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.800402669063353e-05, |
|
"loss": 3.2508, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.7988194313786275e-05, |
|
"loss": 3.2359, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.797230202424633e-05, |
|
"loss": 2.8538, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.7956349863432794e-05, |
|
"loss": 3.1991, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.794033787292078e-05, |
|
"loss": 3.0467, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7924266094441346e-05, |
|
"loss": 3.1088, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7908134569881344e-05, |
|
"loss": 3.128, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.789194334128338e-05, |
|
"loss": 3.3195, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7875692450845624e-05, |
|
"loss": 3.2049, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.785938194092176e-05, |
|
"loss": 3.1477, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7843011854020846e-05, |
|
"loss": 3.2506, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7826582232807213e-05, |
|
"loss": 3.1822, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7810093120100374e-05, |
|
"loss": 2.9576, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.779354455887485e-05, |
|
"loss": 3.2086, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.777693659226013e-05, |
|
"loss": 3.2935, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.776026926354052e-05, |
|
"loss": 2.9873, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.774354261615503e-05, |
|
"loss": 3.2671, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.772675669369728e-05, |
|
"loss": 3.0882, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.770991153991537e-05, |
|
"loss": 3.2727, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.769300719871177e-05, |
|
"loss": 2.9907, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7676043714143195e-05, |
|
"loss": 3.1303, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.765902113042053e-05, |
|
"loss": 3.1412, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7641939491908674e-05, |
|
"loss": 3.3585, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7624798843126404e-05, |
|
"loss": 3.1958, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7607599228746355e-05, |
|
"loss": 3.1581, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.759034069359478e-05, |
|
"loss": 3.224, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.7573023282651536e-05, |
|
"loss": 3.1031, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.75556470410499e-05, |
|
"loss": 3.0914, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.753821201407648e-05, |
|
"loss": 3.2474, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.75207182471711e-05, |
|
"loss": 3.2045, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.7503165785926676e-05, |
|
"loss": 2.9288, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.748555467608909e-05, |
|
"loss": 3.3084, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.7467884963557064e-05, |
|
"loss": 3.1761, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.745015669438207e-05, |
|
"loss": 3.0644, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.7432369914768185e-05, |
|
"loss": 3.0874, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.7414524671071994e-05, |
|
"loss": 3.1106, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.739662100980242e-05, |
|
"loss": 2.8364, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.737865897762066e-05, |
|
"loss": 3.0884, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.736063862134004e-05, |
|
"loss": 3.1134, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.734255998792589e-05, |
|
"loss": 3.3668, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.732442312449541e-05, |
|
"loss": 2.9823, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.730622807831758e-05, |
|
"loss": 3.0893, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.728797489681301e-05, |
|
"loss": 3.1091, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.726966362755383e-05, |
|
"loss": 2.9348, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.725129431826355e-05, |
|
"loss": 3.2506, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.723286701681695e-05, |
|
"loss": 2.9876, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.7214381771239946e-05, |
|
"loss": 2.9158, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.719583862970949e-05, |
|
"loss": 3.2035, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7177237640553414e-05, |
|
"loss": 2.9031, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7158578852250304e-05, |
|
"loss": 3.2779, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7139862313429385e-05, |
|
"loss": 3.1219, |
|
"step": 1500 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 9730, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 300, |
|
"total_flos": 1.7702871849566208e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
}