|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.620760534429599, |
|
"eval_steps": 500, |
|
"global_step": 5100, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9999979150098865e-05, |
|
"loss": 4.0418, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999989444743508e-05, |
|
"loss": 3.8361, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999974458911041e-05, |
|
"loss": 3.8737, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9999529575515425e-05, |
|
"loss": 3.806, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9999249407210515e-05, |
|
"loss": 3.4125, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9998904084925845e-05, |
|
"loss": 3.6489, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999849360956141e-05, |
|
"loss": 3.4719, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999811831976564e-05, |
|
"loss": 3.5265, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.999759057166764e-05, |
|
"loss": 3.6889, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9996997673913204e-05, |
|
"loss": 3.5104, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999633962804756e-05, |
|
"loss": 3.5778, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999561643578575e-05, |
|
"loss": 3.3353, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.999482809901257e-05, |
|
"loss": 3.3511, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9993974619782625e-05, |
|
"loss": 3.6355, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999305600032027e-05, |
|
"loss": 3.3915, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999207224301965e-05, |
|
"loss": 3.4926, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.999102335044467e-05, |
|
"loss": 3.4918, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9989909325328996e-05, |
|
"loss": 3.3984, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.998873017057605e-05, |
|
"loss": 3.4989, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.998748588925897e-05, |
|
"loss": 3.6368, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9986176484620665e-05, |
|
"loss": 3.4412, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.998480196007375e-05, |
|
"loss": 3.4708, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9983362319200554e-05, |
|
"loss": 3.37, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.998185756575313e-05, |
|
"loss": 3.4524, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9980287703653225e-05, |
|
"loss": 3.3863, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997865273699227e-05, |
|
"loss": 3.4414, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997729789123732e-05, |
|
"loss": 3.3335, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997554574721849e-05, |
|
"loss": 3.231, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.997372851099727e-05, |
|
"loss": 3.5962, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.997184618730983e-05, |
|
"loss": 3.2795, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996989878106192e-05, |
|
"loss": 3.5384, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996788629732897e-05, |
|
"loss": 3.3274, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.996580874135597e-05, |
|
"loss": 3.2292, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.996366611855753e-05, |
|
"loss": 3.4829, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.996145843451785e-05, |
|
"loss": 3.1998, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9959185694990655e-05, |
|
"loss": 3.3757, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.995684790589926e-05, |
|
"loss": 3.1001, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.99544450733365e-05, |
|
"loss": 3.2301, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.995197720356471e-05, |
|
"loss": 3.3187, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.994944430301576e-05, |
|
"loss": 3.527, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994684637829098e-05, |
|
"loss": 3.3808, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994418343616119e-05, |
|
"loss": 3.2406, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.994145548356664e-05, |
|
"loss": 3.3463, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.993866252761702e-05, |
|
"loss": 3.2972, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.993580457559145e-05, |
|
"loss": 3.4135, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9932881634938424e-05, |
|
"loss": 3.4025, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.992989371327581e-05, |
|
"loss": 3.662, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.992684081839086e-05, |
|
"loss": 3.4279, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.9923722958240124e-05, |
|
"loss": 3.3075, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.992054014094951e-05, |
|
"loss": 3.2826, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991729237481417e-05, |
|
"loss": 3.4992, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991397966829856e-05, |
|
"loss": 3.2421, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.991060203003639e-05, |
|
"loss": 3.2615, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9907159468830585e-05, |
|
"loss": 3.3299, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.990365199365327e-05, |
|
"loss": 3.3117, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9900079613645754e-05, |
|
"loss": 3.4729, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9896442338118524e-05, |
|
"loss": 3.2521, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.989274017655117e-05, |
|
"loss": 3.261, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9888973138592414e-05, |
|
"loss": 3.4403, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.988514123406004e-05, |
|
"loss": 3.3408, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9881244472940914e-05, |
|
"loss": 3.3208, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9877282865390904e-05, |
|
"loss": 3.6889, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9873256421734915e-05, |
|
"loss": 3.1537, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.986916515246681e-05, |
|
"loss": 3.1837, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.986500906824941e-05, |
|
"loss": 3.23, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9860788179914456e-05, |
|
"loss": 3.4343, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.985650249846258e-05, |
|
"loss": 3.3317, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9852152035063294e-05, |
|
"loss": 3.155, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.984773680105493e-05, |
|
"loss": 3.3527, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.984415798673684e-05, |
|
"loss": 3.3783, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.9839626194743575e-05, |
|
"loss": 3.2567, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.983502966478656e-05, |
|
"loss": 3.1278, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9830368408845425e-05, |
|
"loss": 3.2344, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.982564243906852e-05, |
|
"loss": 3.3458, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9820851767772844e-05, |
|
"loss": 3.1467, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.981599640744402e-05, |
|
"loss": 3.217, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.981107637073628e-05, |
|
"loss": 3.4011, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.98060916704724e-05, |
|
"loss": 3.4098, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9801042319643686e-05, |
|
"loss": 3.3332, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.979592833140996e-05, |
|
"loss": 3.4042, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.979074971909948e-05, |
|
"loss": 3.3127, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.978550649620894e-05, |
|
"loss": 3.1726, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.978019867640342e-05, |
|
"loss": 3.3029, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.977482627351637e-05, |
|
"loss": 3.3519, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.976938930154954e-05, |
|
"loss": 3.2507, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.976388777467297e-05, |
|
"loss": 3.1322, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.9758321707224954e-05, |
|
"loss": 3.1306, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.975269111371197e-05, |
|
"loss": 3.189, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.974699600880869e-05, |
|
"loss": 3.2598, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.974123640735791e-05, |
|
"loss": 3.3483, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.973541232437052e-05, |
|
"loss": 3.4678, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.972952377502545e-05, |
|
"loss": 2.9668, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9723570774669687e-05, |
|
"loss": 3.372, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.971755333881814e-05, |
|
"loss": 3.2808, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.971147148315367e-05, |
|
"loss": 3.2235, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.9705325223527057e-05, |
|
"loss": 3.0441, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.96991145759569e-05, |
|
"loss": 3.4724, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.969283955662963e-05, |
|
"loss": 3.389, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.968650018189943e-05, |
|
"loss": 3.3817, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.968009646828822e-05, |
|
"loss": 3.2597, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.967362843248559e-05, |
|
"loss": 3.253, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.966709609134878e-05, |
|
"loss": 3.1903, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.9660499461902645e-05, |
|
"loss": 3.1286, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.965383856133953e-05, |
|
"loss": 3.292, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.964711340701935e-05, |
|
"loss": 3.4217, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.964032401646944e-05, |
|
"loss": 3.2587, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.963347040738457e-05, |
|
"loss": 3.3325, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.962655259762687e-05, |
|
"loss": 3.3475, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.961957060522581e-05, |
|
"loss": 3.2882, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.961252444837809e-05, |
|
"loss": 3.3631, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.9605414145447696e-05, |
|
"loss": 3.2589, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.959823971496574e-05, |
|
"loss": 3.4855, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.959100117563052e-05, |
|
"loss": 3.1354, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.958369854630737e-05, |
|
"loss": 3.3037, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.957633184602867e-05, |
|
"loss": 3.1023, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.956890109399381e-05, |
|
"loss": 3.1933, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.956140630956908e-05, |
|
"loss": 3.3994, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.955384751228767e-05, |
|
"loss": 3.1178, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.954622472184961e-05, |
|
"loss": 3.2513, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9538537958121686e-05, |
|
"loss": 3.5002, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.953078724113744e-05, |
|
"loss": 3.1867, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9522972591097074e-05, |
|
"loss": 3.3883, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9515094028367425e-05, |
|
"loss": 3.3708, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.950715157348191e-05, |
|
"loss": 3.2425, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9499145247140434e-05, |
|
"loss": 3.1046, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9491075070209385e-05, |
|
"loss": 3.3236, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9482941063721576e-05, |
|
"loss": 3.4957, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.947474324887614e-05, |
|
"loss": 3.2225, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9466481647038554e-05, |
|
"loss": 3.5314, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9458156279740484e-05, |
|
"loss": 3.1028, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.944976716867984e-05, |
|
"loss": 3.2322, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.9441314335720616e-05, |
|
"loss": 3.1332, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.943279780289292e-05, |
|
"loss": 3.2028, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.9424217592392854e-05, |
|
"loss": 3.2408, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.9415573726582465e-05, |
|
"loss": 3.0121, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.940686622798974e-05, |
|
"loss": 3.2182, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.93980951193085e-05, |
|
"loss": 3.4794, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.93892604233983e-05, |
|
"loss": 3.3226, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.938036216328448e-05, |
|
"loss": 3.1917, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.9371400362158016e-05, |
|
"loss": 3.2176, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.936237504337549e-05, |
|
"loss": 2.933, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.935328623045902e-05, |
|
"loss": 3.134, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.934413394709621e-05, |
|
"loss": 2.983, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.933491821714009e-05, |
|
"loss": 3.4337, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.9325639064609045e-05, |
|
"loss": 3.4699, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.931629651368673e-05, |
|
"loss": 3.4081, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.930689058872208e-05, |
|
"loss": 3.2174, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.929742131422913e-05, |
|
"loss": 3.4953, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.92878887148871e-05, |
|
"loss": 3.217, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.927829281554017e-05, |
|
"loss": 3.2689, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.926863364119756e-05, |
|
"loss": 3.2994, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.9258911217033355e-05, |
|
"loss": 3.3868, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.924912556838651e-05, |
|
"loss": 3.3169, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.923927672076075e-05, |
|
"loss": 3.0466, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.92293646998245e-05, |
|
"loss": 3.0857, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.921938953141084e-05, |
|
"loss": 3.5001, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.920935124151744e-05, |
|
"loss": 3.1633, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.919924985630647e-05, |
|
"loss": 3.4816, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.9189085402104516e-05, |
|
"loss": 3.2032, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.917885790540258e-05, |
|
"loss": 3.164, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.916856739285593e-05, |
|
"loss": 3.3047, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.91582138912841e-05, |
|
"loss": 3.2366, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.9147797427670766e-05, |
|
"loss": 3.3105, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.91373180291637e-05, |
|
"loss": 3.4871, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.912677572307473e-05, |
|
"loss": 3.0442, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.911617053687957e-05, |
|
"loss": 3.1934, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.910550249821787e-05, |
|
"loss": 3.27, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.9094771634893075e-05, |
|
"loss": 3.3252, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.908397797487237e-05, |
|
"loss": 2.9852, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.907312154628658e-05, |
|
"loss": 3.1331, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.906220237743014e-05, |
|
"loss": 3.1444, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.905122049676101e-05, |
|
"loss": 3.3702, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.904017593290056e-05, |
|
"loss": 3.0531, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.902906871463355e-05, |
|
"loss": 3.2007, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.901789887090804e-05, |
|
"loss": 3.0837, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.900666643083528e-05, |
|
"loss": 3.2268, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.8995371423689684e-05, |
|
"loss": 2.9249, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.898401387890873e-05, |
|
"loss": 3.2058, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.897259382609285e-05, |
|
"loss": 3.0971, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.8961111295005446e-05, |
|
"loss": 3.2227, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.894956631557269e-05, |
|
"loss": 3.2907, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.893795891788357e-05, |
|
"loss": 3.1382, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.892628913218969e-05, |
|
"loss": 2.8996, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.8914556988905315e-05, |
|
"loss": 2.9693, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.890276251860717e-05, |
|
"loss": 3.0134, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.889090575203445e-05, |
|
"loss": 2.9696, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.887898672008871e-05, |
|
"loss": 3.3306, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.8867005453833774e-05, |
|
"loss": 3.2199, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.885496198449567e-05, |
|
"loss": 3.2795, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.8842856343462535e-05, |
|
"loss": 3.3482, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.883068856228454e-05, |
|
"loss": 3.322, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.881845867267382e-05, |
|
"loss": 3.0443, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.8806166706504375e-05, |
|
"loss": 3.1826, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.879381269581197e-05, |
|
"loss": 3.3732, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.878139667279411e-05, |
|
"loss": 3.3457, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.876891866980988e-05, |
|
"loss": 3.182, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.875637871937994e-05, |
|
"loss": 3.2416, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.874377685418635e-05, |
|
"loss": 3.2411, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.873111310707259e-05, |
|
"loss": 3.2836, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8718387511043385e-05, |
|
"loss": 3.1773, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.870560009926465e-05, |
|
"loss": 3.1723, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.8692750905063424e-05, |
|
"loss": 3.1535, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.867983996192776e-05, |
|
"loss": 2.977, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.866686730350663e-05, |
|
"loss": 3.1236, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.865383296360987e-05, |
|
"loss": 3.4016, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.864073697620807e-05, |
|
"loss": 3.1547, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.862757937543248e-05, |
|
"loss": 3.143, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.861436019557492e-05, |
|
"loss": 3.1989, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.860107947108772e-05, |
|
"loss": 3.0308, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.858773723658361e-05, |
|
"loss": 3.409, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.8574333526835605e-05, |
|
"loss": 3.2011, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.856086837677697e-05, |
|
"loss": 3.1954, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.854734182150107e-05, |
|
"loss": 3.1366, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.853375389626133e-05, |
|
"loss": 3.3789, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.85201046364711e-05, |
|
"loss": 3.1337, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.85063940777036e-05, |
|
"loss": 3.2278, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.849262225569181e-05, |
|
"loss": 3.1339, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.847878920632834e-05, |
|
"loss": 2.954, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.846489496566542e-05, |
|
"loss": 3.0878, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.845093956991475e-05, |
|
"loss": 3.2329, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.843692305544738e-05, |
|
"loss": 3.2245, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.84228454587937e-05, |
|
"loss": 3.2865, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.8408706816643254e-05, |
|
"loss": 3.1569, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.83945071658447e-05, |
|
"loss": 3.2279, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.838024654340571e-05, |
|
"loss": 3.0554, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.836592498649283e-05, |
|
"loss": 3.3924, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.835154253243147e-05, |
|
"loss": 3.0695, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.8337099218705695e-05, |
|
"loss": 3.2666, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.832259508295822e-05, |
|
"loss": 3.135, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.830803016299027e-05, |
|
"loss": 2.947, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.829340449676147e-05, |
|
"loss": 3.13, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.82787181223898e-05, |
|
"loss": 3.038, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.826397107815142e-05, |
|
"loss": 3.0761, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.824916340248064e-05, |
|
"loss": 3.1496, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.823429513396977e-05, |
|
"loss": 2.8758, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.821936631136906e-05, |
|
"loss": 3.218, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.820437697358656e-05, |
|
"loss": 3.0933, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.8189327159688035e-05, |
|
"loss": 3.2388, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.817421690889689e-05, |
|
"loss": 3.1914, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.815904626059401e-05, |
|
"loss": 3.2259, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.814381525431771e-05, |
|
"loss": 3.1867, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.8128523929763626e-05, |
|
"loss": 3.1134, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.811317232678456e-05, |
|
"loss": 3.3846, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8097760485390444e-05, |
|
"loss": 3.2507, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8082288445748194e-05, |
|
"loss": 3.0992, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.806675624818162e-05, |
|
"loss": 3.0715, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8051163933171316e-05, |
|
"loss": 3.2028, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.8035511541354547e-05, |
|
"loss": 3.2714, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.801979911352519e-05, |
|
"loss": 3.1238, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.800402669063353e-05, |
|
"loss": 3.2508, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.7988194313786275e-05, |
|
"loss": 3.2359, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.797230202424633e-05, |
|
"loss": 2.8538, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.7956349863432794e-05, |
|
"loss": 3.1991, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.794033787292078e-05, |
|
"loss": 3.0467, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7924266094441346e-05, |
|
"loss": 3.1088, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7908134569881344e-05, |
|
"loss": 3.128, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.789194334128338e-05, |
|
"loss": 3.3195, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7875692450845624e-05, |
|
"loss": 3.2049, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.785938194092176e-05, |
|
"loss": 3.1477, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7843011854020846e-05, |
|
"loss": 3.2506, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7826582232807213e-05, |
|
"loss": 3.1822, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7810093120100374e-05, |
|
"loss": 2.9576, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.779354455887485e-05, |
|
"loss": 3.2086, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.777693659226013e-05, |
|
"loss": 3.2935, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.776026926354052e-05, |
|
"loss": 2.9873, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.774354261615503e-05, |
|
"loss": 3.2671, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.772675669369728e-05, |
|
"loss": 3.0882, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.770991153991537e-05, |
|
"loss": 3.2727, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.769300719871177e-05, |
|
"loss": 2.9907, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7676043714143195e-05, |
|
"loss": 3.1303, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.765902113042053e-05, |
|
"loss": 3.1412, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7641939491908674e-05, |
|
"loss": 3.3585, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7624798843126404e-05, |
|
"loss": 3.1958, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7607599228746355e-05, |
|
"loss": 3.1581, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.759034069359478e-05, |
|
"loss": 3.224, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.7573023282651536e-05, |
|
"loss": 3.1031, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.75556470410499e-05, |
|
"loss": 3.0914, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.753821201407648e-05, |
|
"loss": 3.2474, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.75207182471711e-05, |
|
"loss": 3.2045, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.7503165785926676e-05, |
|
"loss": 2.9288, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.748555467608909e-05, |
|
"loss": 3.3084, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.7467884963557064e-05, |
|
"loss": 3.1761, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.745015669438207e-05, |
|
"loss": 3.0644, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.7432369914768185e-05, |
|
"loss": 3.0874, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.7414524671071994e-05, |
|
"loss": 3.1106, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.739662100980242e-05, |
|
"loss": 2.8364, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.737865897762066e-05, |
|
"loss": 3.0884, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.736063862134004e-05, |
|
"loss": 3.1134, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.734255998792589e-05, |
|
"loss": 3.3668, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.732442312449541e-05, |
|
"loss": 2.9823, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.730622807831758e-05, |
|
"loss": 3.0893, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.728797489681301e-05, |
|
"loss": 3.1091, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.726966362755383e-05, |
|
"loss": 2.9348, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.725129431826355e-05, |
|
"loss": 3.2506, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.723286701681695e-05, |
|
"loss": 2.9876, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.7214381771239946e-05, |
|
"loss": 2.9158, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.719583862970949e-05, |
|
"loss": 3.2035, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7177237640553414e-05, |
|
"loss": 2.9031, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7158578852250304e-05, |
|
"loss": 3.2779, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.7139862313429385e-05, |
|
"loss": 3.1219, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.712108807287042e-05, |
|
"loss": 3.1643, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.7102256179503525e-05, |
|
"loss": 3.1993, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.708336668240908e-05, |
|
"loss": 3.1773, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.706441963081762e-05, |
|
"loss": 2.9617, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.7045415074109646e-05, |
|
"loss": 3.2621, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.702635306181554e-05, |
|
"loss": 3.2206, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.700723364361545e-05, |
|
"loss": 3.3413, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.6988056869339104e-05, |
|
"loss": 3.3453, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.696882278896574e-05, |
|
"loss": 3.0481, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.694953145262393e-05, |
|
"loss": 3.2183, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.693018291059147e-05, |
|
"loss": 3.4063, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.691077721329527e-05, |
|
"loss": 3.1694, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.6891314411311184e-05, |
|
"loss": 3.0084, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.6871794555363885e-05, |
|
"loss": 3.318, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.685221769632676e-05, |
|
"loss": 3.3126, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.683258388522175e-05, |
|
"loss": 3.1085, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.681289317321923e-05, |
|
"loss": 3.0566, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.679314561163787e-05, |
|
"loss": 3.0642, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.6773341251944496e-05, |
|
"loss": 2.9335, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.675348014575398e-05, |
|
"loss": 3.1551, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.673356234482908e-05, |
|
"loss": 3.0211, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.671358790108033e-05, |
|
"loss": 3.28, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.6693556866565835e-05, |
|
"loss": 3.0621, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.667346929349126e-05, |
|
"loss": 3.1141, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.6653325234209575e-05, |
|
"loss": 3.1941, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.663312474122099e-05, |
|
"loss": 3.1874, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.661286786717278e-05, |
|
"loss": 3.2085, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.659255466485918e-05, |
|
"loss": 3.398, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.65721851872212e-05, |
|
"loss": 3.1911, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.655175948734654e-05, |
|
"loss": 3.2273, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.653127761846943e-05, |
|
"loss": 3.2084, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.6510739633970485e-05, |
|
"loss": 3.2789, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.649014558737656e-05, |
|
"loss": 3.1193, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.646949553236064e-05, |
|
"loss": 3.2083, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.6448789522741665e-05, |
|
"loss": 3.0306, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.642802761248441e-05, |
|
"loss": 3.0084, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.6407209855699365e-05, |
|
"loss": 3.2976, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.6386336306642514e-05, |
|
"loss": 3.3373, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.63654070197153e-05, |
|
"loss": 3.049, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.634442204946442e-05, |
|
"loss": 3.1669, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.632338145058167e-05, |
|
"loss": 3.1078, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.630228527790386e-05, |
|
"loss": 3.1724, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.628113358641263e-05, |
|
"loss": 3.2197, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.625992643123429e-05, |
|
"loss": 3.0047, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.623866386763974e-05, |
|
"loss": 2.9798, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.621734595104427e-05, |
|
"loss": 3.2065, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.619597273700742e-05, |
|
"loss": 3.0981, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.6174544281232865e-05, |
|
"loss": 2.7712, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.615306063956826e-05, |
|
"loss": 3.2469, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.613152186800506e-05, |
|
"loss": 3.3318, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.610992802267842e-05, |
|
"loss": 3.216, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.608827915986704e-05, |
|
"loss": 3.0298, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.606657533599298e-05, |
|
"loss": 3.1146, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.604481660762158e-05, |
|
"loss": 3.1621, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.6023003031461234e-05, |
|
"loss": 2.9654, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.600113466436331e-05, |
|
"loss": 3.3586, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.597921156332196e-05, |
|
"loss": 3.1212, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.5957233785474e-05, |
|
"loss": 2.8321, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.5935201388098716e-05, |
|
"loss": 2.9001, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.5913114428617785e-05, |
|
"loss": 3.248, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.589097296459508e-05, |
|
"loss": 2.8819, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.5868777053736476e-05, |
|
"loss": 3.0377, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.584652675388982e-05, |
|
"loss": 3.3161, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.582422212304467e-05, |
|
"loss": 3.1995, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.580186321933217e-05, |
|
"loss": 3.4196, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.577945010102496e-05, |
|
"loss": 3.35, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.575698282653693e-05, |
|
"loss": 3.2269, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.573446145442314e-05, |
|
"loss": 3.1207, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.571188604337963e-05, |
|
"loss": 3.3682, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.568925665224327e-05, |
|
"loss": 3.0202, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.5666573339991646e-05, |
|
"loss": 3.0227, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.564383616574285e-05, |
|
"loss": 3.1553, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.562104518875535e-05, |
|
"loss": 3.1961, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.5598200468427854e-05, |
|
"loss": 2.9751, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.557530206429912e-05, |
|
"loss": 2.9953, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.555235003604782e-05, |
|
"loss": 2.9119, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.55293444434924e-05, |
|
"loss": 3.1541, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.5506285346590894e-05, |
|
"loss": 2.9233, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.5483172805440785e-05, |
|
"loss": 3.3379, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.546000688027884e-05, |
|
"loss": 3.1169, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.543678763148096e-05, |
|
"loss": 3.3383, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.5413515119562023e-05, |
|
"loss": 3.1096, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.539018940517572e-05, |
|
"loss": 3.1755, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.53668105491144e-05, |
|
"loss": 3.1069, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.534337861230891e-05, |
|
"loss": 3.045, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.5319893655828446e-05, |
|
"loss": 2.975, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.5296355740880385e-05, |
|
"loss": 3.4444, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.5272764928810115e-05, |
|
"loss": 3.1052, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.52491212811009e-05, |
|
"loss": 3.0536, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.522542485937369e-05, |
|
"loss": 3.0542, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.520167572538699e-05, |
|
"loss": 3.158, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5177873941036683e-05, |
|
"loss": 3.0392, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5154019568355876e-05, |
|
"loss": 3.1487, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5130112669514726e-05, |
|
"loss": 2.9061, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.510615330682028e-05, |
|
"loss": 2.9249, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5082141542716336e-05, |
|
"loss": 3.0888, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.505807743978324e-05, |
|
"loss": 2.9417, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5033961060737774e-05, |
|
"loss": 2.8967, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.5009792468432946e-05, |
|
"loss": 2.9344, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.498557172585782e-05, |
|
"loss": 3.1597, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.496129889613744e-05, |
|
"loss": 2.9188, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.493697404253254e-05, |
|
"loss": 2.7781, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.4912597228439466e-05, |
|
"loss": 3.0411, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.488816851738999e-05, |
|
"loss": 3.0727, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.486368797305114e-05, |
|
"loss": 3.0783, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.483915565922501e-05, |
|
"loss": 2.823, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.481457163984864e-05, |
|
"loss": 2.6414, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.478993597899382e-05, |
|
"loss": 2.9076, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.476524874086695e-05, |
|
"loss": 2.8118, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.4740509989808815e-05, |
|
"loss": 3.0352, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.4715719790294474e-05, |
|
"loss": 3.0765, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.4690878206933086e-05, |
|
"loss": 2.6218, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.466598530446771e-05, |
|
"loss": 3.0266, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.464104114777515e-05, |
|
"loss": 3.0852, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.4616045801865814e-05, |
|
"loss": 2.9536, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.4590999331883505e-05, |
|
"loss": 2.7272, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.456590180310527e-05, |
|
"loss": 3.0185, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.454075328094123e-05, |
|
"loss": 3.1669, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.45155538309344e-05, |
|
"loss": 2.7681, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.449030351876053e-05, |
|
"loss": 3.0262, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.446500241022794e-05, |
|
"loss": 2.7877, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.443965057127731e-05, |
|
"loss": 2.9973, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.441424806798156e-05, |
|
"loss": 2.9694, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.438879496654565e-05, |
|
"loss": 2.9082, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.43632913333064e-05, |
|
"loss": 3.1398, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.4337737234732334e-05, |
|
"loss": 2.9788, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.431213273742351e-05, |
|
"loss": 3.0885, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.42864779081113e-05, |
|
"loss": 3.1436, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.4260772813658316e-05, |
|
"loss": 2.9983, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.423501752105812e-05, |
|
"loss": 3.0249, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.420921209743511e-05, |
|
"loss": 2.8603, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.4183356610044364e-05, |
|
"loss": 2.8446, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.415745112627141e-05, |
|
"loss": 3.0299, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.4131495713632104e-05, |
|
"loss": 2.8172, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.4105490439772404e-05, |
|
"loss": 2.9729, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.407943537246822e-05, |
|
"loss": 2.9726, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.4053330579625265e-05, |
|
"loss": 2.8896, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.4027176129278815e-05, |
|
"loss": 3.1341, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.4000972089593566e-05, |
|
"loss": 2.8074, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.397471852886349e-05, |
|
"loss": 3.0711, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.3948415515511586e-05, |
|
"loss": 3.0245, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.3922063118089765e-05, |
|
"loss": 3.0007, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.389566140527861e-05, |
|
"loss": 2.9818, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.386921044588727e-05, |
|
"loss": 2.9922, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.384271030885322e-05, |
|
"loss": 2.8606, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.381616106324211e-05, |
|
"loss": 2.7615, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.3789562778247585e-05, |
|
"loss": 2.7838, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.3762915523191084e-05, |
|
"loss": 2.9275, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.373621936752168e-05, |
|
"loss": 3.0331, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.37094743808159e-05, |
|
"loss": 2.9586, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.3682680632777515e-05, |
|
"loss": 2.7475, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.36558381932374e-05, |
|
"loss": 3.0034, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.362894713215334e-05, |
|
"loss": 2.9839, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.360200751960982e-05, |
|
"loss": 2.8333, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.3575019425817856e-05, |
|
"loss": 2.853, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.354798292111483e-05, |
|
"loss": 3.266, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.35208980759643e-05, |
|
"loss": 3.1553, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.34937649609558e-05, |
|
"loss": 3.1441, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.3466583646804674e-05, |
|
"loss": 2.8948, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.3439354204351866e-05, |
|
"loss": 3.0963, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.341207670456379e-05, |
|
"loss": 3.0771, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.3384751218532086e-05, |
|
"loss": 3.0364, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.335737781747343e-05, |
|
"loss": 2.9928, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.332995657272944e-05, |
|
"loss": 3.1993, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.330248755576636e-05, |
|
"loss": 2.9873, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.3274970838175e-05, |
|
"loss": 2.9258, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.324740649167044e-05, |
|
"loss": 2.7857, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.321979458809191e-05, |
|
"loss": 3.1293, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.31921351994026e-05, |
|
"loss": 3.0588, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.3164428397689436e-05, |
|
"loss": 3.116, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.313667425516292e-05, |
|
"loss": 3.0814, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.310887284415695e-05, |
|
"loss": 2.9781, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.308102423712859e-05, |
|
"loss": 2.8611, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.3053128506657934e-05, |
|
"loss": 3.1201, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.302518572544787e-05, |
|
"loss": 3.108, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.299719596632392e-05, |
|
"loss": 2.8409, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.2969159302234044e-05, |
|
"loss": 2.8566, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.294107580624845e-05, |
|
"loss": 2.7636, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.291294555155939e-05, |
|
"loss": 3.1108, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.2884768611481e-05, |
|
"loss": 3.0492, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.2856545059449063e-05, |
|
"loss": 3.1709, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.282827496902086e-05, |
|
"loss": 3.0867, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.279995841387496e-05, |
|
"loss": 2.8054, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.277159546781103e-05, |
|
"loss": 2.8368, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.274318620474964e-05, |
|
"loss": 3.0, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.2714730698732076e-05, |
|
"loss": 2.9218, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.268622902392014e-05, |
|
"loss": 2.9109, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.265768125459597e-05, |
|
"loss": 3.074, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.263480990108211e-05, |
|
"loss": 2.8238, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.260617934921023e-05, |
|
"loss": 2.9209, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.257750291145457e-05, |
|
"loss": 2.8323, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.254878066255274e-05, |
|
"loss": 3.2091, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.252001267736174e-05, |
|
"loss": 2.8372, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.2491199030857764e-05, |
|
"loss": 3.0106, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.246233979813602e-05, |
|
"loss": 2.9969, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.243343505441052e-05, |
|
"loss": 3.1595, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.2404484875013895e-05, |
|
"loss": 2.8051, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.237548933539718e-05, |
|
"loss": 2.8842, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.234644851112964e-05, |
|
"loss": 2.9729, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.231736247789856e-05, |
|
"loss": 2.7805, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.228823131150904e-05, |
|
"loss": 2.9725, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.225905508788382e-05, |
|
"loss": 3.0599, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.222983388306308e-05, |
|
"loss": 2.7567, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.2200567773204194e-05, |
|
"loss": 2.884, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.217125683458161e-05, |
|
"loss": 2.8594, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.214190114358659e-05, |
|
"loss": 2.9515, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.211250077672704e-05, |
|
"loss": 2.992, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.208305581062728e-05, |
|
"loss": 3.1784, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.205356632202789e-05, |
|
"loss": 3.0196, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.20240323877855e-05, |
|
"loss": 2.7891, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.1994454084872525e-05, |
|
"loss": 2.8974, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.196483149037707e-05, |
|
"loss": 2.8219, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.1935164681502624e-05, |
|
"loss": 2.8833, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.190545373556796e-05, |
|
"loss": 3.0166, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.187569873000684e-05, |
|
"loss": 3.1676, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.184589974236788e-05, |
|
"loss": 2.9239, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.181605685031432e-05, |
|
"loss": 3.0094, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.1786170131623825e-05, |
|
"loss": 3.0235, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.1756239664188273e-05, |
|
"loss": 3.058, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.172626552601358e-05, |
|
"loss": 2.7391, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.169624779521944e-05, |
|
"loss": 2.9975, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.1666186550039214e-05, |
|
"loss": 2.7703, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.163608186881964e-05, |
|
"loss": 2.8288, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.160593383002066e-05, |
|
"loss": 2.9945, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.157574251221521e-05, |
|
"loss": 2.8768, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.1545507994089064e-05, |
|
"loss": 2.7161, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.1515230354440524e-05, |
|
"loss": 3.0821, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.148490967218033e-05, |
|
"loss": 3.1119, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.145454602633137e-05, |
|
"loss": 2.8815, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.14241394960285e-05, |
|
"loss": 2.9891, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.1393690160518384e-05, |
|
"loss": 3.2624, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.1363198099159205e-05, |
|
"loss": 2.7588, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.1332663391420515e-05, |
|
"loss": 3.0405, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.130208611688302e-05, |
|
"loss": 2.9254, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.1271466355238354e-05, |
|
"loss": 2.8499, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.124080418628888e-05, |
|
"loss": 3.0828, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.1210099689947513e-05, |
|
"loss": 2.8524, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.117935294623743e-05, |
|
"loss": 3.0711, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.1148564035291956e-05, |
|
"loss": 3.0343, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.1117733037354314e-05, |
|
"loss": 2.8869, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.1086860032777395e-05, |
|
"loss": 3.0381, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.105594510202359e-05, |
|
"loss": 2.9049, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.102498832566454e-05, |
|
"loss": 3.0071, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.0993989784380947e-05, |
|
"loss": 2.9955, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.096294955896239e-05, |
|
"loss": 2.9568, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.093186773030705e-05, |
|
"loss": 2.9727, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.090074437942155e-05, |
|
"loss": 3.0501, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.086957958742075e-05, |
|
"loss": 3.2328, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.0838373435527494e-05, |
|
"loss": 2.7966, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.08071260050724e-05, |
|
"loss": 2.8666, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.077583737749373e-05, |
|
"loss": 2.9765, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.074450763433704e-05, |
|
"loss": 2.6923, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.07131368572551e-05, |
|
"loss": 2.9031, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.06817251280076e-05, |
|
"loss": 2.8287, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.065027252846095e-05, |
|
"loss": 3.1046, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.0618779140588124e-05, |
|
"loss": 3.0414, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.058724504646834e-05, |
|
"loss": 2.8644, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.055567032828695e-05, |
|
"loss": 2.9589, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.052405506833516e-05, |
|
"loss": 2.886, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.0492399349009846e-05, |
|
"loss": 2.8026, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.0460703252813326e-05, |
|
"loss": 3.0376, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.042896686235316e-05, |
|
"loss": 2.9751, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.039719026034191e-05, |
|
"loss": 2.8638, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.036537352959694e-05, |
|
"loss": 3.0403, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.033351675304022e-05, |
|
"loss": 2.8241, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.030162001369807e-05, |
|
"loss": 3.1196, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.026968339470096e-05, |
|
"loss": 2.8388, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.0237706979283304e-05, |
|
"loss": 2.9974, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.020569085078324e-05, |
|
"loss": 3.0178, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.0173635092642386e-05, |
|
"loss": 3.0166, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.014153978840567e-05, |
|
"loss": 3.0657, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.010940502172108e-05, |
|
"loss": 3.0229, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.007723087633943e-05, |
|
"loss": 2.5904, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.0045017436114205e-05, |
|
"loss": 2.8607, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.001276478500127e-05, |
|
"loss": 3.0797, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.998047300705868e-05, |
|
"loss": 2.9475, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.9948142186446486e-05, |
|
"loss": 2.7829, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.991577240742649e-05, |
|
"loss": 3.0715, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.9883363754362004e-05, |
|
"loss": 3.0126, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.985091631171769e-05, |
|
"loss": 3.0835, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.981843016405927e-05, |
|
"loss": 2.9109, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.978590539605338e-05, |
|
"loss": 3.0058, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.975334209246727e-05, |
|
"loss": 2.7376, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.972074033816866e-05, |
|
"loss": 2.7173, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.968810021812544e-05, |
|
"loss": 2.8642, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.965542181740553e-05, |
|
"loss": 2.8643, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.9622705221176596e-05, |
|
"loss": 3.1752, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.958995051470585e-05, |
|
"loss": 2.7045, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.955715778335984e-05, |
|
"loss": 3.0828, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.952432711260421e-05, |
|
"loss": 2.982, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.949145858800348e-05, |
|
"loss": 2.9762, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.945855229522084e-05, |
|
"loss": 3.1026, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.942560832001789e-05, |
|
"loss": 3.1225, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.9392626748254456e-05, |
|
"loss": 3.0462, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.9359607665888346e-05, |
|
"loss": 2.8954, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.932655115897513e-05, |
|
"loss": 2.8361, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.92934573136679e-05, |
|
"loss": 3.1644, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.9260326216217105e-05, |
|
"loss": 3.0226, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.922715795297022e-05, |
|
"loss": 3.0285, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.919395261037163e-05, |
|
"loss": 3.0582, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.9160710274962345e-05, |
|
"loss": 2.9447, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.912743103337977e-05, |
|
"loss": 3.0433, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.9094114972357524e-05, |
|
"loss": 3.0276, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.9060762178725165e-05, |
|
"loss": 3.1617, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.9027372739407994e-05, |
|
"loss": 2.962, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.899394674142682e-05, |
|
"loss": 2.9722, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.8960484271897736e-05, |
|
"loss": 2.9605, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.892698541803187e-05, |
|
"loss": 3.1036, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.889345026713519e-05, |
|
"loss": 3.1859, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.8859878906608275e-05, |
|
"loss": 3.0349, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.882627142394605e-05, |
|
"loss": 2.9102, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.87926279067376e-05, |
|
"loss": 2.9039, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.8758948442665914e-05, |
|
"loss": 3.0984, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.872523311950768e-05, |
|
"loss": 2.9782, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.869148202513303e-05, |
|
"loss": 2.7941, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.8657695247505325e-05, |
|
"loss": 3.1176, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.862387287468094e-05, |
|
"loss": 2.8274, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.859001499480901e-05, |
|
"loss": 3.0035, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.8556121696131194e-05, |
|
"loss": 3.0127, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.852219306698149e-05, |
|
"loss": 3.0512, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.848822919578596e-05, |
|
"loss": 2.8802, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.845423017106251e-05, |
|
"loss": 2.9527, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.842019608142069e-05, |
|
"loss": 2.9869, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.838612701556138e-05, |
|
"loss": 2.7298, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.835202306227669e-05, |
|
"loss": 3.0647, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.8317884310449614e-05, |
|
"loss": 2.9665, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.8283710849053834e-05, |
|
"loss": 2.9411, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.824950276715351e-05, |
|
"loss": 2.8853, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.8215260153903024e-05, |
|
"loss": 2.7117, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.818098309854676e-05, |
|
"loss": 3.1676, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.8146671690418865e-05, |
|
"loss": 2.9812, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.811232601894301e-05, |
|
"loss": 2.8832, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.8077946173632175e-05, |
|
"loss": 3.0479, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.80435322440884e-05, |
|
"loss": 2.9631, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.800908432000254e-05, |
|
"loss": 2.9479, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.7974602491154094e-05, |
|
"loss": 2.752, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.7940086847410874e-05, |
|
"loss": 3.0117, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.7905537478728845e-05, |
|
"loss": 3.0633, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.787095447515187e-05, |
|
"loss": 3.1068, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.783633792681147e-05, |
|
"loss": 2.9438, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.7801687923926577e-05, |
|
"loss": 2.9187, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.7767004556803346e-05, |
|
"loss": 3.0672, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.773228791583484e-05, |
|
"loss": 2.9911, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.76975380915009e-05, |
|
"loss": 3.0206, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.7662755174367795e-05, |
|
"loss": 3.1758, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.7627939255088064e-05, |
|
"loss": 2.5882, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.759309042440028e-05, |
|
"loss": 3.1835, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.755820877312876e-05, |
|
"loss": 3.0573, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.752329439218337e-05, |
|
"loss": 2.9557, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.748834737255927e-05, |
|
"loss": 2.7683, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.74533678053367e-05, |
|
"loss": 2.9666, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.741835578168071e-05, |
|
"loss": 3.1286, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.738331139284096e-05, |
|
"loss": 2.8983, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.734823473015143e-05, |
|
"loss": 2.9258, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.7313125885030244e-05, |
|
"loss": 3.2793, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.727798494897938e-05, |
|
"loss": 2.7685, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.724281201358447e-05, |
|
"loss": 2.7007, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.7207607170514534e-05, |
|
"loss": 3.0827, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.717237051152175e-05, |
|
"loss": 3.1145, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.7137102128441216e-05, |
|
"loss": 2.9999, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.710180211319071e-05, |
|
"loss": 2.9787, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.7066470557770463e-05, |
|
"loss": 2.9137, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.703110755426289e-05, |
|
"loss": 3.0994, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.6995713194832375e-05, |
|
"loss": 2.8384, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.696028757172503e-05, |
|
"loss": 2.9642, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.6924830777268425e-05, |
|
"loss": 2.7456, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.68893429038714e-05, |
|
"loss": 2.8045, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.685382404402378e-05, |
|
"loss": 3.2544, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.681827429029613e-05, |
|
"loss": 2.9062, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.678269373533956e-05, |
|
"loss": 3.0275, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.674708247188545e-05, |
|
"loss": 2.9737, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.6711440592745205e-05, |
|
"loss": 3.2186, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.6675768190810026e-05, |
|
"loss": 3.2481, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.6640065359050645e-05, |
|
"loss": 2.9293, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.660433219051714e-05, |
|
"loss": 2.9425, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.656856877833863e-05, |
|
"loss": 2.8838, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.6532775215723044e-05, |
|
"loss": 2.9395, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.6496951595956924e-05, |
|
"loss": 2.9398, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.646109801240511e-05, |
|
"loss": 3.0734, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.642521455851058e-05, |
|
"loss": 3.0898, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.638930132779411e-05, |
|
"loss": 2.8408, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.635335841385411e-05, |
|
"loss": 2.8395, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.6317385910366364e-05, |
|
"loss": 3.0667, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.6281383911083744e-05, |
|
"loss": 2.7302, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.624535250983601e-05, |
|
"loss": 2.8523, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.620929180052955e-05, |
|
"loss": 2.9712, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.6173201877147134e-05, |
|
"loss": 3.0531, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.613708283374767e-05, |
|
"loss": 2.9067, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.610093476446597e-05, |
|
"loss": 2.7005, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.606475776351248e-05, |
|
"loss": 2.8974, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.6028551925173046e-05, |
|
"loss": 2.6924, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.599231734380869e-05, |
|
"loss": 2.9246, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.595605411385533e-05, |
|
"loss": 2.9283, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.5919762329823554e-05, |
|
"loss": 3.091, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.588344208629838e-05, |
|
"loss": 2.8846, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.5847093477938956e-05, |
|
"loss": 2.9335, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.581071659947842e-05, |
|
"loss": 3.0189, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.5774311545723525e-05, |
|
"loss": 2.7938, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.5737878411554514e-05, |
|
"loss": 3.0325, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.5701417291924765e-05, |
|
"loss": 3.0631, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.566492828186063e-05, |
|
"loss": 2.6561, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.562841147646111e-05, |
|
"loss": 3.0796, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.559186697089769e-05, |
|
"loss": 2.6858, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.555529486041401e-05, |
|
"loss": 3.1426, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.55186952403257e-05, |
|
"loss": 3.1105, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.548206820602004e-05, |
|
"loss": 3.189, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.544541385295578e-05, |
|
"loss": 2.7971, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.540873227666288e-05, |
|
"loss": 2.766, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.537202357274223e-05, |
|
"loss": 2.9254, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.533528783686541e-05, |
|
"loss": 2.6065, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.52985251647745e-05, |
|
"loss": 2.9204, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.5261735652281727e-05, |
|
"loss": 2.8152, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.5224919395269315e-05, |
|
"loss": 2.9444, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.5188076489689165e-05, |
|
"loss": 3.0586, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.515120703156264e-05, |
|
"loss": 2.8679, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.5114311116980285e-05, |
|
"loss": 2.8359, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.507738884210164e-05, |
|
"loss": 2.978, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.5040440303154895e-05, |
|
"loss": 3.0614, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.500346559643675e-05, |
|
"loss": 2.9246, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.4966464818312025e-05, |
|
"loss": 2.8654, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.492943806521356e-05, |
|
"loss": 2.9768, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.489238543364187e-05, |
|
"loss": 2.9184, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.4855307020164904e-05, |
|
"loss": 3.0659, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.4818202921417805e-05, |
|
"loss": 2.8185, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.4781073234102665e-05, |
|
"loss": 3.0411, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.474391805498827e-05, |
|
"loss": 3.0205, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.470673748090984e-05, |
|
"loss": 3.0916, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.4669531608768775e-05, |
|
"loss": 2.9119, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.463230053553241e-05, |
|
"loss": 2.9236, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.459504435823378e-05, |
|
"loss": 2.7746, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.455776317397129e-05, |
|
"loss": 3.0066, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.4520457079908605e-05, |
|
"loss": 2.667, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.448312617327423e-05, |
|
"loss": 2.8988, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.444577055136139e-05, |
|
"loss": 2.8698, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.4408390311527686e-05, |
|
"loss": 2.8258, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.4370985551194926e-05, |
|
"loss": 2.9826, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.4333556367848776e-05, |
|
"loss": 2.7939, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.429610285903858e-05, |
|
"loss": 2.959, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.425862512237708e-05, |
|
"loss": 2.8027, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.4221123255540157e-05, |
|
"loss": 2.8562, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.418359735626656e-05, |
|
"loss": 2.8358, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.414604752235772e-05, |
|
"loss": 2.8507, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.4108473851677405e-05, |
|
"loss": 2.954, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.407087644215153e-05, |
|
"loss": 3.0872, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.4033255391767865e-05, |
|
"loss": 2.9302, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.39956107985758e-05, |
|
"loss": 2.8434, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.395794276068609e-05, |
|
"loss": 2.984, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.3920251376270575e-05, |
|
"loss": 2.8695, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.388253674356196e-05, |
|
"loss": 2.9782, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.384479896085353e-05, |
|
"loss": 3.0113, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.380703812649891e-05, |
|
"loss": 3.076, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.376925433891179e-05, |
|
"loss": 2.7471, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.373144769656571e-05, |
|
"loss": 2.89, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.369361829799375e-05, |
|
"loss": 2.9204, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.365576624178828e-05, |
|
"loss": 2.8335, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.3617891626600786e-05, |
|
"loss": 3.0041, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.357999455114148e-05, |
|
"loss": 2.9935, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.3542075114179164e-05, |
|
"loss": 2.9154, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.350413341454086e-05, |
|
"loss": 2.8998, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.346616955111168e-05, |
|
"loss": 2.6922, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.342818362283444e-05, |
|
"loss": 2.7723, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.33901757287095e-05, |
|
"loss": 2.8529, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.335214596779446e-05, |
|
"loss": 3.0667, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.33140944392039e-05, |
|
"loss": 3.0768, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.327602124210915e-05, |
|
"loss": 2.8217, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.3237926475737986e-05, |
|
"loss": 3.0252, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.319981023937442e-05, |
|
"loss": 3.0058, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.316167263235842e-05, |
|
"loss": 2.8522, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.3123513754085634e-05, |
|
"loss": 3.023, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.308533370400717e-05, |
|
"loss": 3.0671, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.30471325816293e-05, |
|
"loss": 2.7645, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.300891048651321e-05, |
|
"loss": 2.9496, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.297066751827478e-05, |
|
"loss": 3.0305, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.293240377658424e-05, |
|
"loss": 2.9011, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.2894119361166e-05, |
|
"loss": 2.7675, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.2855814371798325e-05, |
|
"loss": 2.9937, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.2817488908313134e-05, |
|
"loss": 3.1056, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.277914307059566e-05, |
|
"loss": 2.6727, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.274077695858428e-05, |
|
"loss": 2.7927, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.2702390672270175e-05, |
|
"loss": 2.7103, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.266398431169714e-05, |
|
"loss": 2.7382, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.262555797696125e-05, |
|
"loss": 2.5843, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.258711176821068e-05, |
|
"loss": 2.7592, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.254864578564535e-05, |
|
"loss": 2.8419, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.2510160129516775e-05, |
|
"loss": 2.6239, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.247165490012769e-05, |
|
"loss": 2.6813, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.243313019783187e-05, |
|
"loss": 2.8535, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.2394586123033844e-05, |
|
"loss": 2.7436, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.235602277618862e-05, |
|
"loss": 2.8294, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.231744025780143e-05, |
|
"loss": 2.8097, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.227883866842748e-05, |
|
"loss": 2.535, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.2240218108671684e-05, |
|
"loss": 2.5515, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.2201578679188395e-05, |
|
"loss": 2.6408, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.2162920480681126e-05, |
|
"loss": 2.7759, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.212424361390232e-05, |
|
"loss": 2.6673, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.2085548179653094e-05, |
|
"loss": 2.6833, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.204683427878291e-05, |
|
"loss": 2.9401, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.200810201218942e-05, |
|
"loss": 2.5877, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.1969351480818075e-05, |
|
"loss": 2.8027, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.193058278566199e-05, |
|
"loss": 2.7086, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.189179602776157e-05, |
|
"loss": 2.7855, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.185299130820432e-05, |
|
"loss": 2.7144, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.181416872812455e-05, |
|
"loss": 2.8703, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.177532838870313e-05, |
|
"loss": 2.8389, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.1736470391167196e-05, |
|
"loss": 2.733, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.169759483678992e-05, |
|
"loss": 2.7172, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.165870182689024e-05, |
|
"loss": 2.7626, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.161979146283255e-05, |
|
"loss": 2.4813, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.158086384602652e-05, |
|
"loss": 2.7431, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.1541919077926756e-05, |
|
"loss": 2.6488, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.1502957260032564e-05, |
|
"loss": 2.9131, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.1463978493887695e-05, |
|
"loss": 2.834, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.142498288108007e-05, |
|
"loss": 2.9725, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.1385970523241524e-05, |
|
"loss": 2.7835, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.13469415220475e-05, |
|
"loss": 2.5171, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.1307895979216876e-05, |
|
"loss": 2.4543, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.126883399651158e-05, |
|
"loss": 2.8861, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.1229755675736425e-05, |
|
"loss": 2.8512, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.119066111873879e-05, |
|
"loss": 2.6645, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.1151550427408385e-05, |
|
"loss": 2.9194, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.111242370367694e-05, |
|
"loss": 3.0082, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.1073281049517994e-05, |
|
"loss": 2.8563, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.10341225669466e-05, |
|
"loss": 2.8531, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.099494835801906e-05, |
|
"loss": 2.7171, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.0955758524832664e-05, |
|
"loss": 2.7625, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.0916553169525424e-05, |
|
"loss": 2.9918, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.08773323942758e-05, |
|
"loss": 2.9665, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.083809630130246e-05, |
|
"loss": 2.7593, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.0798844992863954e-05, |
|
"loss": 2.6305, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.075957857125854e-05, |
|
"loss": 2.7587, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.072029713882381e-05, |
|
"loss": 2.7514, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.068100079793652e-05, |
|
"loss": 2.831, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.064168965101225e-05, |
|
"loss": 2.7585, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.060236380050519e-05, |
|
"loss": 2.7944, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.056302334890786e-05, |
|
"loss": 2.8257, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.05236683987508e-05, |
|
"loss": 2.7217, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.0484299052602355e-05, |
|
"loss": 2.8344, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.04449154130684e-05, |
|
"loss": 2.7256, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.0405517582792036e-05, |
|
"loss": 2.5206, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.036610566445337e-05, |
|
"loss": 2.6443, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.0326679760769226e-05, |
|
"loss": 2.7363, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.028723997449286e-05, |
|
"loss": 2.5985, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.0247786408413724e-05, |
|
"loss": 2.9206, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.0208319165357174e-05, |
|
"loss": 2.8855, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.0168838348184225e-05, |
|
"loss": 2.7868, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.0129344059791253e-05, |
|
"loss": 2.7129, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.0089836403109762e-05, |
|
"loss": 2.4369, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.005031548110607e-05, |
|
"loss": 2.629, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.00107813967811e-05, |
|
"loss": 2.6645, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.9971234253170055e-05, |
|
"loss": 2.8677, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.993167415334218e-05, |
|
"loss": 2.7891, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.9892101200400485e-05, |
|
"loss": 2.6506, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.985251549748149e-05, |
|
"loss": 2.7657, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.9812917147754937e-05, |
|
"loss": 2.5059, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.9773306254423515e-05, |
|
"loss": 2.6195, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.973368292072263e-05, |
|
"loss": 2.8329, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.9694047249920092e-05, |
|
"loss": 2.8922, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.9654399345315864e-05, |
|
"loss": 2.9282, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.961473931024182e-05, |
|
"loss": 2.7919, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.95750672480614e-05, |
|
"loss": 2.8352, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.9535383262169435e-05, |
|
"loss": 2.5651, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.9495687455991816e-05, |
|
"loss": 2.8988, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.9455979932985232e-05, |
|
"loss": 2.7066, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.9416260796636925e-05, |
|
"loss": 2.7755, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.9376530150464386e-05, |
|
"loss": 2.6079, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.9336788098015116e-05, |
|
"loss": 2.609, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.9297034742866337e-05, |
|
"loss": 2.7253, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.925727018862474e-05, |
|
"loss": 2.9559, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.9217494538926183e-05, |
|
"loss": 2.9942, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.9177707897435463e-05, |
|
"loss": 2.9635, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.913791036784601e-05, |
|
"loss": 2.7588, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.9098102053879644e-05, |
|
"loss": 2.9231, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.905828305928627e-05, |
|
"loss": 2.9618, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.901845348784365e-05, |
|
"loss": 2.6759, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.8978613443357105e-05, |
|
"loss": 2.8548, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.893876302965925e-05, |
|
"loss": 2.8764, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.889890235060972e-05, |
|
"loss": 2.5567, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.8859031510094914e-05, |
|
"loss": 2.7917, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.88191506120277e-05, |
|
"loss": 2.6198, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.8779259760347187e-05, |
|
"loss": 2.7678, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.873935905901839e-05, |
|
"loss": 2.5505, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.869944861203202e-05, |
|
"loss": 2.7345, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.8659528523404168e-05, |
|
"loss": 2.8262, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.861959889717607e-05, |
|
"loss": 2.7795, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.8579659837413812e-05, |
|
"loss": 2.591, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8539711448208074e-05, |
|
"loss": 2.6929, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8499753833673824e-05, |
|
"loss": 3.0178, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.845978709795012e-05, |
|
"loss": 3.0475, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8419811345199738e-05, |
|
"loss": 2.6604, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8379826679609e-05, |
|
"loss": 2.6987, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8339833205387434e-05, |
|
"loss": 2.7681, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8299831026767526e-05, |
|
"loss": 2.7053, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.825982024800445e-05, |
|
"loss": 2.8991, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.8219800973375803e-05, |
|
"loss": 2.7856, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.8179773307181306e-05, |
|
"loss": 2.6422, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.813973735374258e-05, |
|
"loss": 2.6979, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.8099693217402806e-05, |
|
"loss": 2.9674, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.805964100252652e-05, |
|
"loss": 2.6377, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.8019580813499303e-05, |
|
"loss": 2.798, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.7979512754727527e-05, |
|
"loss": 2.8589, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.7939436930638048e-05, |
|
"loss": 2.9183, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.7899353445678e-05, |
|
"loss": 3.087, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.7859262404314452e-05, |
|
"loss": 2.8895, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.7819163911034172e-05, |
|
"loss": 2.7229, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.7779058070343367e-05, |
|
"loss": 2.7135, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.773894498676737e-05, |
|
"loss": 2.6625, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.7698824764850408e-05, |
|
"loss": 2.8783, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.7658697509155294e-05, |
|
"loss": 2.528, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.761856332426321e-05, |
|
"loss": 2.5445, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.7578422314773344e-05, |
|
"loss": 2.7371, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.7538274585302703e-05, |
|
"loss": 2.7187, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.7498120240485815e-05, |
|
"loss": 2.6928, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.745795938497443e-05, |
|
"loss": 2.6319, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7417792123437263e-05, |
|
"loss": 2.6975, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7377618560559746e-05, |
|
"loss": 2.8003, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7337438801043724e-05, |
|
"loss": 2.9561, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.7297252949607193e-05, |
|
"loss": 2.5395, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.7257061110984007e-05, |
|
"loss": 2.842, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.7216863389923658e-05, |
|
"loss": 2.8684, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.717665989119094e-05, |
|
"loss": 2.714, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.713645071956573e-05, |
|
"loss": 2.8792, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.709623597984266e-05, |
|
"loss": 2.9185, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.7056015776830905e-05, |
|
"loss": 2.596, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.7015790215353853e-05, |
|
"loss": 2.883, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.6975559400248872e-05, |
|
"loss": 2.7508, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.6935323436367023e-05, |
|
"loss": 2.7681, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.6895082428572776e-05, |
|
"loss": 2.9875, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.6854836481743756e-05, |
|
"loss": 2.6518, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.681458570077045e-05, |
|
"loss": 2.5739, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.6774330190555953e-05, |
|
"loss": 3.0008, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.673407005601569e-05, |
|
"loss": 2.6662, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.669380540207712e-05, |
|
"loss": 2.7791, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.6653536333679502e-05, |
|
"loss": 2.9536, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.6613262955773578e-05, |
|
"loss": 2.6102, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.6572985373321342e-05, |
|
"loss": 2.8118, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.6532703691295734e-05, |
|
"loss": 2.7839, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.6492418014680385e-05, |
|
"loss": 2.7449, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.6452128448469333e-05, |
|
"loss": 2.7359, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.6411835097666747e-05, |
|
"loss": 2.784, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.6371538067286678e-05, |
|
"loss": 2.4048, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.6331237462352742e-05, |
|
"loss": 2.8536, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.6290933387897898e-05, |
|
"loss": 2.8984, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.6250625948964124e-05, |
|
"loss": 2.6593, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.6210315250602185e-05, |
|
"loss": 2.6529, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.617000139787132e-05, |
|
"loss": 2.8256, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.6129684495839014e-05, |
|
"loss": 2.7692, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.6089364649580676e-05, |
|
"loss": 2.5697, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.604904196417941e-05, |
|
"loss": 2.7452, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.6008716544725693e-05, |
|
"loss": 2.5728, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.5968388496317157e-05, |
|
"loss": 2.7245, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.5928057924058263e-05, |
|
"loss": 2.6465, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.5887724933060065e-05, |
|
"loss": 2.7824, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.5847389628439906e-05, |
|
"loss": 3.0099, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.580705211532118e-05, |
|
"loss": 2.7486, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.5766712498833008e-05, |
|
"loss": 2.7712, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.5726370884110018e-05, |
|
"loss": 2.5944, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.5686027376292037e-05, |
|
"loss": 2.844, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.5645682080523826e-05, |
|
"loss": 2.4769, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.5605335101954797e-05, |
|
"loss": 2.5758, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.5564986545738766e-05, |
|
"loss": 2.6202, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.5524636517033646e-05, |
|
"loss": 2.4537, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.54842851210012e-05, |
|
"loss": 2.5937, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.544393246280673e-05, |
|
"loss": 2.4738, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.5403578647618865e-05, |
|
"loss": 2.6147, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.5363223780609214e-05, |
|
"loss": 2.7137, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.5322867966952146e-05, |
|
"loss": 2.7939, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.5282511311824497e-05, |
|
"loss": 2.821, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.524215392040529e-05, |
|
"loss": 2.9476, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.520179589787547e-05, |
|
"loss": 2.5596, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.5161437349417622e-05, |
|
"loss": 2.7911, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.512107838021571e-05, |
|
"loss": 2.5827, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.5080719095454796e-05, |
|
"loss": 2.6661, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.5040359600320747e-05, |
|
"loss": 2.9065, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.5e-05, |
|
"loss": 2.6174, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.4959640399679252e-05, |
|
"loss": 2.7372, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.4919280904545213e-05, |
|
"loss": 2.615, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.4878921619784295e-05, |
|
"loss": 2.8296, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.483856265058238e-05, |
|
"loss": 2.5063, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.4798204102124533e-05, |
|
"loss": 2.7276, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.475784607959472e-05, |
|
"loss": 2.7801, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.4717488688175512e-05, |
|
"loss": 2.8598, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.4685203300121436e-05, |
|
"loss": 2.6644, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.4644847309756042e-05, |
|
"loss": 2.4651, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.4604492245002873e-05, |
|
"loss": 2.8326, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.456413821103682e-05, |
|
"loss": 2.6974, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.4523785313030085e-05, |
|
"loss": 2.5726, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.44834336561519e-05, |
|
"loss": 2.5724, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.4443083345568253e-05, |
|
"loss": 2.9383, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.4402734486441646e-05, |
|
"loss": 2.5757, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.4362387183930775e-05, |
|
"loss": 2.9013, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.432204154319031e-05, |
|
"loss": 2.7228, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.4281697669370545e-05, |
|
"loss": 2.7228, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.424135566761721e-05, |
|
"loss": 2.6018, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.4201015643071136e-05, |
|
"loss": 2.7756, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.416067770086801e-05, |
|
"loss": 2.8861, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.4120341946138085e-05, |
|
"loss": 2.9409, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.4080008484005907e-05, |
|
"loss": 2.5961, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.4039677419590066e-05, |
|
"loss": 2.7308, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.3999348858002888e-05, |
|
"loss": 3.0326, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.395902290435017e-05, |
|
"loss": 2.7954, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.391869966373094e-05, |
|
"loss": 2.8, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.3878379241237136e-05, |
|
"loss": 2.4567, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3838061741953334e-05, |
|
"loss": 2.7514, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3797747270956522e-05, |
|
"loss": 2.7957, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3757435933315785e-05, |
|
"loss": 2.8452, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.371712783409205e-05, |
|
"loss": 2.8488, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.367682307833777e-05, |
|
"loss": 2.7709, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.3636521771096725e-05, |
|
"loss": 2.8874, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.3596224017403683e-05, |
|
"loss": 3.0164, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.355592992228418e-05, |
|
"loss": 2.8052, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.3515639590754163e-05, |
|
"loss": 2.8099, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.3475353127819823e-05, |
|
"loss": 2.734, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.3435070638477237e-05, |
|
"loss": 2.7923, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.339479222771215e-05, |
|
"loss": 2.8617, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.3354518000499652e-05, |
|
"loss": 2.4441, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.3314248061803948e-05, |
|
"loss": 2.7458, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.327398251657805e-05, |
|
"loss": 2.5911, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.3233721469763556e-05, |
|
"loss": 2.7941, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.319346502629028e-05, |
|
"loss": 2.5942, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.315321329107609e-05, |
|
"loss": 3.008, |
|
"step": 5100 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 9730, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 300, |
|
"total_flos": 6.024653876352614e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|