{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 5e-06, "loss": 29.8799, "step": 5},
    {"epoch": 0.0, "learning_rate": 1e-05, "loss": 27.433, "step": 10},
    {"epoch": 0.0, "learning_rate": 1.5e-05, "loss": 24.1339, "step": 15},
    {"epoch": 0.0, "learning_rate": 2e-05, "loss": 20.4671, "step": 20},
    {"epoch": 0.01, "learning_rate": 2.5e-05, "loss": 15.3115, "step": 25},
    {"epoch": 0.01, "learning_rate": 3e-05, "loss": 11.2635, "step": 30},
    {"epoch": 0.01, "learning_rate": 3.5e-05, "loss": 9.7985, "step": 35},
    {"epoch": 0.01, "learning_rate": 4e-05, "loss": 8.8605, "step": 40},
    {"epoch": 0.01, "learning_rate": 4.5e-05, "loss": 8.1809, "step": 45},
    {"epoch": 0.01, "learning_rate": 5e-05, "loss": 7.9851, "step": 50},
    {"epoch": 0.01, "learning_rate": 5.500000000000001e-05, "loss": 7.8894, "step": 55},
    {"epoch": 0.01, "learning_rate": 6e-05, "loss": 7.7912, "step": 60},
    {"epoch": 0.01, "learning_rate": 6.500000000000001e-05, "loss": 7.6229, "step": 65},
    {"epoch": 0.01, "learning_rate": 7e-05, "loss": 7.3963, "step": 70},
    {"epoch": 0.02, "learning_rate": 7.500000000000001e-05, "loss": 7.1799, "step": 75},
    {"epoch": 0.02, "learning_rate": 8e-05, "loss": 6.9941, "step": 80},
    {"epoch": 0.02, "learning_rate": 8.5e-05, "loss": 6.7444, "step": 85},
    {"epoch": 0.02, "learning_rate": 9e-05, "loss": 6.5158, "step": 90},
    {"epoch": 0.02, "learning_rate": 9.5e-05, "loss": 6.3326, "step": 95},
    {"epoch": 0.02, "learning_rate": 0.0001, "loss": 6.1639, "step": 100},
    {"epoch": 0.02, "learning_rate": 9.999972205865686e-05, "loss": 5.9857, "step": 105},
    {"epoch": 0.02, "learning_rate": 9.999888823771751e-05, "loss": 5.9197, "step": 110},
    {"epoch": 0.02, "learning_rate": 9.999749854645204e-05, "loss": 5.8061, "step": 115},
    {"epoch": 0.02, "learning_rate": 9.99955530003106e-05, "loss": 5.6575, "step": 120},
    {"epoch": 0.03, "learning_rate": 9.99930516209231e-05, "loss": 5.5549, "step": 125},
    {"epoch": 0.03, "learning_rate": 9.998999443609897e-05, "loss": 5.4521, "step": 130},
    {"epoch": 0.03, "learning_rate": 9.998638147982696e-05, "loss": 5.3554, "step": 135},
    {"epoch": 0.03, "learning_rate": 9.998221279227467e-05, "loss": 5.2904, "step": 140},
    {"epoch": 0.03, "learning_rate": 9.997748841978812e-05, "loss": 5.2108, "step": 145},
    {"epoch": 0.03, "learning_rate": 9.997220841489122e-05, "loss": 5.2108, "step": 150},
    {"epoch": 0.03, "learning_rate": 9.996637283628528e-05, "loss": 5.0989, "step": 155},
    {"epoch": 0.03, "learning_rate": 9.995998174884821e-05, "loss": 4.9972, "step": 160},
    {"epoch": 0.03, "learning_rate": 9.995303522363394e-05, "loss": 4.9647, "step": 165},
    {"epoch": 0.04, "learning_rate": 9.99455333378715e-05, "loss": 4.9165, "step": 170},
    {"epoch": 0.04, "learning_rate": 9.993747617496428e-05, "loss": 4.9282, "step": 175},
    {"epoch": 0.04, "learning_rate": 9.9928863824489e-05, "loss": 4.708, "step": 180},
    {"epoch": 0.04, "learning_rate": 9.99196963821948e-05, "loss": 4.7795, "step": 185},
    {"epoch": 0.04, "learning_rate": 9.990997395000217e-05, "loss": 4.7472, "step": 190},
    {"epoch": 0.04, "learning_rate": 9.989969663600169e-05, "loss": 4.6689, "step": 195},
    {"epoch": 0.04, "learning_rate": 9.9888864554453e-05, "loss": 4.582, "step": 200},
    {"epoch": 0.04, "learning_rate": 9.987747782578342e-05, "loss": 4.5709, "step": 205},
    {"epoch": 0.04, "learning_rate": 9.986553657658668e-05, "loss": 4.5239, "step": 210},
    {"epoch": 0.04, "learning_rate": 9.985304093962145e-05, "loss": 4.4984, "step": 215},
    {"epoch": 0.05, "learning_rate": 9.983999105380988e-05, "loss": 4.4821, "step": 220},
    {"epoch": 0.05, "learning_rate": 9.982638706423608e-05, "loss": 4.374, "step": 225},
    {"epoch": 0.05, "learning_rate": 9.98122291221445e-05, "loss": 4.474, "step": 230},
    {"epoch": 0.05, "learning_rate": 9.979751738493826e-05, "loss": 4.3922, "step": 235},
    {"epoch": 0.05, "learning_rate": 9.978225201617732e-05, "loss": 4.4391, "step": 240},
    {"epoch": 0.05, "learning_rate": 9.976643318557678e-05, "loss": 4.4058, "step": 245},
    {"epoch": 0.05, "learning_rate": 9.975006106900495e-05, "loss": 4.3625, "step": 250},
    {"epoch": 0.05, "learning_rate": 9.973313584848132e-05, "loss": 4.27, "step": 255},
    {"epoch": 0.05, "learning_rate": 9.971565771217464e-05, "loss": 4.3206, "step": 260},
    {"epoch": 0.06, "learning_rate": 9.969762685440076e-05, "loss": 4.2774, "step": 265},
    {"epoch": 0.06, "learning_rate": 9.967904347562054e-05, "loss": 4.2523, "step": 270},
    {"epoch": 0.06, "learning_rate": 9.965990778243755e-05, "loss": 4.2444, "step": 275},
    {"epoch": 0.06, "learning_rate": 9.964021998759577e-05, "loss": 4.3125, "step": 280},
    {"epoch": 0.06, "learning_rate": 9.961998030997733e-05, "loss": 4.2492, "step": 285},
    {"epoch": 0.06, "learning_rate": 9.95991889745999e-05, "loss": 4.1899, "step": 290},
    {"epoch": 0.06, "learning_rate": 9.957784621261441e-05, "loss": 4.1044, "step": 295},
    {"epoch": 0.06, "learning_rate": 9.955595226130226e-05, "loss": 4.1067, "step": 300},
    {"epoch": 0.06, "learning_rate": 9.953350736407282e-05, "loss": 4.1925, "step": 305},
    {"epoch": 0.06, "learning_rate": 9.951051177046069e-05, "loss": 4.058, "step": 310},
    {"epoch": 0.07, "learning_rate": 9.948696573612292e-05, "loss": 4.1544, "step": 315},
    {"epoch": 0.07, "learning_rate": 9.946286952283618e-05, "loss": 4.0483, "step": 320},
    {"epoch": 0.07, "learning_rate": 9.943822339849381e-05, "loss": 4.1003, "step": 325},
    {"epoch": 0.07, "learning_rate": 9.941302763710288e-05, "loss": 4.0585, "step": 330},
    {"epoch": 0.07, "learning_rate": 9.938728251878116e-05, "loss": 4.0378, "step": 335},
    {"epoch": 0.07, "learning_rate": 9.936098832975393e-05, "loss": 3.9946, "step": 340},
    {"epoch": 0.07, "learning_rate": 9.933414536235091e-05, "loss": 3.9808, "step": 345},
    {"epoch": 0.07, "learning_rate": 9.93067539150029e-05, "loss": 4.0468, "step": 350},
    {"epoch": 0.07, "learning_rate": 9.927881429223853e-05, "loss": 3.9166, "step": 355},
    {"epoch": 0.07, "learning_rate": 9.925032680468085e-05, "loss": 3.9693, "step": 360},
    {"epoch": 0.08, "learning_rate": 9.922129176904388e-05, "loss": 4.0137, "step": 365},
    {"epoch": 0.08, "learning_rate": 9.919170950812911e-05, "loss": 3.8965, "step": 370},
    {"epoch": 0.08, "learning_rate": 9.916158035082184e-05, "loss": 3.9472, "step": 375},
    {"epoch": 0.08, "learning_rate": 9.913090463208763e-05, "loss": 4.0117, "step": 380},
    {"epoch": 0.08, "learning_rate": 9.90996826929685e-05, "loss": 3.9192, "step": 385},
    {"epoch": 0.08, "learning_rate": 9.906791488057916e-05, "loss": 3.872, "step": 390},
    {"epoch": 0.08, "learning_rate": 9.903560154810313e-05, "loss": 3.9242, "step": 395},
    {"epoch": 0.08, "learning_rate": 9.900274305478887e-05, "loss": 3.8956, "step": 400},
    {"epoch": 0.08, "learning_rate": 9.896933976594572e-05, "loss": 3.8382, "step": 405},
    {"epoch": 0.09, "learning_rate": 9.893539205293989e-05, "loss": 3.82, "step": 410},
    {"epoch": 0.09, "learning_rate": 9.890090029319028e-05, "loss": 3.7987, "step": 415},
    {"epoch": 0.09, "learning_rate": 9.886586487016433e-05, "loss": 3.8981, "step": 420},
    {"epoch": 0.09, "learning_rate": 9.883028617337378e-05, "loss": 3.8939, "step": 425},
    {"epoch": 0.09, "learning_rate": 9.879416459837022e-05, "loss": 3.8075, "step": 430},
    {"epoch": 0.09, "learning_rate": 9.875750054674082e-05, "loss": 3.7353, "step": 435},
    {"epoch": 0.09, "learning_rate": 9.872029442610382e-05, "loss": 3.8437, "step": 440},
    {"epoch": 0.09, "learning_rate": 9.8682546650104e-05, "loss": 3.7572, "step": 445},
    {"epoch": 0.09, "learning_rate": 9.864425763840802e-05, "loss": 3.8191, "step": 450},
    {"epoch": 0.09, "learning_rate": 9.860542781669988e-05, "loss": 3.7573, "step": 455},
    {"epoch": 0.1, "learning_rate": 9.85660576166761e-05, "loss": 3.714, "step": 460},
    {"epoch": 0.1, "learning_rate": 9.852614747604093e-05, "loss": 3.7538, "step": 465},
    {"epoch": 0.1, "learning_rate": 9.848569783850145e-05, "loss": 3.6779, "step": 470},
    {"epoch": 0.1, "learning_rate": 9.844470915376278e-05, "loss": 3.7681, "step": 475},
    {"epoch": 0.1, "learning_rate": 9.840318187752292e-05, "loss": 3.8159, "step": 480},
    {"epoch": 0.1, "learning_rate": 9.836111647146771e-05, "loss": 3.736, "step": 485},
    {"epoch": 0.1, "learning_rate": 9.831851340326577e-05, "loss": 3.6486, "step": 490},
    {"epoch": 0.1, "learning_rate": 9.82753731465633e-05, "loss": 3.7658, "step": 495},
    {"epoch": 0.1, "learning_rate": 9.823169618097871e-05, "loss": 3.7251, "step": 500},
    {"epoch": 0.1, "learning_rate": 9.81874829920974e-05, "loss": 3.6231, "step": 505},
    {"epoch": 0.11, "learning_rate": 9.814273407146623e-05, "loss": 3.6462, "step": 510},
    {"epoch": 0.11, "learning_rate": 9.809744991658829e-05, "loss": 3.6517, "step": 515},
    {"epoch": 0.11, "learning_rate": 9.805163103091708e-05, "loss": 3.6368, "step": 520},
    {"epoch": 0.11, "learning_rate": 9.800527792385112e-05, "loss": 3.6112, "step": 525},
    {"epoch": 0.11, "learning_rate": 9.79583911107282e-05, "loss": 3.5536, "step": 530},
    {"epoch": 0.11, "learning_rate": 9.791097111281968e-05, "loss": 3.515, "step": 535},
    {"epoch": 0.11, "learning_rate": 9.786301845732467e-05, "loss": 3.4876, "step": 540},
    {"epoch": 0.11, "learning_rate": 9.781453367736418e-05, "loss": 3.6177, "step": 545},
    {"epoch": 0.11, "learning_rate": 9.776551731197524e-05, "loss": 3.5863, "step": 550},
    {"epoch": 0.12, "learning_rate": 9.771596990610478e-05, "loss": 3.5994, "step": 555},
    {"epoch": 0.12, "learning_rate": 9.766589201060372e-05, "loss": 3.5847, "step": 560},
    {"epoch": 0.12, "learning_rate": 9.761528418222077e-05, "loss": 3.4879, "step": 565},
    {"epoch": 0.12, "learning_rate": 9.756414698359624e-05, "loss": 3.5164, "step": 570},
    {"epoch": 0.12, "learning_rate": 9.75124809832558e-05, "loss": 3.5733, "step": 575},
    {"epoch": 0.12, "learning_rate": 9.746028675560413e-05, "loss": 3.5022, "step": 580},
    {"epoch": 0.12, "learning_rate": 9.740756488091861e-05, "loss": 3.4674, "step": 585},
    {"epoch": 0.12, "learning_rate": 9.735431594534277e-05, "loss": 3.4944, "step": 590},
    {"epoch": 0.12, "learning_rate": 9.730054054087983e-05, "loss": 3.4335, "step": 595},
    {"epoch": 0.12, "learning_rate": 9.724623926538612e-05, "loss": 3.5377, "step": 600},
    {"epoch": 0.13, "learning_rate": 9.719141272256443e-05, "loss": 3.5095, "step": 605},
    {"epoch": 0.13, "learning_rate": 9.713606152195726e-05, "loss": 3.4461, "step": 610},
    {"epoch": 0.13, "learning_rate": 9.708018627894011e-05, "loss": 3.4871, "step": 615},
    {"epoch": 0.13, "learning_rate": 9.702378761471456e-05, "loss": 3.4649, "step": 620},
    {"epoch": 0.13, "learning_rate": 9.696686615630146e-05, "loss": 3.492, "step": 625},
    {"epoch": 0.13, "learning_rate": 9.690942253653385e-05, "loss": 3.4281, "step": 630},
    {"epoch": 0.13, "learning_rate": 9.685145739405002e-05, "loss": 3.4253, "step": 635},
    {"epoch": 0.13, "learning_rate": 9.679297137328634e-05, "loss": 3.4106, "step": 640},
    {"epoch": 0.13, "learning_rate": 9.673396512447013e-05, "loss": 3.4202, "step": 645},
    {"epoch": 0.14, "learning_rate": 9.667443930361247e-05, "loss": 3.4491, "step": 650},
    {"epoch": 0.14, "learning_rate": 9.661439457250076e-05, "loss": 3.4569, "step": 655},
    {"epoch": 0.14, "learning_rate": 9.655383159869158e-05, "loss": 3.392, "step": 660},
    {"epoch": 0.14, "learning_rate": 9.649275105550309e-05, "loss": 3.3412, "step": 665},
    {"epoch": 0.14, "learning_rate": 9.643115362200762e-05, "loss": 3.3846, "step": 670},
    {"epoch": 0.14, "learning_rate": 9.636903998302409e-05, "loss": 3.3408, "step": 675},
    {"epoch": 0.14, "learning_rate": 9.630641082911045e-05, "loss": 3.3308, "step": 680},
    {"epoch": 0.14, "learning_rate": 9.624326685655593e-05, "loss": 3.3972, "step": 685},
    {"epoch": 0.14, "learning_rate": 9.617960876737337e-05, "loss": 3.3683, "step": 690},
    {"epoch": 0.14, "learning_rate": 9.611543726929134e-05, "loss": 3.3586, "step": 695},
    {"epoch": 0.15, "learning_rate": 9.605075307574635e-05, "loss": 3.3433, "step": 700},
    {"epoch": 0.15, "learning_rate": 9.598555690587487e-05, "loss": 3.3729, "step": 705},
    {"epoch": 0.15, "learning_rate": 9.591984948450532e-05, "loss": 3.2936, "step": 710},
    {"epoch": 0.15, "learning_rate": 9.585363154215008e-05, "loss": 3.2759, "step": 715},
    {"epoch": 0.15, "learning_rate": 9.578690381499728e-05, "loss": 3.2235, "step": 720},
    {"epoch": 0.15, "learning_rate": 9.571966704490271e-05, "loss": 3.2234, "step": 725},
    {"epoch": 0.15, "learning_rate": 9.565192197938148e-05, "loss": 3.3816, "step": 730},
    {"epoch": 0.15, "learning_rate": 9.558366937159977e-05, "loss": 3.274, "step": 735},
    {"epoch": 0.15, "learning_rate": 9.551490998036646e-05, "loss": 3.2878, "step": 740},
    {"epoch": 0.15, "learning_rate": 9.544564457012463e-05, "loss": 3.1834, "step": 745},
    {"epoch": 0.16, "learning_rate": 9.537587391094314e-05, "loss": 3.2147, "step": 750},
    {"epoch": 0.16, "learning_rate": 9.5305598778508e-05, "loss": 3.2893, "step": 755},
    {"epoch": 0.16, "learning_rate": 9.52348199541138e-05, "loss": 3.2168, "step": 760},
    {"epoch": 0.16, "learning_rate": 9.516353822465504e-05, "loss": 3.3475, "step": 765},
    {"epoch": 0.16, "learning_rate": 9.509175438261726e-05, "loss": 3.2327, "step": 770},
    {"epoch": 0.16, "learning_rate": 9.501946922606838e-05, "loss": 3.2455, "step": 775},
    {"epoch": 0.16, "learning_rate": 9.494668355864973e-05, "loss": 3.2065, "step": 780},
    {"epoch": 0.16, "learning_rate": 9.487339818956716e-05, "loss": 3.2766, "step": 785},
    {"epoch": 0.16, "learning_rate": 9.479961393358203e-05, "loss": 3.2513, "step": 790},
    {"epoch": 0.17, "learning_rate": 9.472533161100215e-05, "loss": 3.2642, "step": 795},
    {"epoch": 0.17, "learning_rate": 9.465055204767265e-05, "loss": 3.1821, "step": 800},
    {"epoch": 0.17, "learning_rate": 9.457527607496685e-05, "loss": 3.1724, "step": 805},
    {"epoch": 0.17, "learning_rate": 9.44995045297769e-05, "loss": 3.201, "step": 810},
    {"epoch": 0.17, "learning_rate": 9.442323825450464e-05, "loss": 3.1444, "step": 815},
    {"epoch": 0.17, "learning_rate": 9.43464780970521e-05, "loss": 3.0952, "step": 820},
    {"epoch": 0.17, "learning_rate": 9.426922491081212e-05, "loss": 3.1379, "step": 825},
    {"epoch": 0.17, "learning_rate": 9.419147955465888e-05, "loss": 3.1045, "step": 830},
    {"epoch": 0.17, "learning_rate": 9.411324289293832e-05, "loss": 3.1535, "step": 835},
    {"epoch": 0.17, "learning_rate": 9.403451579545859e-05, "loss": 3.114, "step": 840},
    {"epoch": 0.18, "learning_rate": 9.395529913748025e-05, "loss": 3.1811, "step": 845},
    {"epoch": 0.18, "learning_rate": 9.387559379970672e-05, "loss": 3.0746, "step": 850},
    {"epoch": 0.18, "learning_rate": 9.379540066827431e-05, "loss": 3.0929, "step": 855},
    {"epoch": 0.18, "learning_rate": 9.371472063474248e-05, "loss": 3.0959, "step": 860},
    {"epoch": 0.18, "learning_rate": 9.363355459608394e-05, "loss": 3.1466, "step": 865},
    {"epoch": 0.18, "learning_rate": 9.355190345467457e-05, "loss": 3.1544, "step": 870},
    {"epoch": 0.18, "learning_rate": 9.346976811828352e-05, "loss": 3.1571, "step": 875},
    {"epoch": 0.18, "learning_rate": 9.338714950006297e-05, "loss": 3.1883, "step": 880},
    {"epoch": 0.18, "learning_rate": 9.330404851853817e-05, "loss": 3.1336, "step": 885},
    {"epoch": 0.18, "learning_rate": 9.3220466097597e-05, "loss": 3.0967, "step": 890},
    {"epoch": 0.19, "learning_rate": 9.313640316647991e-05, "loss": 3.1326, "step": 895},
    {"epoch": 0.19, "learning_rate": 9.305186065976945e-05, "loss": 3.0668, "step": 900},
    {"epoch": 0.19, "learning_rate": 9.296683951737993e-05, "loss": 3.0858, "step": 905},
    {"epoch": 0.19, "learning_rate": 9.288134068454697e-05, "loss": 3.0823, "step": 910},
    {"epoch": 0.19, "learning_rate": 9.2795365111817e-05, "loss": 3.1015, "step": 915},
    {"epoch": 0.19, "learning_rate": 9.270891375503665e-05, "loss": 3.0477, "step": 920},
    {"epoch": 0.19, "learning_rate": 9.262198757534218e-05, "loss": 3.064, "step": 925},
    {"epoch": 0.19, "learning_rate": 9.253458753914874e-05, "loss": 3.0634, "step": 930},
    {"epoch": 0.19, "learning_rate": 9.244671461813969e-05, "loss": 3.066, "step": 935},
    {"epoch": 0.2, "learning_rate": 9.235836978925572e-05, "loss": 3.0452, "step": 940},
    {"epoch": 0.2, "learning_rate": 9.226955403468406e-05, "loss": 3.0402, "step": 945},
    {"epoch": 0.2, "learning_rate": 9.21802683418475e-05, "loss": 3.074, "step": 950},
    {"epoch": 0.2, "learning_rate": 9.209051370339347e-05, "loss": 3.0252, "step": 955},
    {"epoch": 0.2, "learning_rate": 9.200029111718295e-05, "loss": 2.9939, "step": 960},
    {"epoch": 0.2, "learning_rate": 9.190960158627941e-05, "loss": 3.0302, "step": 965},
    {"epoch": 0.2, "learning_rate": 9.181844611893766e-05, "loss": 3.0826, "step": 970},
    {"epoch": 0.2, "learning_rate": 9.172682572859261e-05, "loss": 3.0184, "step": 975},
    {"epoch": 0.2, "learning_rate": 9.163474143384806e-05, "loss": 3.0368, "step": 980},
    {"epoch": 0.2, "learning_rate": 9.154219425846528e-05, "loss": 2.9918, "step": 985},
    {"epoch": 0.21, "learning_rate": 9.144918523135175e-05, "loss": 3.1033, "step": 990},
    {"epoch": 0.21, "learning_rate": 9.13557153865496e-05, "loss": 3.0215, "step": 995},
    {"epoch": 0.21, "learning_rate": 9.12617857632242e-05, "loss": 3.02, "step": 1000},
    {"epoch": 0.21, "learning_rate": 9.116739740565259e-05, "loss": 2.9849, "step": 1005},
    {"epoch": 0.21, "learning_rate": 9.107255136321184e-05, "loss": 3.0294, "step": 1010},
    {"epoch": 0.21, "learning_rate": 9.09772486903674e-05, "loss": 3.0632, "step": 1015},
    {"epoch": 0.21, "learning_rate": 9.08814904466614e-05, "loss": 2.9455, "step": 1020},
    {"epoch": 0.21, "learning_rate": 9.078527769670085e-05, "loss": 2.8982, "step": 1025},
    {"epoch": 0.21, "learning_rate": 9.068861151014575e-05, "loss": 3.0747, "step": 1030},
    {"epoch": 0.22, "learning_rate": 9.05914929616973e-05, "loss": 2.9585, "step": 1035},
    {"epoch": 0.22, "learning_rate": 9.04939231310859e-05, "loss": 2.9938, "step": 1040},
    {"epoch": 0.22, "learning_rate": 9.039590310305914e-05, "loss": 2.9533, "step": 1045},
    {"epoch": 0.22, "learning_rate": 9.029743396736974e-05, "loss": 2.9592, "step": 1050},
    {"epoch": 0.22, "learning_rate": 9.019851681876348e-05, "loss": 2.9796, "step": 1055},
    {"epoch": 0.22, "learning_rate": 9.009915275696693e-05, "loss": 3.0166, "step": 1060},
    {"epoch": 0.22, "learning_rate": 8.999934288667534e-05, "loss": 2.9008, "step": 1065},
    {"epoch": 0.22, "learning_rate": 8.989908831754028e-05, "loss": 2.8915, "step": 1070},
    {"epoch": 0.22, "learning_rate": 8.979839016415735e-05, "loss": 2.9184, "step": 1075},
    {"epoch": 0.22, "learning_rate": 8.969724954605373e-05, "loss": 2.9251, "step": 1080},
    {"epoch": 0.23, "learning_rate": 8.959566758767581e-05, "loss": 2.984, "step": 1085},
    {"epoch": 0.23, "learning_rate": 8.949364541837661e-05, "loss": 2.9141, "step": 1090},
    {"epoch": 0.23, "learning_rate": 8.939118417240329e-05, "loss": 2.9883, "step": 1095},
    {"epoch": 0.23, "learning_rate": 8.92882849888845e-05, "loss": 2.961, "step": 1100},
    {"epoch": 0.23, "learning_rate": 8.918494901181773e-05, "loss": 2.9109, "step": 1105},
    {"epoch": 0.23, "learning_rate": 8.908117739005659e-05, "loss": 2.9364, "step": 1110},
    {"epoch": 0.23, "learning_rate": 8.897697127729805e-05, "loss": 2.9389, "step": 1115},
    {"epoch": 0.23, "learning_rate": 8.887233183206957e-05, "loss": 2.941, "step": 1120},
    {"epoch": 0.23, "learning_rate": 8.876726021771627e-05, "loss": 2.9366, "step": 1125},
    {"epoch": 0.23, "learning_rate": 8.866175760238798e-05, "loss": 2.9375, "step": 1130},
    {"epoch": 0.24, "learning_rate": 8.855582515902625e-05, "loss": 2.8662, "step": 1135},
    {"epoch": 0.24, "learning_rate": 8.844946406535131e-05, "loss": 2.9072, "step": 1140},
    {"epoch": 0.24, "learning_rate": 8.834267550384893e-05, "loss": 2.8851, "step": 1145},
    {"epoch": 0.24, "learning_rate": 8.823546066175741e-05, "loss": 2.9376, "step": 1150},
    {"epoch": 0.24, "learning_rate": 8.81278207310542e-05, "loss": 2.9149, "step": 1155},
    {"epoch": 0.24, "learning_rate": 8.801975690844278e-05, "loss": 2.909, "step": 1160},
    {"epoch": 0.24, "learning_rate": 8.791127039533934e-05, "loss": 2.9776, "step": 1165},
    {"epoch": 0.24, "learning_rate": 8.780236239785935e-05, "loss": 2.8058, "step": 1170},
    {"epoch": 0.24, "learning_rate": 8.76930341268042e-05, "loss": 2.8686, "step": 1175},
    {"epoch": 0.25, "learning_rate": 8.758328679764776e-05, "loss": 2.9272, "step": 1180},
    {"epoch": 0.25, "learning_rate": 8.747312163052284e-05, "loss": 2.8655, "step": 1185},
    {"epoch": 0.25, "learning_rate": 8.736253985020761e-05, "loss": 2.8024, "step": 1190},
    {"epoch": 0.25, "learning_rate": 8.725154268611203e-05, "loss": 2.9482, "step": 1195},
    {"epoch": 0.25, "learning_rate": 8.714013137226411e-05, "loss": 2.9301, "step": 1200},
    {"epoch": 0.25, "learning_rate": 8.702830714729628e-05, "loss": 2.9352, "step": 1205},
    {"epoch": 0.25, "learning_rate": 8.691607125443153e-05, "loss": 2.9802, "step": 1210},
    {"epoch": 0.25, "learning_rate": 8.680342494146967e-05, "loss": 2.8809, "step": 1215},
    {"epoch": 0.25, "learning_rate": 8.66903694607734e-05, "loss": 2.8537, "step": 1220},
    {"epoch": 0.25, "learning_rate": 8.65769060692544e-05, "loss": 2.9273, "step": 1225},
    {"epoch": 0.26, "learning_rate": 8.646303602835936e-05, "loss": 2.8748, "step": 1230},
    {"epoch": 0.26, "learning_rate": 8.634876060405597e-05, "loss": 2.8821, "step": 1235},
    {"epoch": 0.26, "learning_rate": 8.623408106681884e-05, "loss": 2.9078, "step": 1240},
    {"epoch": 0.26, "learning_rate": 8.611899869161535e-05, "loss": 2.8469, "step": 1245},
    {"epoch": 0.26, "learning_rate": 8.600351475789147e-05, "loss": 2.9, "step": 1250},
    {"epoch": 0.26, "learning_rate": 8.588763054955764e-05, "loss": 2.9243, "step": 1255},
    {"epoch": 0.26, "learning_rate": 8.57713473549743e-05, "loss": 2.8607, "step": 1260},
    {"epoch": 0.26, "learning_rate": 8.565466646693778e-05, "loss": 2.885, "step": 1265},
    {"epoch": 0.26, "learning_rate": 8.553758918266578e-05, "loss": 2.8247, "step": 1270},
    {"epoch": 0.27, "learning_rate": 8.5420116803783e-05, "loss": 2.8273, "step": 1275},
    {"epoch": 0.27, "learning_rate": 8.530225063630668e-05, "loss": 2.8478, "step": 1280},
    {"epoch": 0.27, "learning_rate": 8.518399199063205e-05, "loss": 2.813, "step": 1285},
    {"epoch": 0.27, "learning_rate": 8.50653421815178e-05, "loss": 2.8184, "step": 1290},
    {"epoch": 0.27, "learning_rate": 8.494630252807138e-05, "loss": 2.8531, "step": 1295},
    {"epoch": 0.27, "learning_rate": 8.482687435373449e-05, "loss": 2.8971, "step": 1300},
    {"epoch": 0.27, "learning_rate": 8.470705898626817e-05, "loss": 2.8041, "step": 1305},
    {"epoch": 0.27, "learning_rate": 8.458685775773822e-05, "loss": 2.8813, "step": 1310},
    {"epoch": 0.27, "learning_rate": 8.446627200450025e-05, "loss": 2.8455, "step": 1315},
    {"epoch": 0.27, "learning_rate": 8.434530306718493e-05, "loss": 2.8421, "step": 1320},
    {"epoch": 0.28, "learning_rate": 8.4223952290683e-05, "loss": 2.8123, "step": 1325},
    {"epoch": 0.28, "learning_rate": 8.41022210241304e-05, "loss": 2.8004, "step": 1330},
    {"epoch": 0.28, "learning_rate": 8.398011062089316e-05, "loss": 2.8251, "step": 1335},
    {"epoch": 0.28, "learning_rate": 8.385762243855249e-05, "loss": 2.8038, "step": 1340},
    {"epoch": 0.28, "learning_rate": 8.373475783888958e-05, "loss": 2.7219, "step": 1345},
    {"epoch": 0.28, "learning_rate": 8.36115181878705e-05, "loss": 2.8054, "step": 1350},
    {"epoch": 0.28, "learning_rate": 8.348790485563101e-05, "loss": 2.8962, "step": 1355},
    {"epoch": 0.28, "learning_rate": 8.336391921646134e-05, "loss": 2.8574, "step": 1360},
    {"epoch": 0.28, "learning_rate": 8.323956264879089e-05, "loss": 2.8421, "step": 1365},
    {"epoch": 0.28, "learning_rate": 8.311483653517294e-05, "loss": 2.7508, "step": 1370},
    {"epoch": 0.29, "learning_rate": 8.298974226226919e-05, "loss": 2.796, "step": 1375},
    {"epoch": 0.29, "learning_rate": 8.28642812208345e-05, "loss": 2.8186, "step": 1380},
    {"epoch": 0.29, "learning_rate": 8.273845480570123e-05, "loss": 2.724, "step": 1385},
    {"epoch": 0.29, "learning_rate": 8.26122644157639e-05, "loss": 2.8581, "step": 1390},
    {"epoch": 0.29, "learning_rate": 8.248571145396362e-05, "loss": 2.7768, "step": 1395},
    {"epoch": 0.29, "learning_rate": 8.235879732727236e-05, "loss": 2.7922, "step": 1400},
    {"epoch": 0.29, "learning_rate": 8.223152344667745e-05, "loss": 2.8068, "step": 1405},
    {"epoch": 0.29, "learning_rate": 8.21038912271658e-05, "loss": 2.8673, "step": 1410},
    {"epoch": 0.29, "learning_rate": 8.197590208770824e-05, "loss": 2.7707, "step": 1415},
    {"epoch": 0.3, "learning_rate": 8.184755745124371e-05, "loss": 2.8066, "step": 1420},
    {"epoch": 0.3, "learning_rate": 8.171885874466342e-05, "loss": 2.7592, "step": 1425},
    {"epoch": 0.3, "learning_rate": 8.158980739879507e-05, "loss": 2.8379, "step": 1430},
    {"epoch": 0.3, "learning_rate": 8.146040484838677e-05, "loss": 2.8846, "step": 1435},
    {"epoch": 0.3, "learning_rate": 8.133065253209132e-05, "loss": 2.6556, "step": 1440},
    {"epoch": 0.3, "learning_rate": 8.120055189245e-05, "loss": 2.7585, "step": 1445},
    {"epoch": 0.3, "learning_rate": 8.10701043758767e-05, "loss": 2.8079, "step": 1450},
    {"epoch": 0.3, "learning_rate": 8.093931143264174e-05, "loss": 2.8197, "step": 1455},
    {"epoch": 0.3, "learning_rate": 8.080817451685576e-05, "loss": 2.7835, "step": 1460},
    {"epoch": 0.3, "learning_rate": 8.067669508645356e-05, "loss": 2.8843, "step": 1465},
    {"epoch": 0.31, "learning_rate": 8.054487460317797e-05, "loss": 2.7771, "step": 1470},
    {"epoch": 0.31, "learning_rate": 8.041271453256345e-05, "loss": 2.7758, "step": 1475},
    {"epoch": 0.31, "learning_rate": 8.02802163439199e-05, "loss": 2.7681, "step": 1480},
    {"epoch": 0.31, "learning_rate": 8.01473815103163e-05, "loss": 2.8478, "step": 1485},
    {"epoch": 0.31, "learning_rate": 8.001421150856434e-05, "loss": 2.769, "step": 1490},
    {"epoch": 0.31, "learning_rate": 7.988070781920197e-05, "loss": 2.8445, "step": 1495},
    {"epoch": 0.31, "learning_rate": 7.9746871926477e-05, "loss": 2.7668, "step": 1500},
    {"epoch": 0.31, "learning_rate": 7.961270531833052e-05, "loss": 2.7993, "step": 1505},
    {"epoch": 0.31, "learning_rate": 7.947820948638045e-05, "loss": 2.846, "step": 1510},
    {"epoch": 0.31, "learning_rate": 7.934338592590486e-05, "loss": 2.8119, "step": 1515},
    {"epoch": 0.32, "learning_rate": 7.92082361358254e-05, "loss": 2.8292, "step": 1520},
    {"epoch": 0.32, "learning_rate": 7.907276161869065e-05, "loss": 2.8951, "step": 1525},
    {"epoch": 0.32, "learning_rate": 7.893696388065936e-05, "loss": 2.8654, "step": 1530},
    {"epoch": 0.32, "learning_rate": 7.88008444314838e-05, "loss": 2.7954, "step": 1535},
    {"epoch": 0.32, "learning_rate": 7.866440478449283e-05, "loss": 2.7773, "step": 1540},
    {"epoch": 0.32, "learning_rate": 7.852764645657522e-05, "loss": 2.8437, "step": 1545},
    {"epoch": 0.32, "learning_rate": 7.839057096816271e-05, "loss": 2.7838, "step": 1550},
    {"epoch": 0.32, "learning_rate": 7.82531798432131e-05, "loss": 2.7778, "step": 1555},
    {"epoch": 0.32, "learning_rate": 7.811547460919333e-05, "loss": 2.7605, "step": 1560},
    {"epoch": 0.33, "learning_rate": 7.797745679706254e-05, "loss": 2.7999, "step": 1565},
    {"epoch": 0.33, "learning_rate": 7.783912794125496e-05, "loss": 2.6584, "step": 1570},
    {"epoch": 0.33, "learning_rate": 7.770048957966291e-05, "loss": 2.7273, "step": 1575},
    {"epoch": 0.33, "learning_rate": 7.756154325361967e-05, "loss": 2.7116, "step": 1580},
    {"epoch": 0.33, "learning_rate": 7.74222905078824e-05, "loss": 2.6467, "step": 1585},
    {"epoch": 0.33, "learning_rate": 7.728273289061489e-05, "loss": 2.7489, "step": 1590},
    {"epoch": 0.33, "learning_rate": 7.714287195337044e-05, "loss": 2.7581, "step": 1595},
    {"epoch": 0.33, "learning_rate": 7.700270925107448e-05, "loss": 2.7173, "step": 1600},
    {"epoch": 0.33, "learning_rate": 7.686224634200742e-05, "loss": 2.774, "step": 1605},
    {"epoch": 0.33, "learning_rate": 7.672148478778722e-05, "loss": 2.6909, "step": 1610},
    {"epoch": 0.34, "learning_rate": 7.658042615335212e-05, "loss": 2.7975, "step": 1615},
    {"epoch": 0.34, "learning_rate": 7.643907200694318e-05, "loss": 2.6911, "step": 1620},
    {"epoch": 0.34, "learning_rate": 7.629742392008684e-05, "loss": 2.7238, "step": 1625},
    {"epoch": 0.34, "learning_rate": 7.615548346757749e-05, "loss": 2.7475, "step": 1630},
    {"epoch": 0.34, "learning_rate": 7.60132522274599e-05, "loss": 2.7669, "step": 1635},
    {"epoch": 0.34, "learning_rate": 7.587073178101178e-05, "loss": 2.8193, "step": 1640},
    {"epoch": 0.34, "learning_rate": 7.572792371272609e-05, "loss": 2.7584, "step": 1645},
    {"epoch": 0.34, "learning_rate": 7.55848296102935e-05, "loss": 2.7299, "step": 1650},
    {"epoch": 0.34, "learning_rate": 7.544145106458465e-05, "loss": 2.7639, "step": 1655},
    {"epoch": 0.35, "learning_rate": 7.529778966963259e-05, "loss": 2.7609, "step": 1660},
    {"epoch": 0.35, "learning_rate": 7.515384702261496e-05, "loss": 2.6675, "step": 1665},
    {"epoch": 0.35, "learning_rate": 7.500962472383627e-05, "loss": 2.7524, "step": 1670},
    {"epoch": 0.35, "learning_rate": 7.486512437671011e-05, "loss": 2.7107, "step": 1675},
    {"epoch": 0.35, "learning_rate": 7.472034758774128e-05, "loss": 2.7223, "step": 1680},
    {"epoch": 0.35, "learning_rate": 7.457529596650797e-05, "loss": 2.724, "step": 1685},
    {"epoch": 0.35, "learning_rate": 7.442997112564392e-05, "loss": 2.7557, "step": 1690},
    {"epoch": 0.35, "learning_rate": 7.428437468082037e-05, "loss": 2.7122, "step": 1695},
    {"epoch": 0.35, "learning_rate": 7.413850825072817e-05, "loss": 2.6931, "step": 1700},
    {"epoch": 0.35, "learning_rate": 7.39923734570598e-05, "loss": 2.7722, "step": 1705},
    {"epoch": 0.36, "learning_rate": 7.384597192449126e-05, "loss": 2.7353, "step": 1710},
    {"epoch": 0.36, "learning_rate": 7.369930528066412e-05, "loss": 2.7948, "step": 1715},
    {"epoch": 0.36, "learning_rate": 7.355237515616732e-05, "loss": 2.682, "step": 1720},
    {"epoch": 0.36, "learning_rate": 7.340518318451914e-05, "loss": 2.6622, "step": 1725},
    {"epoch": 0.36, "learning_rate": 7.325773100214893e-05, "loss": 2.7158, "step": 1730},
    {"epoch": 0.36, "learning_rate": 7.311002024837899e-05, "loss": 2.7879, "step": 1735},
    {"epoch": 0.36, "learning_rate": 7.296205256540633e-05, "loss": 2.7377, "step": 1740},
    {"epoch": 0.36, "learning_rate": 7.281382959828443e-05, "loss": 2.7309, "step": 1745},
    {"epoch": 0.36, "learning_rate": 7.26653529949049e-05, "loss": 2.6173, "step": 1750},
    {"epoch": 0.36, "learning_rate": 7.25166244059792e-05, "loss": 2.7026, "step": 1755},
    {"epoch": 0.37, "learning_rate": 7.236764548502029e-05, "loss": 2.6857, "step": 1760},
    {"epoch": 0.37, "learning_rate": 7.221841788832421e-05, "loss": 2.7383, "step": 1765},
    {"epoch": 0.37, "learning_rate": 7.206894327495173e-05, "loss": 2.6494, "step": 1770},
    {"epoch": 0.37, "learning_rate": 7.191922330670982e-05, "loss": 2.6985, "step": 1775},
    {"epoch": 0.37, "learning_rate": 7.176925964813326e-05, "loss": 2.6444, "step": 1780},
    {"epoch": 0.37, "learning_rate": 7.161905396646607e-05, "loss": 2.801, "step": 1785},
    {"epoch": 0.37, "learning_rate": 7.146860793164299e-05, "loss": 2.6989, "step": 1790},
    {"epoch": 0.37, "learning_rate": 7.131792321627098e-05, "loss": 2.6961, "step": 1795},
    {"epoch": 0.37, "learning_rate": 7.116700149561048e-05, "loss": 2.6882, "step": 1800},
    {"epoch": 0.38, "learning_rate": 7.101584444755696e-05, "loss": 2.6867, "step": 1805},
    {"epoch": 0.38, "learning_rate": 7.086445375262212e-05, "loss": 2.6346, "step": 1810},
    {"epoch": 0.38, "learning_rate": 7.071283109391528e-05, "loss": 2.6841, "step": 1815},
    {"epoch": 0.38, "learning_rate": 7.056097815712466e-05, "loss": 2.7121, "step": 1820},
    {"epoch": 0.38, "learning_rate": 7.040889663049862e-05, "loss": 2.6328, "step": 1825},
    {"epoch": 0.38, "learning_rate": 7.025658820482693e-05, "loss": 2.7205, "step": 1830},
    {"epoch": 0.38, "learning_rate": 7.010405457342192e-05, "loss": 2.6246, "step": 1835},
    {"epoch": 0.38, "learning_rate": 6.995129743209967e-05, "loss": 2.7023, "step": 1840},
    {"epoch": 0.38, "learning_rate": 6.97983184791612e-05, "loss": 2.6673, "step": 1845},
    {"epoch": 0.38, "learning_rate": 6.964511941537355e-05, "loss": 2.6638, "step": 1850},
    {"epoch": 0.39, "learning_rate": 6.949170194395083e-05, "loss": 2.6786, "step": 1855},
    {"epoch": 0.39, "learning_rate": 6.933806777053536e-05, "loss": 2.679, "step": 1860},
    {"epoch": 0.39, "learning_rate": 6.918421860317872e-05, "loss": 2.6699, "step": 1865},
    {"epoch": 0.39, "learning_rate": 6.903015615232263e-05, "loss": 2.5678, "step": 1870},
    {"epoch": 0.39, "learning_rate": 6.887588213078012e-05, "loss": 2.6155, "step": 1875},
    {"epoch": 0.39, "learning_rate": 6.87213982537163e-05, "loss": 2.6461, "step": 1880},
    {"epoch": 0.39, "learning_rate": 6.856670623862943e-05, "loss": 2.6989, "step": 1885},
    {"epoch": 0.39, "learning_rate": 6.841180780533179e-05, "loss": 2.6236, "step": 1890},
    {"epoch": 0.39, "learning_rate": 6.82567046759305e-05, "loss": 2.716, "step": 1895},
    {"epoch": 0.39, "learning_rate": 6.810139857480844e-05, "loss": 2.6551, "step": 1900},
    {"epoch": 0.4, "learning_rate": 6.794589122860509e-05, "loss": 2.6196, "step": 1905},
    {"epoch": 0.4, "learning_rate": 6.779018436619725e-05, "loss": 2.6293, "step": 1910},
    {"epoch": 0.4, "learning_rate": 6.763427971867992e-05, "loss": 2.6149, "step": 1915},
    {"epoch": 0.4, "learning_rate": 6.747817901934699e-05, "loss": 2.7279, "step": 1920},
    {"epoch": 0.4, "learning_rate": 6.732188400367197e-05, "loss": 2.6773, "step": 1925},
    {"epoch": 0.4, "learning_rate": 6.716539640928871e-05, "loss": 2.6809, "step": 1930},
    {"epoch": 0.4, "learning_rate": 6.70087179759721e-05, "loss": 2.6995, "step": 1935},
    {"epoch": 0.4, "learning_rate": 6.685185044561874e-05, "loss": 2.6429, "step": 1940},
    {"epoch": 0.4, "learning_rate": 6.669479556222747e-05, "loss": 2.6672, "step": 1945},
    {"epoch": 0.41, "learning_rate": 6.653755507188013e-05, "loss": 2.6419, "step": 1950},
    {"epoch": 0.41, "learning_rate": 6.638013072272205e-05, "loss": 2.6948, "step": 1955},
    {"epoch": 0.41, "learning_rate": 6.622252426494259e-05, "loss": 2.6161, "step": 1960},
    {"epoch": 0.41, "learning_rate": 6.606473745075581e-05, "loss": 2.6865, "step": 1965},
    {"epoch": 0.41, "learning_rate": 6.590677203438084e-05, "loss": 2.6181, "step": 1970},
    {"epoch": 0.41, "learning_rate": 6.574862977202252e-05, "loss": 2.6625, "step": 1975},
    {"epoch": 0.41, "learning_rate": 6.559031242185174e-05, "loss": 2.6406, "step": 1980},
    {"epoch": 0.41, "learning_rate": 6.543182174398597e-05, "loss": 2.5716, "step": 1985},
    {"epoch": 0.41, "learning_rate": 6.52731595004697e-05, "loss": 2.705, "step": 1990},
    {"epoch": 0.41, "learning_rate": 6.51143274552548e-05, "loss": 2.6411, "step": 1995},
    {"epoch": 0.42, "learning_rate": 6.495532737418098e-05, "loss": 2.6001, "step": 2000},
    {"epoch": 0.42, "learning_rate": 6.479616102495605e-05, "loss": 2.7229, "step": 2005},
    {"epoch": 0.42, "learning_rate": 6.463683017713638e-05, "loss": 2.6842, "step": 2010},
    {"epoch": 0.42, "learning_rate": 6.447733660210715e-05, "loss": 2.6771, "step": 2015},
    {"epoch": 0.42, "learning_rate": 6.431768207306272e-05, "loss": 2.65, "step": 2020},
    {"epoch": 0.42, "learning_rate": 6.415786836498684e-05, "loss": 2.6842, "step": 2025},
    {"epoch": 0.42, "learning_rate": 6.399789725463298e-05, "loss": 2.6151, "step": 2030},
    {"epoch": 0.42, "learning_rate": 6.383777052050458e-05, "loss": 2.6342, "step": 2035},
    {"epoch": 0.42, "learning_rate": 6.367748994283518e-05, "loss": 2.6278, "step": 2040},
    {"epoch": 0.43, "learning_rate": 6.351705730356877e-05, "loss": 2.6531, "step": 2045},
    {"epoch": 0.43, "learning_rate": 6.335647438633987e-05, "loss": 2.5613, "step": 2050},
    {"epoch": 0.43, "learning_rate": 6.319574297645374e-05, "loss": 2.6218, "step": 2055},
    {"epoch": 0.43, "learning_rate": 6.303486486086654e-05, "loss": 2.6416, "step": 2060},
    {"epoch": 0.43, "learning_rate": 6.287384182816546e-05, "loss": 2.6308, "step": 2065},
    {"epoch": 0.43, "learning_rate": 6.271267566854883e-05, "loss": 2.5941, "step": 2070},
    {"epoch": 0.43, "learning_rate": 6.255136817380618e-05, "loss": 2.6483, "step": 2075},
    {"epoch": 0.43, "learning_rate": 6.23899211372984e-05, "loss": 2.6202, "step": 2080},
    {"epoch": 0.43, "learning_rate": 6.222833635393772e-05, "loss": 2.6073, "step": 2085},
    {"epoch": 0.43, "learning_rate": 6.206661562016782e-05, "loss": 2.6869, "step": 2090},
    {"epoch": 0.44, "learning_rate": 6.190476073394382e-05, "loss": 2.7117, "step": 2095},
    {"epoch": 0.44, "learning_rate": 6.17427734947123e-05, "loss": 2.6469, "step": 2100},
    {"epoch": 0.44, "learning_rate": 6.158065570339127e-05, "loss": 2.7132, "step": 2105},
    {"epoch": 0.44, "learning_rate": 6.141840916235021e-05, "loss": 2.6131, "step": 2110},
    {"epoch": 0.44, "learning_rate": 6.125603567539001e-05, "loss": 2.575, "step": 2115},
    {"epoch": 0.44, "learning_rate": 6.109353704772284e-05, "loss": 2.6202, "step": 2120},
    {"epoch": 0.44, "learning_rate": 6.0930915085952164e-05, "loss": 2.6629, "step": 2125},
    {"epoch": 0.44, "learning_rate": 6.076817159805267e-05, "loss": 2.5923, "step": 2130},
    {"epoch": 0.44, "learning_rate": 6.06053083933501e-05, "loss": 2.5753, "step": 2135},
    {"epoch": 0.44, "learning_rate": 6.044232728250116e-05, "loss": 2.6239, "step": 2140},
    {"epoch": 0.45, "learning_rate": 6.027923007747339e-05, "loss": 2.6523, "step": 2145},
    {"epoch": 0.45, "learning_rate": 6.011601859152506e-05, "loss": 2.6239, "step": 2150},
    {"epoch": 0.45, "learning_rate": 5.995269463918495e-05, "loss": 2.6056, "step": 2155},
    {"epoch": 0.45, "learning_rate": 5.97892600362322e-05, "loss": 2.637, "step": 2160},
    {"epoch": 0.45, "learning_rate": 5.962571659967614e-05, "loss": 2.666, "step": 2165},
    {"epoch": 0.45, "learning_rate": 5.946206614773606e-05, "loss": 2.647, "step": 2170},
    {"epoch": 0.45, "learning_rate": 5.929831049982103e-05, "loss": 2.6821, "step": 2175},
    {"epoch": 0.45, "learning_rate": 5.9134451476509633e-05, "loss": 2.6113, "step": 2180},
    {"epoch": 0.45, "learning_rate": 5.897049089952974e-05, "loss": 2.6378, "step": 2185},
    {"epoch": 0.46, "learning_rate": 5.880643059173826e-05, "loss": 2.6414, "step": 2190},
    {"epoch": 0.46, "learning_rate": 5.864227237710093e-05, "loss": 2.6706, "step": 2195},
    {"epoch": 0.46, "learning_rate": 5.847801808067189e-05, "loss": 2.7012, "step": 2200},
    {"epoch": 0.46, "learning_rate": 5.831366952857357e-05, "loss": 2.618, "step": 2205},
    {"epoch": 0.46, "learning_rate": 5.814922854797622e-05, "loss": 2.595, "step": 2210},
    {"epoch": 0.46, "learning_rate": 5.798469696707775e-05, "loss": 2.6449, "step": 2215},
    {"epoch": 0.46, "learning_rate": 5.782007661508331e-05, "loss": 2.6274, "step": 2220},
    {"epoch": 0.46, "learning_rate": 5.765536932218495e-05, "loss": 2.6248, "step": 2225},
    {"epoch": 0.46, "learning_rate": 5.7490576919541315e-05, "loss": 2.631, "step": 2230},
    {"epoch": 0.46, "learning_rate": 5.732570123925729e-05, "loss": 2.596, "step": 2235},
    {"epoch": 0.47, "learning_rate": 5.7160744114363593e-05, "loss": 2.5762, "step": 2240},
    {"epoch": 0.47, "learning_rate": 5.699570737879641e-05, "loss": 2.6107, "step": 2245},
    {"epoch": 0.47, "learning_rate": 5.683059286737702e-05, "loss": 2.6111, "step": 2250},
    {"epoch": 0.47, "learning_rate": 5.666540241579139e-05, "loss": 2.659, "step": 2255},
    {"epoch": 0.47, "learning_rate": 5.6500137860569766e-05, "loss": 2.6552, "step": 2260},
    {"epoch": 0.47, "learning_rate": 5.633480103906624e-05, "loss": 2.6814, "step": 2265},
    {"epoch": 0.47, "learning_rate": 5.616939378943834e-05, "loss": 2.5766, "step": 2270},
    {"epoch": 0.47, "learning_rate": 5.6003917950626595e-05, "loss": 2.542, "step": 2275},
    {"epoch": 0.47, "learning_rate": 5.583837536233407e-05, "loss": 2.5861, "step": 2280},
    {"epoch": 0.47, "learning_rate": 5.567276786500596e-05, "loss": 2.646, "step": 2285},
    {"epoch": 0.48, "learning_rate": 5.5507097299809054e-05, "loss": 2.6426, "step": 2290},
    {"epoch": 0.48, "learning_rate": 5.534136550861133e-05, "loss": 2.5992, "step": 2295},
    {"epoch": 0.48, "learning_rate": 5.5175574333961465e-05, "loss": 2.6798, "step": 2300},
    {"epoch": 0.48, "learning_rate": 5.500972561906832e-05, "loss": 2.6533, "step": 2305},
    {"epoch": 0.48, "learning_rate": 5.484382120778048e-05, "loss": 2.5703, "step": 2310},
    {"epoch": 0.48, "learning_rate": 5.467786294456575e-05, "loss": 2.5574, "step": 2315},
    {"epoch": 0.48, "learning_rate": 5.451185267449061e-05, "loss": 2.5726, "step": 2320},
    {"epoch": 0.48, "learning_rate": 5.43457922431998e-05, "loss": 2.6345, "step": 2325},
    {"epoch": 0.48, "learning_rate": 5.417968349689566e-05, "loss": 2.575, "step": 2330},
    {"epoch": 0.49, "learning_rate": 5.401352828231772e-05, "loss": 2.6487, "step": 2335},
    {"epoch": 0.49, "learning_rate": 5.384732844672211e-05, "loss": 2.5863, "step": 2340},
    {"epoch": 0.49, "learning_rate": 5.368108583786107e-05, "loss": 2.6253, "step": 2345},
    {"epoch": 0.49, "learning_rate": 5.3514802303962344e-05, "loss": 2.6075, "step": 2350},
    {"epoch": 0.49, "learning_rate": 5.334847969370868e-05, "loss": 2.5637, "step": 2355},
    {"epoch": 0.49, "learning_rate": 5.3182119856217284e-05, "loss": 2.5556, "step": 2360},
    {"epoch": 0.49, "learning_rate": 5.3015724641019214e-05, "loss": 2.6009, "step": 2365},
    {"epoch": 0.49, "learning_rate": 5.284929589803884e-05, "loss": 2.5977, "step": 2370},
    {"epoch": 0.49, "learning_rate": 5.2682835477573336e-05, "loss": 2.5513, "step": 2375},
    {"epoch": 0.49, "learning_rate": 5.2516345230271965e-05, "loss": 2.5894, "step": 2380},
    {"epoch": 0.5, "learning_rate": 5.234982700711569e-05, "loss": 2.6071, "step": 2385},
    {"epoch": 0.5, "learning_rate": 5.218328265939643e-05, "loss": 2.5323, "step": 2390},
    {"epoch": 0.5, "learning_rate": 5.201671403869657e-05, "loss": 2.6103, "step": 2395},
    {"epoch": 0.5, "learning_rate": 5.1850122996868366e-05, "loss": 2.584, "step": 2400},
    {"epoch": 0.5, "learning_rate": 5.168351138601334e-05, "loss": 2.5879, "step": 2405},
    {"epoch": 0.5, "learning_rate": 5.1516881058461675e-05, "loss": 2.5871, "step": 2410},
    {"epoch": 0.5, "learning_rate": 5.135023386675166e-05, "loss": 2.5992, "step": 2415},
    {"epoch": 0.5, "learning_rate": 5.118357166360906e-05, "loss": 2.5349, "step": 2420},
    {"epoch": 0.5, "learning_rate": 5.101689630192655e-05, "loss": 2.6656, "step": 2425},
    {"epoch": 0.51, "learning_rate": 5.085020963474307e-05, "loss": 2.59, "step": 2430},
    {"epoch": 0.51, "learning_rate": 5.068351351522329e-05, "loss": 2.6537, "step": 2435},
    {"epoch": 0.51, "learning_rate": 5.0516809796636935e-05, "loss": 2.56, "step": 2440},
    {"epoch": 0.51, "learning_rate": 5.035010033233821e-05, "loss": 2.5155, "step": 2445},
    {"epoch": 0.51, "learning_rate": 5.018338697574523e-05, "loss": 2.5768, "step": 2450},
    {"epoch": 0.51, "learning_rate": 5.0016671580319354e-05, "loss": 2.6093, "step": 2455},
    {"epoch": 0.51, "learning_rate": 4.984995599954461e-05, "loss": 2.5758, "step": 2460},
    {"epoch": 0.51, "learning_rate": 4.968324208690712e-05, "loss": 2.6337, "step": 2465},
    {"epoch": 0.51, "learning_rate": 4.951653169587441e-05, "loss": 2.6114, "step": 2470},
    {"epoch": 0.51, "learning_rate": 4.93498266798749e-05, "loss": 2.6359, "step": 2475},
    {"epoch": 0.52, "learning_rate": 4.918312889227722e-05, "loss": 2.6163, "step": 2480},
    {"epoch": 0.52, "learning_rate": 4.901644018636966e-05, "loss": 2.5452, "step": 2485},
    {"epoch": 0.52, "learning_rate": 4.8849762415339526e-05, "loss": 2.5188, "step": 2490},
    {"epoch": 0.52, "learning_rate": 4.868309743225256e-05, "loss": 2.5023, "step": 2495},
    {"epoch": 0.52, "learning_rate": 4.851644709003233e-05, "loss": 2.5923, "step": 2500},
    {"epoch": 0.52, "learning_rate": 4.834981324143964e-05, "loss": 2.5956, "step": 2505},
    {"epoch": 0.52, "learning_rate": 4.818319773905191e-05, "loss": 2.5318, "step": 2510},
    {"epoch": 0.52, "learning_rate": 4.801660243524261e-05, "loss": 2.5517, "step": 2515},
    {"epoch": 0.52, "learning_rate": 4.7850029182160626e-05, "loss": 2.5419, "step": 2520},
    {"epoch": 0.52, "learning_rate": 4.768347983170973e-05, "loss": 2.6064, "step": 2525},
    {"epoch": 0.53, "learning_rate": 4.7516956235527884e-05, "loss": 2.5786, "step": 2530},
    {"epoch": 0.53, "learning_rate": 4.735046024496682e-05, "loss": 2.5382, "step": 2535},
    {"epoch": 0.53, "learning_rate": 4.7183993711071286e-05, "loss": 2.5729, "step": 2540},
    {"epoch": 0.53, "learning_rate": 4.7017558484558554e-05, "loss": 2.4836, "step": 2545},
    {"epoch": 0.53, "learning_rate": 4.6851156415797844e-05, "loss": 2.5917, "step": 2550},
    {"epoch": 0.53, "learning_rate": 4.6684789354789746e-05, "loss": 2.554, "step": 2555},
    {"epoch": 0.53, "learning_rate": 4.651845915114563e-05, "loss": 2.5096, "step": 2560},
    {"epoch": 0.53, "learning_rate": 4.6352167654067095e-05, "loss": 2.5738, "step": 2565},
    {"epoch": 0.53, "learning_rate": 4.618591671232544e-05, "loss": 2.5964, "step": 2570},
    {"epoch": 0.54, "learning_rate": 4.601970817424106e-05, "loss": 2.616, "step": 2575},
    {"epoch": 0.54, "learning_rate": 4.585354388766292e-05, "loss": 2.5538, "step": 2580},
    {"epoch": 0.54, "learning_rate": 4.568742569994802e-05, "loss": 2.5416, "step": 2585},
    {"epoch": 0.54, "learning_rate": 4.552135545794086e-05, "loss": 2.5169, "step": 2590},
    {"epoch": 0.54, "learning_rate": 4.535533500795288e-05, "loss": 2.5653, "step": 2595},
    {"epoch": 0.54, "learning_rate": 4.5189366195741953e-05, "loss": 2.5354, "step": 2600},
    {"epoch": 0.54, "learning_rate": 4.502345086649186e-05, "loss": 2.5276, "step": 2605},
    {"epoch": 0.54, "learning_rate": 4.485759086479179e-05, "loss": 2.5914, "step": 2610},
    {"epoch": 0.54, "learning_rate": 4.469178803461579e-05, "loss": 2.4788, "step": 2615},
    {"epoch": 0.54, "learning_rate": 4.4526044219302326e-05, "loss": 2.5158, "step": 2620},
    {"epoch": 0.55, "learning_rate": 4.4360361261533745e-05, "loss": 2.5225, "step": 2625},
    {"epoch": 0.55, "learning_rate": 4.419474100331579e-05, "loss": 2.5303, "step": 2630},
    {"epoch": 0.55, "learning_rate": 4.402918528595715e-05, "loss": 2.5461, "step": 2635},
    {"epoch": 0.55, "learning_rate": 4.386369595004896e-05, "loss": 2.5966, "step": 2640},
    {"epoch": 0.55, "learning_rate": 4.3698274835444354e-05, "loss": 2.5282, "step": 2645},
    {"epoch": 0.55, "learning_rate": 4.3532923781238e-05, "loss": 2.5484, "step": 2650},
    {"epoch": 0.55, "learning_rate": 4.336764462574566e-05, "loss": 2.5657, "step": 2655},
    {"epoch": 0.55, "learning_rate": 4.320243920648376e-05, "loss": 2.5559, "step": 2660},
    {"epoch": 0.55, "learning_rate": 4.303730936014894e-05, "loss": 2.5714, "step": 2665},
    {"epoch": 0.55, "learning_rate": 4.287225692259765e-05, "loss": 2.5882, "step": 2670},
    {"epoch": 0.56, "learning_rate": 4.270728372882575e-05, "loss": 2.5953, "step": 2675},
    {"epoch": 0.56, "learning_rate": 4.254239161294804e-05, "loss": 2.6016, "step": 2680},
    {"epoch": 0.56, "learning_rate": 4.237758240817802e-05, "loss": 2.5299, "step": 2685},
    {"epoch": 0.56, "learning_rate": 4.2212857946807336e-05, "loss": 2.5383, "step": 2690},
    {"epoch": 0.56,
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 2.5878, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 2.5821, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 2.5801, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 2.5879, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 2.5334, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 2.5482, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 2.6068, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 2.5262, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 2.5446, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 2.5103, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 2.5549, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 2.5926, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 2.5357, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 2.5389, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 2.5505, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 2.4951, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 2.5567, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 2.491, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 2.5106, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 2.5632, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 2.5156, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 2.5741, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 2.526, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 2.5703, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 2.4656, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 2.546, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 2.5126, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 2.5554, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 2.5583, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 2.5827, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 2.546, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 2.5355, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 2.4966, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 2.5611, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 2.6172, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 2.5622, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 2.5628, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 2.5526, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 2.5349, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 2.5576, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 2.472, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 2.5237, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 2.6098, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 2.5069, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 2.5266, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 2.5698, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 2.4727, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 2.5459, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 2.4824, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 2.4816, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 2.6093, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 2.5011, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 2.5476, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 2.5065, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 2.4185, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 2.5768, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 2.5758, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 2.5368, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 2.4785, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 2.5797, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 2.5745, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 2.4761, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 2.5106, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 2.5279, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 2.5396, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 2.5156, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 2.5055, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 2.4747, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 2.5335, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 2.4489, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 2.5037, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 2.471, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 2.4567, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 2.4883, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 2.4534, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 2.5136, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 2.4968, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 2.5201, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 2.5269, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 2.5021, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 2.4473, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 2.5478, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 2.4711, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 2.5281, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 2.4918, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 2.5088, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 2.4758, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 2.528, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 2.4255, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 2.4685, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 2.5306, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 2.5112, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 2.4746, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 2.5424, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 2.512, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 2.5009, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 2.4422, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 2.5525, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 2.5018, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 2.4995, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 2.5088, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 2.5089, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 2.5495, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 2.4974, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 2.4555, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 2.5266, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 2.5788, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 2.4584, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 2.4825, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 2.5289, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 2.4568, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 2.4557, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 2.4803, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 2.4752, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 2.5153, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 2.5158, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 2.4906, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 2.4976, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 2.4723, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 2.5291, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 2.5485, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 2.5368, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 2.4903, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 2.5063, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 2.5079, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 2.5061, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 2.5266, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 2.4616, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 2.4788, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 2.4738, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 2.4994, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 2.4562, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 2.4252, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 2.5364, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 2.4273, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 2.5531, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 2.4517, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 2.5296, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 2.4706, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 2.4773, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 2.4383, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 2.4946, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 2.4582, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 2.4347, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 2.4751, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 2.3989, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 2.4765, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 2.5107, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 2.4686, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 2.4089, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 2.4485, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 2.4366, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 2.5288, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 2.5458, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 2.4899, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 2.4295, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 2.5168, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 2.4941, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 2.4933, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 2.4336, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 2.4892, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 2.4893, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 2.5045, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 2.4226, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 2.3991, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 2.452, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 2.4538, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 2.4156, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 2.4516, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 2.478, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 2.506, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 2.4904, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 2.4779, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 2.4793, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 2.4775, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 2.484, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 2.5021, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 2.4005, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 2.4397, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 2.5512, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 2.4878, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 2.5047, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 2.5024, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 2.486, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 2.5127, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 2.4308, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 2.4217, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 2.4883, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 2.4701, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 2.4835, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 2.44, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 2.5276, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 2.4847, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 2.4601, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 2.3937, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 2.4342, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 2.4455, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 2.536, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 2.4733, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 2.4751, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 2.4376, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 2.4833, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 2.492, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 2.4543, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 2.4866, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 2.503, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 2.4523, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 2.4445, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 2.5052, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 2.516, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 2.4626, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 2.4387, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 2.4533, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 2.4612, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 2.505, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 2.526, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 2.4685, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 2.4817, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 2.4658, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 2.4495, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 2.4093, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 2.3968, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 2.451, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 2.4379, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 2.4889, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 2.5502, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 2.4583, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 2.3988, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 2.4369, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 2.5173, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 2.4678, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 2.4289, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 2.4819, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 2.494, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 2.4796, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 2.4549, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 2.4683, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 2.4794, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 2.4335, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 2.4153, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 2.5357, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 2.393, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 2.4078, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 2.4774, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 2.4464, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 2.4593, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 2.4536, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 2.4909, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 2.3645, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 2.4908, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 2.4304, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 2.4529, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 2.4155, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 2.4213, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 2.4998, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 2.4524, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 2.4409, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 2.433, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 2.439, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 2.4606, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 2.5289, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 2.4313, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.1658574908358656e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|