{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3039665636779954,
  "eval_steps": 500,
  "global_step": 1900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.9999964908081455e-05, "loss": 0.7285, "step": 5},
    {"epoch": 0.0, "learning_rate": 4.999985963242432e-05, "loss": 0.6712, "step": 10},
    {"epoch": 0.0, "learning_rate": 4.999968417332415e-05, "loss": 0.6081, "step": 15},
    {"epoch": 0.0, "learning_rate": 4.999943853127351e-05, "loss": 0.6383, "step": 20},
    {"epoch": 0.0, "learning_rate": 4.999912270696202e-05, "loss": 0.6456, "step": 25},
    {"epoch": 0.0, "learning_rate": 4.9998736701276295e-05, "loss": 0.6228, "step": 30},
    {"epoch": 0.01, "learning_rate": 4.99982805153e-05, "loss": 0.6134, "step": 35},
    {"epoch": 0.01, "learning_rate": 4.9997754150313815e-05, "loss": 0.5975, "step": 40},
    {"epoch": 0.01, "learning_rate": 4.999715760779541e-05, "loss": 0.6053, "step": 45},
    {"epoch": 0.01, "learning_rate": 4.9996490889419514e-05, "loss": 0.6064, "step": 50},
    {"epoch": 0.01, "learning_rate": 4.999575399705783e-05, "loss": 0.5947, "step": 55},
    {"epoch": 0.01, "learning_rate": 4.999494693277907e-05, "loss": 0.5839, "step": 60},
    {"epoch": 0.01, "learning_rate": 4.999406969884897e-05, "loss": 0.6106, "step": 65},
    {"epoch": 0.01, "learning_rate": 4.999312229773022e-05, "loss": 0.6146, "step": 70},
    {"epoch": 0.01, "learning_rate": 4.99921047320825e-05, "loss": 0.5659, "step": 75},
    {"epoch": 0.01, "learning_rate": 4.9991017004762496e-05, "loss": 0.5682, "step": 80},
    {"epoch": 0.01, "learning_rate": 4.998985911882384e-05, "loss": 0.6352, "step": 85},
    {"epoch": 0.01, "learning_rate": 4.998863107751711e-05, "loss": 0.6018, "step": 90},
    {"epoch": 0.02, "learning_rate": 4.998733288428987e-05, "loss": 0.6342, "step": 95},
    {"epoch": 0.02, "learning_rate": 4.9985964542786614e-05, "loss": 0.5886, "step": 100},
    {"epoch": 0.02, "learning_rate": 4.998452605684874e-05, "loss": 0.6097, "step": 105},
    {"epoch": 0.02, "learning_rate": 4.998301743051459e-05, "loss": 0.5687, "step": 110},
    {"epoch": 0.02, "learning_rate": 4.998143866801942e-05, "loss": 0.5866, "step": 115},
    {"epoch": 0.02, "learning_rate": 4.997978977379536e-05, "loss": 0.5612, "step": 120},
    {"epoch": 0.02, "learning_rate": 4.997807075247146e-05, "loss": 0.6221, "step": 125},
    {"epoch": 0.02, "learning_rate": 4.997628160887361e-05, "loss": 0.5728, "step": 130},
    {"epoch": 0.02, "learning_rate": 4.997442234802456e-05, "loss": 0.6105, "step": 135},
    {"epoch": 0.02, "learning_rate": 4.997249297514394e-05, "loss": 0.6161, "step": 140},
    {"epoch": 0.02, "learning_rate": 4.997049349564814e-05, "loss": 0.6511, "step": 145},
    {"epoch": 0.02, "learning_rate": 4.996842391515044e-05, "loss": 0.6108, "step": 150},
    {"epoch": 0.02, "learning_rate": 4.996628423946087e-05, "loss": 0.5664, "step": 155},
    {"epoch": 0.03, "learning_rate": 4.996407447458626e-05, "loss": 0.6127, "step": 160},
    {"epoch": 0.03, "learning_rate": 4.99617946267302e-05, "loss": 0.6213, "step": 165},
    {"epoch": 0.03, "learning_rate": 4.995944470229302e-05, "loss": 0.596, "step": 170},
    {"epoch": 0.03, "learning_rate": 4.9957024707871806e-05, "loss": 0.5731, "step": 175},
    {"epoch": 0.03, "learning_rate": 4.995453465026032e-05, "loss": 0.5704, "step": 180},
    {"epoch": 0.03, "learning_rate": 4.995197453644905e-05, "loss": 0.5767, "step": 185},
    {"epoch": 0.03, "learning_rate": 4.994934437362513e-05, "loss": 0.6134, "step": 190},
    {"epoch": 0.03, "learning_rate": 4.9946644169172355e-05, "loss": 0.5919, "step": 195},
    {"epoch": 0.03, "learning_rate": 4.994387393067117e-05, "loss": 0.6031, "step": 200},
    {"epoch": 0.03, "learning_rate": 4.994103366589859e-05, "loss": 0.6236, "step": 205},
    {"epoch": 0.03, "learning_rate": 4.993812338282826e-05, "loss": 0.6307, "step": 210},
    {"epoch": 0.03, "learning_rate": 4.993514308963036e-05, "loss": 0.5618, "step": 215},
    {"epoch": 0.04, "learning_rate": 4.993209279467164e-05, "loss": 0.6093, "step": 220},
    {"epoch": 0.04, "learning_rate": 4.992897250651535e-05, "loss": 0.5744, "step": 225},
    {"epoch": 0.04, "learning_rate": 4.992578223392124e-05, "loss": 0.5844, "step": 230},
    {"epoch": 0.04, "learning_rate": 4.992252198584554e-05, "loss": 0.5358, "step": 235},
    {"epoch": 0.04, "learning_rate": 4.9919191771440905e-05, "loss": 0.6067, "step": 240},
    {"epoch": 0.04, "learning_rate": 4.991579160005644e-05, "loss": 0.6147, "step": 245},
    {"epoch": 0.04, "learning_rate": 4.991232148123761e-05, "loss": 0.6185, "step": 250},
    {"epoch": 0.04, "learning_rate": 4.990878142472628e-05, "loss": 0.5797, "step": 255},
    {"epoch": 0.04, "learning_rate": 4.990517144046064e-05, "loss": 0.58, "step": 260},
    {"epoch": 0.04, "learning_rate": 4.9901491538575185e-05, "loss": 0.6051, "step": 265},
    {"epoch": 0.04, "learning_rate": 4.9897741729400705e-05, "loss": 0.6199, "step": 270},
    {"epoch": 0.04, "learning_rate": 4.9893922023464236e-05, "loss": 0.6173, "step": 275},
    {"epoch": 0.04, "learning_rate": 4.989003243148904e-05, "loss": 0.5907, "step": 280},
    {"epoch": 0.05, "learning_rate": 4.988607296439458e-05, "loss": 0.5818, "step": 285},
    {"epoch": 0.05, "learning_rate": 4.988204363329648e-05, "loss": 0.5767, "step": 290},
    {"epoch": 0.05, "learning_rate": 4.987794444950651e-05, "loss": 0.579, "step": 295},
    {"epoch": 0.05, "learning_rate": 4.987377542453251e-05, "loss": 0.6454, "step": 300},
    {"epoch": 0.05, "learning_rate": 4.986953657007841e-05, "loss": 0.5777, "step": 305},
    {"epoch": 0.05, "learning_rate": 4.986522789804417e-05, "loss": 0.532, "step": 310},
    {"epoch": 0.05, "learning_rate": 4.9860849420525766e-05, "loss": 0.56, "step": 315},
    {"epoch": 0.05, "learning_rate": 4.9856401149815126e-05, "loss": 0.5624, "step": 320},
    {"epoch": 0.05, "learning_rate": 4.985188309840012e-05, "loss": 0.6008, "step": 325},
    {"epoch": 0.05, "learning_rate": 4.9847295278964514e-05, "loss": 0.6425, "step": 330},
    {"epoch": 0.05, "learning_rate": 4.984263770438793e-05, "loss": 0.5907, "step": 335},
    {"epoch": 0.05, "learning_rate": 4.9837910387745845e-05, "loss": 0.5926, "step": 340},
    {"epoch": 0.06, "learning_rate": 4.98331133423095e-05, "loss": 0.6115, "step": 345},
    {"epoch": 0.06, "learning_rate": 4.982824658154589e-05, "loss": 0.5685, "step": 350},
    {"epoch": 0.06, "learning_rate": 4.982331011911774e-05, "loss": 0.5412, "step": 355},
    {"epoch": 0.06, "learning_rate": 4.981830396888344e-05, "loss": 0.6032, "step": 360},
    {"epoch": 0.06, "learning_rate": 4.981322814489703e-05, "loss": 0.568, "step": 365},
    {"epoch": 0.06, "learning_rate": 4.980808266140813e-05, "loss": 0.5835, "step": 370},
    {"epoch": 0.06, "learning_rate": 4.980286753286195e-05, "loss": 0.5633, "step": 375},
    {"epoch": 0.06, "learning_rate": 4.979758277389919e-05, "loss": 0.574, "step": 380},
    {"epoch": 0.06, "learning_rate": 4.979222839935602e-05, "loss": 0.5774, "step": 385},
    {"epoch": 0.06, "learning_rate": 4.9786804424264085e-05, "loss": 0.5608, "step": 390},
    {"epoch": 0.06, "learning_rate": 4.9781310863850405e-05, "loss": 0.5659, "step": 395},
    {"epoch": 0.06, "learning_rate": 4.977574773353732e-05, "loss": 0.6167, "step": 400},
    {"epoch": 0.06, "learning_rate": 4.977011504894252e-05, "loss": 0.5523, "step": 405},
    {"epoch": 0.07, "learning_rate": 4.9764412825878943e-05, "loss": 0.5804, "step": 410},
    {"epoch": 0.07, "learning_rate": 4.975864108035474e-05, "loss": 0.5811, "step": 415},
    {"epoch": 0.07, "learning_rate": 4.975279982857324e-05, "loss": 0.5832, "step": 420},
    {"epoch": 0.07, "learning_rate": 4.9746889086932895e-05, "loss": 0.6035, "step": 425},
    {"epoch": 0.07, "learning_rate": 4.974090887202726e-05, "loss": 0.6077, "step": 430},
    {"epoch": 0.07, "learning_rate": 4.9734859200644905e-05, "loss": 0.6147, "step": 435},
    {"epoch": 0.07, "learning_rate": 4.97287400897694e-05, "loss": 0.6825, "step": 440},
    {"epoch": 0.07, "learning_rate": 4.972255155657925e-05, "loss": 0.5573, "step": 445},
    {"epoch": 0.07, "learning_rate": 4.971629361844785e-05, "loss": 0.5691, "step": 450},
    {"epoch": 0.07, "learning_rate": 4.9709966292943455e-05, "loss": 0.5768, "step": 455},
    {"epoch": 0.07, "learning_rate": 4.970356959782909e-05, "loss": 0.5953, "step": 460},
    {"epoch": 0.07, "learning_rate": 4.9697103551062556e-05, "loss": 0.6207, "step": 465},
    {"epoch": 0.08, "learning_rate": 4.969056817079633e-05, "loss": 0.5845, "step": 470},
    {"epoch": 0.08, "learning_rate": 4.968396347537751e-05, "loss": 0.5719, "step": 475},
    {"epoch": 0.08, "learning_rate": 4.967728948334784e-05, "loss": 0.5889, "step": 480},
    {"epoch": 0.08, "learning_rate": 4.967054621344356e-05, "loss": 0.5574, "step": 485},
    {"epoch": 0.08, "learning_rate": 4.966373368459541e-05, "loss": 0.6037, "step": 490},
    {"epoch": 0.08, "learning_rate": 4.965685191592859e-05, "loss": 0.5707, "step": 495},
    {"epoch": 0.08, "learning_rate": 4.964990092676263e-05, "loss": 0.6586, "step": 500},
    {"epoch": 0.08, "learning_rate": 4.964288073661142e-05, "loss": 0.5559, "step": 505},
    {"epoch": 0.08, "learning_rate": 4.963579136518312e-05, "loss": 0.5609, "step": 510},
    {"epoch": 0.08, "learning_rate": 4.96286328323801e-05, "loss": 0.6081, "step": 515},
    {"epoch": 0.08, "learning_rate": 4.96214051582989e-05, "loss": 0.5724, "step": 520},
    {"epoch": 0.08, "learning_rate": 4.9614108363230135e-05, "loss": 0.5758, "step": 525},
    {"epoch": 0.08, "learning_rate": 4.960674246765851e-05, "loss": 0.6191, "step": 530},
    {"epoch": 0.09, "learning_rate": 4.959930749226269e-05, "loss": 0.5638, "step": 535},
    {"epoch": 0.09, "learning_rate": 4.959180345791528e-05, "loss": 0.5698, "step": 540},
    {"epoch": 0.09, "learning_rate": 4.958423038568274e-05, "loss": 0.5878, "step": 545},
    {"epoch": 0.09, "learning_rate": 4.9576588296825386e-05, "loss": 0.5734, "step": 550},
    {"epoch": 0.09, "learning_rate": 4.956887721279726e-05, "loss": 0.606, "step": 555},
    {"epoch": 0.09, "learning_rate": 4.956109715524608e-05, "loss": 0.5871, "step": 560},
    {"epoch": 0.09, "learning_rate": 4.955324814601324e-05, "loss": 0.6306, "step": 565},
    {"epoch": 0.09, "learning_rate": 4.9545330207133664e-05, "loss": 0.6231, "step": 570},
    {"epoch": 0.09, "learning_rate": 4.953734336083583e-05, "loss": 0.6278, "step": 575},
    {"epoch": 0.09, "learning_rate": 4.952928762954161e-05, "loss": 0.5551, "step": 580},
    {"epoch": 0.09, "learning_rate": 4.952116303586631e-05, "loss": 0.6441, "step": 585},
    {"epoch": 0.09, "learning_rate": 4.951296960261853e-05, "loss": 0.5753, "step": 590},
    {"epoch": 0.1, "learning_rate": 4.9504707352800125e-05, "loss": 0.5458, "step": 595},
    {"epoch": 0.1, "learning_rate": 4.949637630960617e-05, "loss": 0.5668, "step": 600},
    {"epoch": 0.1, "learning_rate": 4.948797649642484e-05, "loss": 0.5727, "step": 605},
    {"epoch": 0.1, "learning_rate": 4.9479507936837364e-05, "loss": 0.628, "step": 610},
    {"epoch": 0.1, "learning_rate": 4.947097065461801e-05, "loss": 0.5958, "step": 615},
    {"epoch": 0.1, "learning_rate": 4.946236467373392e-05, "loss": 0.6173, "step": 620},
    {"epoch": 0.1, "learning_rate": 4.9453690018345144e-05, "loss": 0.5473, "step": 625},
    {"epoch": 0.1, "learning_rate": 4.9444946712804494e-05, "loss": 0.6012, "step": 630},
    {"epoch": 0.1, "learning_rate": 4.943613478165753e-05, "loss": 0.6303, "step": 635},
    {"epoch": 0.1, "learning_rate": 4.9427254249642444e-05, "loss": 0.6188, "step": 640},
    {"epoch": 0.1, "learning_rate": 4.941830514169004e-05, "loss": 0.6079, "step": 645},
    {"epoch": 0.1, "learning_rate": 4.940928748292363e-05, "loss": 0.5589, "step": 650},
    {"epoch": 0.1, "learning_rate": 4.940020129865895e-05, "loss": 0.5652, "step": 655},
    {"epoch": 0.11, "learning_rate": 4.939104661440415e-05, "loss": 0.5415, "step": 660},
    {"epoch": 0.11, "learning_rate": 4.938182345585966e-05, "loss": 0.5657, "step": 665},
    {"epoch": 0.11, "learning_rate": 4.9372531848918145e-05, "loss": 0.6403, "step": 670},
    {"epoch": 0.11, "learning_rate": 4.9363171819664434e-05, "loss": 0.6198, "step": 675},
    {"epoch": 0.11, "learning_rate": 4.935374339437543e-05, "loss": 0.5685, "step": 680},
    {"epoch": 0.11, "learning_rate": 4.934424659952006e-05, "loss": 0.5737, "step": 685},
    {"epoch": 0.11, "learning_rate": 4.933468146175918e-05, "loss": 0.5975, "step": 690},
    {"epoch": 0.11, "learning_rate": 4.9325048007945526e-05, "loss": 0.6033, "step": 695},
    {"epoch": 0.11, "learning_rate": 4.9315346265123594e-05, "loss": 0.6587, "step": 700},
    {"epoch": 0.11, "learning_rate": 4.9305576260529607e-05, "loss": 0.5903, "step": 705},
    {"epoch": 0.11, "learning_rate": 4.929573802159143e-05, "loss": 0.5883, "step": 710},
    {"epoch": 0.11, "learning_rate": 4.9285831575928465e-05, "loss": 0.6151, "step": 715},
    {"epoch": 0.12, "learning_rate": 4.927585695135162e-05, "loss": 0.5687, "step": 720},
    {"epoch": 0.12, "learning_rate": 4.9265814175863186e-05, "loss": 0.6008, "step": 725},
    {"epoch": 0.12, "learning_rate": 4.925570327765678e-05, "loss": 0.6045, "step": 730},
    {"epoch": 0.12, "learning_rate": 4.9245524285117274e-05, "loss": 0.5439, "step": 735},
    {"epoch": 0.12, "learning_rate": 4.9235277226820695e-05, "loss": 0.61, "step": 740},
    {"epoch": 0.12, "learning_rate": 4.922496213153416e-05, "loss": 0.5913, "step": 745},
    {"epoch": 0.12, "learning_rate": 4.9214579028215776e-05, "loss": 0.6255, "step": 750},
    {"epoch": 0.12, "learning_rate": 4.920412794601461e-05, "loss": 0.5834, "step": 755},
    {"epoch": 0.12, "learning_rate": 4.9193608914270515e-05, "loss": 0.6102, "step": 760},
    {"epoch": 0.12, "learning_rate": 4.918302196251415e-05, "loss": 0.5591, "step": 765},
    {"epoch": 0.12, "learning_rate": 4.917236712046682e-05, "loss": 0.556, "step": 770},
    {"epoch": 0.12, "learning_rate": 4.916164441804044e-05, "loss": 0.5774, "step": 775},
    {"epoch": 0.12, "learning_rate": 4.9150853885337426e-05, "loss": 0.6256, "step": 780},
    {"epoch": 0.13, "learning_rate": 4.913999555265062e-05, "loss": 0.6505, "step": 785},
    {"epoch": 0.13, "learning_rate": 4.9129069450463186e-05, "loss": 0.6012, "step": 790},
    {"epoch": 0.13, "learning_rate": 4.911807560944858e-05, "loss": 0.5913, "step": 795},
    {"epoch": 0.13, "learning_rate": 4.910701406047037e-05, "loss": 0.6963, "step": 800},
    {"epoch": 0.13, "learning_rate": 4.909588483458225e-05, "loss": 0.5588, "step": 805},
    {"epoch": 0.13, "learning_rate": 4.9084687963027894e-05, "loss": 0.6078, "step": 810},
    {"epoch": 0.13, "learning_rate": 4.907342347724087e-05, "loss": 0.5816, "step": 815},
    {"epoch": 0.13, "learning_rate": 4.906209140884459e-05, "loss": 0.5989, "step": 820},
    {"epoch": 0.13, "learning_rate": 4.905069178965215e-05, "loss": 0.5568, "step": 825},
    {"epoch": 0.13, "learning_rate": 4.9039224651666325e-05, "loss": 0.6021, "step": 830},
    {"epoch": 0.13, "learning_rate": 4.902769002707942e-05, "loss": 0.6009, "step": 835},
    {"epoch": 0.13, "learning_rate": 4.90160879482732e-05, "loss": 0.5801, "step": 840},
    {"epoch": 0.14, "learning_rate": 4.9004418447818815e-05, "loss": 0.5939, "step": 845},
    {"epoch": 0.14, "learning_rate": 4.899268155847667e-05, "loss": 0.6027, "step": 850},
    {"epoch": 0.14, "learning_rate": 4.898087731319636e-05, "loss": 0.5416, "step": 855},
    {"epoch": 0.14, "learning_rate": 4.896900574511657e-05, "loss": 0.5596, "step": 860},
    {"epoch": 0.14, "learning_rate": 4.8957066887565e-05, "loss": 0.6187, "step": 865},
    {"epoch": 0.14, "learning_rate": 4.894506077405824e-05, "loss": 0.5733, "step": 870},
    {"epoch": 0.14, "learning_rate": 4.893298743830168e-05, "loss": 0.5862, "step": 875},
    {"epoch": 0.14, "learning_rate": 4.892084691418947e-05, "loss": 0.6191, "step": 880},
    {"epoch": 0.14, "learning_rate": 4.8908639235804324e-05, "loss": 0.6258, "step": 885},
    {"epoch": 0.14, "learning_rate": 4.889636443741752e-05, "loss": 0.5504, "step": 890},
    {"epoch": 0.14, "learning_rate": 4.888402255348876e-05, "loss": 0.6241, "step": 895},
    {"epoch": 0.14, "learning_rate": 4.887161361866608e-05, "loss": 0.5839, "step": 900},
    {"epoch": 0.14, "learning_rate": 4.8859137667785735e-05, "loss": 0.5797, "step": 905},
    {"epoch": 0.15, "learning_rate": 4.884659473587213e-05, "loss": 0.6189, "step": 910},
    {"epoch": 0.15, "learning_rate": 4.8833984858137715e-05, "loss": 0.6194, "step": 915},
    {"epoch": 0.15, "learning_rate": 4.8821308069982867e-05, "loss": 0.585, "step": 920},
    {"epoch": 0.15, "learning_rate": 4.880856440699582e-05, "loss": 0.582, "step": 925},
    {"epoch": 0.15, "learning_rate": 4.8795753904952534e-05, "loss": 0.5789, "step": 930},
    {"epoch": 0.15, "learning_rate": 4.878287659981662e-05, "loss": 0.6236, "step": 935},
    {"epoch": 0.15, "learning_rate": 4.8769932527739225e-05, "loss": 0.5614, "step": 940},
    {"epoch": 0.15, "learning_rate": 4.8756921725058934e-05, "loss": 0.5972, "step": 945},
    {"epoch": 0.15, "learning_rate": 4.874384422830167e-05, "loss": 0.5682, "step": 950},
    {"epoch": 0.15, "learning_rate": 4.873070007418059e-05, "loss": 0.6327, "step": 955},
    {"epoch": 0.15, "learning_rate": 4.871748929959598e-05, "loss": 0.5577, "step": 960},
    {"epoch": 0.15, "learning_rate": 4.870421194163515e-05, "loss": 0.56, "step": 965},
    {"epoch": 0.16, "learning_rate": 4.8690868037572346e-05, "loss": 0.6324, "step": 970},
    {"epoch": 0.16, "learning_rate": 4.867745762486861e-05, "loss": 0.5764, "step": 975},
    {"epoch": 0.16, "learning_rate": 4.8663980741171724e-05, "loss": 0.564, "step": 980},
    {"epoch": 0.16, "learning_rate": 4.865043742431605e-05, "loss": 0.5534, "step": 985},
    {"epoch": 0.16, "learning_rate": 4.863682771232248e-05, "loss": 0.5953, "step": 990},
    {"epoch": 0.16, "learning_rate": 4.862315164339829e-05, "loss": 0.4992, "step": 995},
    {"epoch": 0.16, "learning_rate": 4.860940925593703e-05, "loss": 0.6061, "step": 1000},
    {"epoch": 0.16, "learning_rate": 4.859560058851844e-05, "loss": 0.5429, "step": 1005},
    {"epoch": 0.16, "learning_rate": 4.8581725679908317e-05, "loss": 0.6121, "step": 1010},
    {"epoch": 0.16, "learning_rate": 4.856778456905846e-05, "loss": 0.5678, "step": 1015},
    {"epoch": 0.16, "learning_rate": 4.855377729510648e-05, "loss": 0.633, "step": 1020},
    {"epoch": 0.16, "learning_rate": 4.8539703897375755e-05, "loss": 0.6405, "step": 1025},
    {"epoch": 0.16, "learning_rate": 4.852556441537528e-05, "loss": 0.5485, "step": 1030},
    {"epoch": 0.17, "learning_rate": 4.851135888879958e-05, "loss": 0.5974, "step": 1035},
    {"epoch": 0.17, "learning_rate": 4.849708735752859e-05, "loss": 0.582, "step": 1040},
    {"epoch": 0.17, "learning_rate": 4.848274986162754e-05, "loss": 0.5806, "step": 1045},
    {"epoch": 0.17, "learning_rate": 4.846834644134686e-05, "loss": 0.6104, "step": 1050},
    {"epoch": 0.17, "learning_rate": 4.845387713712203e-05, "loss": 0.6216, "step": 1055},
    {"epoch": 0.17, "learning_rate": 4.84393419895735e-05, "loss": 0.612, "step": 1060},
    {"epoch": 0.17, "learning_rate": 4.8424741039506575e-05, "loss": 0.6155, "step": 1065},
    {"epoch": 0.17, "learning_rate": 4.841007432791129e-05, "loss": 0.5703, "step": 1070},
    {"epoch": 0.17, "learning_rate": 4.839534189596228e-05, "loss": 0.6044, "step": 1075},
    {"epoch": 0.17, "learning_rate": 4.8380543785018677e-05, "loss": 0.5819, "step": 1080},
    {"epoch": 0.17, "learning_rate": 4.8365680036624026e-05, "loss": 0.617, "step": 1085},
    {"epoch": 0.17, "learning_rate": 4.835075069250613e-05, "loss": 0.5707, "step": 1090},
    {"epoch": 0.18, "learning_rate": 4.833575579457691e-05, "loss": 0.6348, "step": 1095},
    {"epoch": 0.18, "learning_rate": 4.832069538493237e-05, "loss": 0.6339, "step": 1100},
    {"epoch": 0.18, "learning_rate": 4.830556950585238e-05, "loss": 0.5982, "step": 1105},
    {"epoch": 0.18, "learning_rate": 4.829037819980065e-05, "loss": 0.6028, "step": 1110},
    {"epoch": 0.18, "learning_rate": 4.827512150942454e-05, "loss": 0.5936, "step": 1115},
    {"epoch": 0.18, "learning_rate": 4.8259799477554965e-05, "loss": 0.5548, "step": 1120},
    {"epoch": 0.18, "learning_rate": 4.8244412147206284e-05, "loss": 0.5489, "step": 1125},
    {"epoch": 0.18, "learning_rate": 4.822895956157619e-05, "loss": 0.5788, "step": 1130},
    {"epoch": 0.18, "learning_rate": 4.821344176404554e-05, "loss": 0.582, "step": 1135},
    {"epoch": 0.18, "learning_rate": 4.819785879817827e-05, "loss": 0.5842, "step": 1140},
    {"epoch": 0.18, "learning_rate": 4.8182210707721284e-05, "loss": 0.6084, "step": 1145},
    {"epoch": 0.18, "learning_rate": 4.81664975366043e-05, "loss": 0.5531, "step": 1150},
    {"epoch": 0.18, "learning_rate": 4.8150719328939755e-05, "loss": 0.6259, "step": 1155},
    {"epoch": 0.19, "learning_rate": 4.813487612902264e-05, "loss": 0.5519, "step": 1160},
    {"epoch": 0.19, "learning_rate": 4.811896798133042e-05, "loss": 0.6005, "step": 1165},
    {"epoch": 0.19, "learning_rate": 4.810299493052289e-05, "loss": 0.5513, "step": 1170},
    {"epoch": 0.19, "learning_rate": 4.808695702144206e-05, "loss": 0.5715, "step": 1175},
    {"epoch": 0.19, "learning_rate": 4.8070854299111994e-05, "loss": 0.6532, "step": 1180},
    {"epoch": 0.19, "learning_rate": 4.805468680873874e-05, "loss": 0.5866, "step": 1185},
    {"epoch": 0.19, "learning_rate": 4.803845459571014e-05, "loss": 0.6018, "step": 1190},
    {"epoch": 0.19, "learning_rate": 4.802215770559577e-05, "loss": 0.6073, "step": 1195},
    {"epoch": 0.19, "learning_rate": 4.800579618414676e-05, "loss": 0.5287, "step": 1200},
    {"epoch": 0.19, "learning_rate": 4.798937007729568e-05, "loss": 0.5423, "step": 1205},
    {"epoch": 0.19, "learning_rate": 4.797287943115641e-05, "loss": 0.6094, "step": 1210},
    {"epoch": 0.19, "learning_rate": 4.795632429202405e-05, "loss": 0.5528, "step": 1215},
    {"epoch": 0.2, "learning_rate": 4.793970470637469e-05, "loss": 0.5419, "step": 1220},
    {"epoch": 0.2, "learning_rate": 4.7923020720865414e-05, "loss": 0.5872, "step": 1225},
    {"epoch": 0.2, "learning_rate": 4.790627238233405e-05, "loss": 0.6098, "step": 1230},
    {"epoch": 0.2, "learning_rate": 4.78894597377991e-05, "loss": 0.5886, "step": 1235},
    {"epoch": 0.2, "learning_rate": 4.787258283445962e-05, "loss": 0.5771, "step": 1240},
    {"epoch": 0.2, "learning_rate": 4.7855641719695023e-05, "loss": 0.5927, "step": 1245},
    {"epoch": 0.2, "learning_rate": 4.783863644106502e-05, "loss": 0.6261, "step": 1250},
    {"epoch": 0.2, "learning_rate": 4.782156704630944e-05, "loss": 0.5632, "step": 1255},
    {"epoch": 0.2, "learning_rate": 4.78044335833481e-05, "loss": 0.5792, "step": 1260},
    {"epoch": 0.2, "learning_rate": 4.7787236100280685e-05, "loss": 0.5769, "step": 1265},
    {"epoch": 0.2, "learning_rate": 4.776997464538662e-05, "loss": 0.5745, "step": 1270},
    {"epoch": 0.2, "learning_rate": 4.775264926712489e-05, "loss": 0.6351, "step": 1275},
    {"epoch": 0.2, "learning_rate": 4.7735260014133986e-05, "loss": 0.579, "step": 1280},
    {"epoch": 0.21, "learning_rate": 4.7717806935231665e-05, "loss": 0.5666, "step": 1285},
    {"epoch": 0.21, "learning_rate": 4.7700290079414896e-05, "loss": 0.6307, "step": 1290},
    {"epoch": 0.21, "learning_rate": 4.768270949585968e-05, "loss": 0.5851, "step": 1295},
    {"epoch": 0.21, "learning_rate": 4.7665065233920945e-05, "loss": 0.5708, "step": 1300},
    {"epoch": 0.21, "learning_rate": 4.764735734313236e-05, "loss": 0.5636, "step": 1305},
    {"epoch": 0.21, "learning_rate": 4.7629585873206226e-05, "loss": 0.5088, "step": 1310},
    {"epoch": 0.21, "learning_rate": 4.7611750874033356e-05, "loss": 0.5499, "step": 1315},
    {"epoch": 0.21, "learning_rate": 4.759385239568289e-05, "loss": 0.6735, "step": 1320},
    {"epoch": 0.21, "learning_rate": 4.7575890488402185e-05, "loss": 0.5632, "step": 1325},
    {"epoch": 0.21, "learning_rate": 4.7557865202616656e-05, "loss": 0.6031, "step": 1330},
    {"epoch": 0.21, "learning_rate": 4.753977658892967e-05, "loss": 0.6018, "step": 1335},
    {"epoch": 0.21, "learning_rate": 4.752162469812234e-05, "loss": 0.6149, "step": 1340},
    {"epoch": 0.22, "learning_rate": 4.750340958115346e-05, "loss": 0.5546, "step": 1345},
    {"epoch": 0.22, "learning_rate": 4.7485131289159276e-05, "loss": 0.5916, "step": 1350},
    {"epoch": 0.22, "learning_rate": 4.7466789873453444e-05, "loss": 0.5895, "step": 1355},
    {"epoch": 0.22, "learning_rate": 4.744838538552677e-05, "loss": 0.5661, "step": 1360},
    {"epoch": 0.22, "learning_rate": 4.742991787704719e-05, "loss": 0.6178, "step": 1365},
    {"epoch": 0.22, "learning_rate": 4.741138739985951e-05, "loss": 0.5959, "step": 1370},
    {"epoch": 0.22, "learning_rate": 4.7392794005985326e-05, "loss": 0.616, "step": 1375},
    {"epoch": 0.22, "learning_rate": 4.737413774762287e-05, "loss": 0.5876, "step": 1380},
    {"epoch": 0.22, "learning_rate": 4.735541867714687e-05, "loss": 0.5682, "step": 1385},
    {"epoch": 0.22, "learning_rate": 4.733663684710835e-05, "loss": 0.5658, "step": 1390},
    {"epoch": 0.22, "learning_rate": 4.731779231023456e-05, "loss": 0.5918, "step": 1395},
    {"epoch": 0.22, "learning_rate": 4.7298885119428773e-05, "loss": 0.6711, "step": 1400},
    {"epoch": 0.22, "learning_rate": 4.7279915327770155e-05, "loss": 0.5422, "step": 1405},
    {"epoch": 0.23, "learning_rate": 4.7260882988513624e-05, "loss": 0.6287, "step": 1410},
    {"epoch": 0.23, "learning_rate": 4.724178815508967e-05, "loss": 0.5955, "step": 1415},
    {"epoch": 0.23, "learning_rate": 4.722263088110426e-05, "loss": 0.6084, "step": 1420},
    {"epoch": 0.23, "learning_rate": 4.720341122033862e-05, "loss": 0.6089, "step": 1425},
    {"epoch": 0.23, "learning_rate": 4.718412922674913e-05, "loss": 0.6556, "step": 1430},
    {"epoch": 0.23, "learning_rate": 4.7164784954467166e-05, "loss": 0.5925, "step": 1435},
    {"epoch": 0.23, "learning_rate": 4.714537845779894e-05, "loss": 0.5911, "step": 1440},
    {"epoch": 0.23, "learning_rate": 4.712590979122534e-05, "loss": 0.5733, "step": 1445},
    {"epoch": 0.23, "learning_rate": 4.710637900940181e-05, "loss": 0.5898, "step": 1450},
    {"epoch": 0.23, "learning_rate": 4.708678616715815e-05, "loss": 0.5964, "step": 1455},
    {"epoch": 0.23, "learning_rate": 4.706713131949839e-05, "loss": 0.5852, "step": 1460},
    {"epoch": 0.23, "learning_rate": 4.7047414521600644e-05, "loss": 0.5726, "step": 1465},
    {"epoch": 0.24, "learning_rate": 4.702763582881692e-05, "loss": 0.5815, "step": 1470},
    {"epoch": 0.24, "learning_rate": 4.7007795296673006e-05, "loss": 0.5841, "step": 1475},
    {"epoch": 0.24, "learning_rate": 4.6987892980868296e-05, "loss": 0.5809, "step": 1480},
    {"epoch": 0.24, "learning_rate": 4.696792893727562e-05, "loss": 0.6363, "step": 1485},
    {"epoch": 0.24, "learning_rate": 4.694790322194111e-05, "loss": 0.5972, "step": 1490},
    {"epoch": 0.24, "learning_rate": 4.692781589108402e-05, "loss": 0.5575, "step": 1495},
    {"epoch": 0.24, "learning_rate": 4.690766700109659e-05, "loss": 0.5728, "step": 1500},
    {"epoch": 0.24, "learning_rate": 4.688745660854388e-05, "loss": 0.5417, "step": 1505},
    {"epoch": 0.24, "learning_rate": 4.686718477016361e-05, "loss": 0.6384, "step": 1510},
    {"epoch": 0.24, "learning_rate": 4.684685154286599e-05, "loss": 0.5762, "step": 1515},
    {"epoch": 0.24, "learning_rate": 4.682645698373357e-05, "loss": 0.5872, "step": 1520},
    {"epoch": 0.24, "learning_rate": 4.68060011500211e-05, "loss": 0.5602, "step": 1525},
    {"epoch": 0.24, "learning_rate": 4.678548409915532e-05, "loss": 0.5751, "step": 1530},
    {"epoch": 0.25, "learning_rate": 4.676490588873486e-05, "loss": 0.6352, "step": 1535},
    {"epoch": 0.25, "learning_rate": 4.674426657653003e-05, "loss": 0.587, "step": 1540},
    {"epoch": 0.25, "learning_rate": 4.6723566220482664e-05, "loss": 0.5306, "step": 1545},
    {"epoch": 0.25, "learning_rate": 4.670280487870598e-05, "loss": 0.5265, "step": 1550},
    {"epoch": 0.25, "learning_rate": 4.6681982609484416e-05, "loss": 0.5383, "step": 1555},
    {"epoch": 0.25, "learning_rate": 4.666109947127343e-05, "loss": 0.55, "step": 1560},
    {"epoch": 0.25, "learning_rate": 4.6640155522699374e-05, "loss": 0.5342, "step": 1565},
    {"epoch": 0.25, "learning_rate": 4.661915082255932e-05, "loss": 0.5897, "step": 1570},
    {"epoch": 0.25, "learning_rate": 4.659808542982088e-05, "loss": 0.5637, "step": 1575},
    {"epoch": 0.25, "learning_rate": 4.657695940362207e-05, "loss": 0.5857, "step": 1580},
    {"epoch": 0.25, "learning_rate": 4.65557728032711e-05, "loss": 0.5664, "step": 1585},
    {"epoch": 0.25, "learning_rate": 4.653452568824625e-05, "loss": 0.5778, "step": 1590},
    {"epoch": 0.26, "learning_rate": 4.651321811819568e-05, "loss": 0.5741, "step": 1595},
    {"epoch": 0.26, "learning_rate": 4.649185015293728e-05, "loss": 0.5822, "step": 1600},
    {"epoch": 0.26, "learning_rate": 4.647042185245847e-05, "loss": 0.5923, "step": 1605},
    {"epoch": 0.26, "learning_rate": 4.6448933276916076e-05, "loss": 0.5621, "step": 1610},
    {"epoch": 0.26, "learning_rate": 4.6427384486636113e-05, "loss": 0.5731, "step": 1615},
    {"epoch": 0.26, "learning_rate": 4.640577554211366e-05, "loss": 0.5662, "step": 1620},
    {"epoch": 0.26, "learning_rate": 4.638410650401267e-05, "loss": 0.5628, "step": 1625},
    {"epoch": 0.26, "learning_rate": 4.636237743316578e-05, "loss": 0.5775, "step": 1630},
    {"epoch": 0.26, "learning_rate": 4.634058839057417e-05, "loss": 0.5749, "step": 1635},
    {"epoch": 0.26, "learning_rate": 4.63187394374074e-05, "loss": 0.6108, "step": 1640},
    {"epoch": 0.26, "learning_rate": 4.629683063500319e-05, "loss": 0.5746, "step": 1645},
    {"epoch": 0.26, "learning_rate": 4.6274862044867304e-05, "loss": 0.579, "step": 1650},
    {"epoch": 0.26, "learning_rate": 4.625283372867333e-05, "loss": 0.566, "step": 1655},
    {"epoch": 0.27, "learning_rate": 4.623074574826254e-05, "loss": 0.6109, "step": 1660},
    {"epoch": 0.27, "learning_rate": 4.6208598165643715e-05, "loss": 0.5497, "step": 1665},
    {"epoch": 0.27, "learning_rate": 4.618639104299294e-05, "loss": 0.5226, "step": 1670},
    {"epoch": 0.27, "learning_rate": 4.616412444265345e-05, "loss": 0.5861, "step": 1675},
    {"epoch": 0.27, "learning_rate": 4.614179842713547e-05, "loss": 0.6045, "step": 1680},
    {"epoch": 0.27, "learning_rate": 4.611941305911602e-05, "loss": 0.5793, "step": 1685},
    {"epoch": 0.27, "learning_rate": 4.6096968401438745e-05, "loss": 0.5975, "step": 1690},
    {"epoch": 0.27, "learning_rate": 4.607446451711372e-05, "loss": 0.5727, "step": 1695},
    {"epoch": 0.27, "learning_rate": 4.605190146931731e-05, "loss": 0.5874, "step": 1700},
    {"epoch": 0.27, "learning_rate": 4.602927932139197e-05, "loss": 0.5785, "step": 1705},
    {"epoch": 0.27, "learning_rate": 4.6006598136846056e-05, "loss": 0.5767, "step": 1710},
    {"epoch": 0.27, "learning_rate": 4.598385797935368e-05, "loss": 0.5695, "step": 1715},
    {"epoch": 0.28, "learning_rate": 4.596105891275449e-05, "loss": 0.6196, "step": 1720},
    {"epoch": 0.28, "learning_rate": 4.593820100105355e-05, "loss": 0.6025, "step": 1725},
    {"epoch": 0.28, "learning_rate": 4.591528430842107e-05, "loss": 0.5807, "step": 1730},
    {"epoch": 0.28, "learning_rate": 4.589230889919232e-05, "loss": 0.4916, "step": 1735},
    {"epoch": 0.28, "learning_rate": 4.5869274837867394e-05, "loss": 0.595, "step": 1740},
    {"epoch": 0.28, "learning_rate": 4.5846182189111035e-05, "loss": 0.5592, "step": 1745},
    {"epoch": 0.28, "learning_rate": 4.5823031017752485e-05, "loss": 0.5677, "step": 1750},
    {"epoch": 0.28, "learning_rate": 4.579982138878527e-05, "loss": 0.5988, "step": 1755},
    {"epoch": 0.28, "learning_rate": 4.5776553367367e-05, "loss": 0.5468, "step": 1760},
    {"epoch": 0.28, "learning_rate": 4.575322701881926e-05, "loss": 0.6078, "step": 1765},
    {"epoch": 0.28, "learning_rate": 4.5729842408627334e-05, "loss": 0.5498, "step": 1770},
    {"epoch": 0.28, "learning_rate": 4.5706399602440106e-05, "loss": 0.6135, "step": 1775},
    {"epoch": 0.28, "learning_rate": 4.568289866606981e-05, "loss": 0.568, "step": 1780},
    {"epoch": 0.29, "learning_rate": 4.565933966549189e-05, "loss": 0.6276, "step": 1785},
    {"epoch": 0.29, "learning_rate": 4.5635722666844775e-05, "loss": 0.5823, "step": 1790},
    {"epoch": 0.29, "learning_rate": 4.561204773642974e-05, "loss": 0.6211, "step": 1795},
    {"epoch": 0.29, "learning_rate": 4.558831494071069e-05, "loss": 0.584, "step": 1800},
    {"epoch": 0.29, "learning_rate": 4.556452434631395e-05, "loss": 0.6095, "step": 1805},
    {"epoch": 0.29, "learning_rate": 4.5540676020028145e-05, "loss": 0.5747, "step": 1810},
    {"epoch": 0.29, "learning_rate": 4.5516770028803954e-05, "loss": 0.6151, "step": 1815},
    {"epoch": 0.29, "learning_rate": 4.5492806439753935e-05, "loss": 0.5874, "step": 1820},
    {"epoch": 0.29, "learning_rate": 4.5468785320152365e-05, "loss": 0.5754, "step": 1825},
    {"epoch": 0.29, "learning_rate": 4.5444706737435014e-05, "loss": 0.5963, "step": 1830},
    {"epoch": 0.29, "learning_rate": 4.542057075919897e-05, "loss": 0.597, "step": 1835},
    {"epoch": 0.29, "learning_rate": 4.5396377453202466e-05, "loss": 0.5501, "step": 1840},
    {"epoch": 0.3, "learning_rate": 4.5372126887364655e-05, "loss": 0.5672, "step": 1845},
    {"epoch": 0.3, "learning_rate": 4.534781912976546e-05, "loss": 0.505, "step": 1850},
    {"epoch": 0.3, "learning_rate": 4.5323454248645324e-05, "loss": 0.5872, "step": 1855},
    {"epoch": 0.3, "learning_rate": 4.529903231240511e-05, "loss": 0.6037, "step": 1860},
    {"epoch": 0.3, "learning_rate": 4.52745533896058e-05, "loss": 0.6332, "step": 1865},
    {"epoch": 0.3, "learning_rate": 4.5250017548968404e-05, "loss": 0.5695, "step": 1870},
    {"epoch": 0.3, "learning_rate": 4.522542485937369e-05, "loss": 0.5816, "step": 1875},
    {"epoch": 0.3, "learning_rate": 4.5200775389862026e-05, "loss": 0.5605, "step": 1880},
    {"epoch": 0.3, "learning_rate": 4.51760692096332e-05, "loss": 0.6041, "step": 1885},
    {"epoch": 0.3, "learning_rate": 4.5151306388046175e-05, "loss": 0.5865, "step": 1890},
    {"epoch": 0.3, "learning_rate": 4.512648699461897e-05, "loss": 0.549, "step": 1895},
    {"epoch": 0.3, "learning_rate": 4.510161109902837e-05, "loss": 0.5525, "step": 1900}
  ],
  "logging_steps": 5,
  "max_steps": 9375,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "total_flos": 4.3749938909321626e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}