{ |
|
"best_metric": 4.1215620040893555, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/pp-mod-subj2/lstm/3/checkpoints/checkpoint-763200", |
|
"epoch": 0.025000606015738065, |
|
"eval_steps": 10, |
|
"global_step": 763200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8194, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.5719, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.067, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9956, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.9424, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.8558, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 6.6935, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 6.5846, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 6.4975, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 6.4297, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99161405248948e-05, |
|
"loss": 6.3811, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990775457738428e-05, |
|
"loss": 6.3144, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989936862987376e-05, |
|
"loss": 6.263, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989098268236324e-05, |
|
"loss": 6.1904, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 6.136, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.067, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.035, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.9783, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.9322, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.8985, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.8769, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.8304, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.981552553357229e-05, |
|
"loss": 5.8075, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.980713958606178e-05, |
|
"loss": 5.7724, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979875363855125e-05, |
|
"loss": 5.7408, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9790384069844466e-05, |
|
"loss": 5.7232, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9781998122333946e-05, |
|
"loss": 5.6853, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773612174823426e-05, |
|
"loss": 5.673, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9765242606116635e-05, |
|
"loss": 5.648, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9756856658606115e-05, |
|
"loss": 5.6195, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748470711095595e-05, |
|
"loss": 5.6054, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.974008476358507e-05, |
|
"loss": 5.5823, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.973169881607455e-05, |
|
"loss": 5.5682, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.972331286856403e-05, |
|
"loss": 5.5513, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714926921053515e-05, |
|
"loss": 5.5308, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706540973542995e-05, |
|
"loss": 5.5215, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9698171404836204e-05, |
|
"loss": 5.5139, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9689785457325684e-05, |
|
"loss": 5.4756, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9681399509815164e-05, |
|
"loss": 5.4657, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9673013562304644e-05, |
|
"loss": 5.4519, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9664627614794124e-05, |
|
"loss": 5.4357, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9656241667283604e-05, |
|
"loss": 5.4216, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964787209857681e-05, |
|
"loss": 5.4229, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963948615106629e-05, |
|
"loss": 5.4026, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963110020355577e-05, |
|
"loss": 5.387, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962271425604525e-05, |
|
"loss": 5.3803, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.961434468733847e-05, |
|
"loss": 5.3695, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960595873982795e-05, |
|
"loss": 5.3622, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959758917112116e-05, |
|
"loss": 5.3381, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958920322361064e-05, |
|
"loss": 5.327, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958081727610012e-05, |
|
"loss": 5.3347, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95724313285896e-05, |
|
"loss": 5.3237, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956404538107908e-05, |
|
"loss": 5.3118, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.955565943356856e-05, |
|
"loss": 5.2875, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.954727348605804e-05, |
|
"loss": 5.2966, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.953888753854752e-05, |
|
"loss": 5.275, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530501591037e-05, |
|
"loss": 5.2905, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9522132022330207e-05, |
|
"loss": 5.2563, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9513746074819686e-05, |
|
"loss": 5.2599, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9505360127309166e-05, |
|
"loss": 5.2398, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9496974179798646e-05, |
|
"loss": 5.2303, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948858823228813e-05, |
|
"loss": 5.2318, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948020228477761e-05, |
|
"loss": 5.2149, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947183271607082e-05, |
|
"loss": 5.21, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94634467685603e-05, |
|
"loss": 5.2032, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945506082104978e-05, |
|
"loss": 5.2125, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9446674873539255e-05, |
|
"loss": 5.1849, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9438288926028735e-05, |
|
"loss": 5.1752, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9429902978518215e-05, |
|
"loss": 5.1602, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942153340981143e-05, |
|
"loss": 5.1583, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9413147462300904e-05, |
|
"loss": 5.1737, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9404761514790384e-05, |
|
"loss": 5.1619, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.939637556727987e-05, |
|
"loss": 5.1444, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.938798961976935e-05, |
|
"loss": 5.1404, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937960367225883e-05, |
|
"loss": 5.143, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9371250482355776e-05, |
|
"loss": 5.1359, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9362864534845256e-05, |
|
"loss": 5.1131, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935447858733473e-05, |
|
"loss": 5.118, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.934609263982421e-05, |
|
"loss": 5.1037, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.933770669231369e-05, |
|
"loss": 5.1055, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932932074480317e-05, |
|
"loss": 5.1022, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932093479729265e-05, |
|
"loss": 5.0905, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.931254884978213e-05, |
|
"loss": 5.0925, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930416290227161e-05, |
|
"loss": 5.0832, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295793333564825e-05, |
|
"loss": 5.0675, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287407386054305e-05, |
|
"loss": 5.0665, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279021438543785e-05, |
|
"loss": 5.0716, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9270635491033264e-05, |
|
"loss": 5.0642, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9262249543522744e-05, |
|
"loss": 5.0513, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9253863596012224e-05, |
|
"loss": 5.0386, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9245494027305433e-05, |
|
"loss": 5.0431, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9237108079794913e-05, |
|
"loss": 5.0302, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922873851108812e-05, |
|
"loss": 5.0416, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92203525635776e-05, |
|
"loss": 5.0192, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921196661606708e-05, |
|
"loss": 5.0148, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920358066855656e-05, |
|
"loss": 5.032, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919519472104604e-05, |
|
"loss": 5.0004, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918680877353552e-05, |
|
"loss": 4.9947, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917842282602501e-05, |
|
"loss": 4.9991, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917003687851449e-05, |
|
"loss": 4.9971, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.916165093100397e-05, |
|
"loss": 4.9973, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915328136229718e-05, |
|
"loss": 4.9856, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914489541478666e-05, |
|
"loss": 4.9793, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.913650946727614e-05, |
|
"loss": 4.9719, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912812351976562e-05, |
|
"loss": 4.9706, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911973757225509e-05, |
|
"loss": 4.9546, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911136800354831e-05, |
|
"loss": 4.9543, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.910298205603779e-05, |
|
"loss": 4.9592, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909459610852727e-05, |
|
"loss": 4.9467, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908621016101675e-05, |
|
"loss": 4.9415, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907782421350623e-05, |
|
"loss": 4.9358, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906943826599571e-05, |
|
"loss": 4.9273, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906105231848519e-05, |
|
"loss": 4.9364, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905266637097467e-05, |
|
"loss": 4.922, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9044296802267876e-05, |
|
"loss": 4.9191, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903592723356109e-05, |
|
"loss": 4.9196, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9027541286050565e-05, |
|
"loss": 4.9136, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9019155338540045e-05, |
|
"loss": 4.9097, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9010769391029525e-05, |
|
"loss": 4.9072, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9002383443519005e-05, |
|
"loss": 4.9064, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.899403025361595e-05, |
|
"loss": 4.9048, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898564430610543e-05, |
|
"loss": 4.8852, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.897725835859491e-05, |
|
"loss": 4.8965, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968872411084396e-05, |
|
"loss": 4.8864, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.896048646357387e-05, |
|
"loss": 4.8876, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.895210051606335e-05, |
|
"loss": 4.8696, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.894371456855283e-05, |
|
"loss": 4.8786, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.893532862104231e-05, |
|
"loss": 4.8627, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.892694267353179e-05, |
|
"loss": 4.8685, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891855672602127e-05, |
|
"loss": 4.8463, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891017077851075e-05, |
|
"loss": 4.869, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.890178483100023e-05, |
|
"loss": 4.874, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889343164109717e-05, |
|
"loss": 4.8545, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888504569358665e-05, |
|
"loss": 4.8404, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8876659746076134e-05, |
|
"loss": 4.8522, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8868273798565614e-05, |
|
"loss": 4.8459, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8859887851055094e-05, |
|
"loss": 4.8439, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8851501903544574e-05, |
|
"loss": 4.8368, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8843115956034054e-05, |
|
"loss": 4.8399, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8834730008523534e-05, |
|
"loss": 4.8305, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882637681862047e-05, |
|
"loss": 4.8263, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881799087110995e-05, |
|
"loss": 4.8124, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880960492359943e-05, |
|
"loss": 4.823, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880121897608891e-05, |
|
"loss": 4.8183, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879283302857839e-05, |
|
"loss": 4.8102, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878444708106787e-05, |
|
"loss": 4.8228, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877606113355735e-05, |
|
"loss": 4.8007, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.876769156485057e-05, |
|
"loss": 4.8102, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875930561734005e-05, |
|
"loss": 4.8111, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875091966982953e-05, |
|
"loss": 4.7998, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.812014102935791, |
|
"eval_runtime": 329.0718, |
|
"eval_samples_per_second": 1159.598, |
|
"eval_steps_per_second": 36.238, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874253372231901e-05, |
|
"loss": 4.7881, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873414777480849e-05, |
|
"loss": 4.794, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872576182729797e-05, |
|
"loss": 4.7965, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8717408637394906e-05, |
|
"loss": 4.7873, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709022689884386e-05, |
|
"loss": 4.788, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700636742373866e-05, |
|
"loss": 4.7708, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692250794863346e-05, |
|
"loss": 4.777, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683864847352826e-05, |
|
"loss": 4.7661, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8675478899842306e-05, |
|
"loss": 4.7728, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866710933113552e-05, |
|
"loss": 4.7638, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8658723383625e-05, |
|
"loss": 4.7689, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865035381491821e-05, |
|
"loss": 4.7706, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.864196786740769e-05, |
|
"loss": 4.7564, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.863358191989717e-05, |
|
"loss": 4.7541, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.862519597238665e-05, |
|
"loss": 4.7389, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.861681002487613e-05, |
|
"loss": 4.7494, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860842407736561e-05, |
|
"loss": 4.7428, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860003812985509e-05, |
|
"loss": 4.7369, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.859165218234457e-05, |
|
"loss": 4.7337, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858326623483405e-05, |
|
"loss": 4.7617, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.857488028732352e-05, |
|
"loss": 4.7338, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.856649433981301e-05, |
|
"loss": 4.7344, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.855810839230249e-05, |
|
"loss": 4.7383, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854972244479197e-05, |
|
"loss": 4.7276, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854135287608518e-05, |
|
"loss": 4.7265, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.853296692857466e-05, |
|
"loss": 4.7141, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.852458098106414e-05, |
|
"loss": 4.7234, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.851619503355362e-05, |
|
"loss": 4.718, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85078090860431e-05, |
|
"loss": 4.7051, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849942313853258e-05, |
|
"loss": 4.7132, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849103719102206e-05, |
|
"loss": 4.7046, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.848266762231527e-05, |
|
"loss": 4.7136, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847428167480475e-05, |
|
"loss": 4.7108, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.846589572729423e-05, |
|
"loss": 4.6974, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845750977978371e-05, |
|
"loss": 4.706, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8449123832273194e-05, |
|
"loss": 4.7134, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8440754263566404e-05, |
|
"loss": 4.6825, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8432368316055884e-05, |
|
"loss": 4.6939, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8423982368545363e-05, |
|
"loss": 4.6896, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8415596421034843e-05, |
|
"loss": 4.6815, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840721047352432e-05, |
|
"loss": 4.6821, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83988245260138e-05, |
|
"loss": 4.6906, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839045495730701e-05, |
|
"loss": 4.6842, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838206900979649e-05, |
|
"loss": 4.678, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83736994410897e-05, |
|
"loss": 4.6824, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836531349357918e-05, |
|
"loss": 4.6748, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.835692754606866e-05, |
|
"loss": 4.679, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834854159855814e-05, |
|
"loss": 4.6607, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834015565104763e-05, |
|
"loss": 4.6578, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.674, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832340013483032e-05, |
|
"loss": 4.6727, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83150141873198e-05, |
|
"loss": 4.6614, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830662823980928e-05, |
|
"loss": 4.652, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.829824229229876e-05, |
|
"loss": 4.6634, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828985634478824e-05, |
|
"loss": 4.6453, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828147039727771e-05, |
|
"loss": 4.6713, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.827308444976719e-05, |
|
"loss": 4.6471, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.826469850225667e-05, |
|
"loss": 4.6591, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.825631255474615e-05, |
|
"loss": 4.645, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.824792660723563e-05, |
|
"loss": 4.6375, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823954065972511e-05, |
|
"loss": 4.6416, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8231171091018326e-05, |
|
"loss": 4.6462, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8222785143507806e-05, |
|
"loss": 4.6295, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214399195997286e-05, |
|
"loss": 4.6322, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8206013248486766e-05, |
|
"loss": 4.6472, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8197627300976246e-05, |
|
"loss": 4.6271, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189241353465726e-05, |
|
"loss": 4.6174, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180871784758935e-05, |
|
"loss": 4.6184, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172502216052144e-05, |
|
"loss": 4.6168, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164116268541624e-05, |
|
"loss": 4.6382, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155730321031104e-05, |
|
"loss": 4.6323, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8147344373520584e-05, |
|
"loss": 4.6167, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8138958426010064e-05, |
|
"loss": 4.6206, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813057247849955e-05, |
|
"loss": 4.6233, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812218653098903e-05, |
|
"loss": 4.622, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811380058347851e-05, |
|
"loss": 4.6052, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810543101477172e-05, |
|
"loss": 4.6188, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80970450672612e-05, |
|
"loss": 4.6063, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808865911975068e-05, |
|
"loss": 4.6141, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808027317224016e-05, |
|
"loss": 4.6061, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.807188722472964e-05, |
|
"loss": 4.6047, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806350127721912e-05, |
|
"loss": 4.6082, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80551153297086e-05, |
|
"loss": 4.6081, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804672938219807e-05, |
|
"loss": 4.5911, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803835981349129e-05, |
|
"loss": 4.5996, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802997386598077e-05, |
|
"loss": 4.5972, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802158791847025e-05, |
|
"loss": 4.6029, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801320197095973e-05, |
|
"loss": 4.5948, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.800481602344921e-05, |
|
"loss": 4.5827, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7996446454742424e-05, |
|
"loss": 4.5923, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79880605072319e-05, |
|
"loss": 4.5834, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797967455972138e-05, |
|
"loss": 4.5957, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797128861221086e-05, |
|
"loss": 4.577, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796290266470034e-05, |
|
"loss": 4.5814, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7954533095993546e-05, |
|
"loss": 4.5918, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7946147148483026e-05, |
|
"loss": 4.5816, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7937761200972506e-05, |
|
"loss": 4.5627, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7929375253461986e-05, |
|
"loss": 4.5757, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792098930595147e-05, |
|
"loss": 4.579, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791260335844095e-05, |
|
"loss": 4.5809, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790421741093043e-05, |
|
"loss": 4.5698, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.789584784222364e-05, |
|
"loss": 4.5701, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.788746189471312e-05, |
|
"loss": 4.5639, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.78790759472026e-05, |
|
"loss": 4.5699, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787068999969208e-05, |
|
"loss": 4.5512, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.786232043098529e-05, |
|
"loss": 4.5595, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.785393448347477e-05, |
|
"loss": 4.566, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.784554853596425e-05, |
|
"loss": 4.5617, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.783716258845373e-05, |
|
"loss": 4.5464, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782877664094321e-05, |
|
"loss": 4.555, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782040707223642e-05, |
|
"loss": 4.5441, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7812021124725906e-05, |
|
"loss": 4.5602, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7803635177215386e-05, |
|
"loss": 4.5462, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7795249229704866e-05, |
|
"loss": 4.5378, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7786863282194346e-05, |
|
"loss": 4.5567, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7778493713487555e-05, |
|
"loss": 4.5497, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7770107765977035e-05, |
|
"loss": 4.5439, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7761721818466515e-05, |
|
"loss": 4.5462, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7753335870955995e-05, |
|
"loss": 4.544, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7744966302249204e-05, |
|
"loss": 4.549, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7736580354738684e-05, |
|
"loss": 4.5282, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7728194407228164e-05, |
|
"loss": 4.5482, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7719808459717644e-05, |
|
"loss": 4.5363, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771143889101086e-05, |
|
"loss": 4.5396, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770305294350034e-05, |
|
"loss": 4.5301, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769466699598982e-05, |
|
"loss": 4.5355, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76862810484793e-05, |
|
"loss": 4.5243, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.767791147977251e-05, |
|
"loss": 4.5326, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766952553226199e-05, |
|
"loss": 4.5091, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766113958475147e-05, |
|
"loss": 4.5433, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765275363724095e-05, |
|
"loss": 4.5358, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764436768973043e-05, |
|
"loss": 4.5304, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.763599812102364e-05, |
|
"loss": 4.5148, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762761217351312e-05, |
|
"loss": 4.5308, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76192262260026e-05, |
|
"loss": 4.5304, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761084027849208e-05, |
|
"loss": 4.5232, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760245433098156e-05, |
|
"loss": 4.5159, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7594084762274773e-05, |
|
"loss": 4.5234, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758571519356798e-05, |
|
"loss": 4.5201, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757732924605746e-05, |
|
"loss": 4.5264, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756894329854694e-05, |
|
"loss": 4.5028, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756055735103642e-05, |
|
"loss": 4.5184, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75521714035259e-05, |
|
"loss": 4.5155, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754380183481911e-05, |
|
"loss": 4.5093, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753541588730859e-05, |
|
"loss": 4.5219, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.752702993979807e-05, |
|
"loss": 4.5025, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751864399228755e-05, |
|
"loss": 4.5115, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751027442358077e-05, |
|
"loss": 4.5097, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750188847607025e-05, |
|
"loss": 4.5096, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.525084018707275, |
|
"eval_runtime": 315.6819, |
|
"eval_samples_per_second": 1208.783, |
|
"eval_steps_per_second": 37.775, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.749350252855973e-05, |
|
"loss": 4.4977, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.748511658104921e-05, |
|
"loss": 4.5038, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.747673063353868e-05, |
|
"loss": 4.5097, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746834468602816e-05, |
|
"loss": 4.5055, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745995873851764e-05, |
|
"loss": 4.5043, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745157279100712e-05, |
|
"loss": 4.4878, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.74431868434966e-05, |
|
"loss": 4.4965, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.743480089598608e-05, |
|
"loss": 4.4882, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.742641494847556e-05, |
|
"loss": 4.4984, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.741802900096504e-05, |
|
"loss": 4.4904, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740964305345452e-05, |
|
"loss": 4.4977, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401257105944e-05, |
|
"loss": 4.5, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7392887537237216e-05, |
|
"loss": 4.4825, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7384501589726696e-05, |
|
"loss": 4.4876, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7376115642216176e-05, |
|
"loss": 4.4701, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7367729694705656e-05, |
|
"loss": 4.4828, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7359343747195135e-05, |
|
"loss": 4.4828, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7350957799684615e-05, |
|
"loss": 4.4747, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7342571852174095e-05, |
|
"loss": 4.4752, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733418590466357e-05, |
|
"loss": 4.5013, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732579995715305e-05, |
|
"loss": 4.4784, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731741400964253e-05, |
|
"loss": 4.4767, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730902806213201e-05, |
|
"loss": 4.4837, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730065849342522e-05, |
|
"loss": 4.4802, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7292288924718433e-05, |
|
"loss": 4.4735, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.728390297720791e-05, |
|
"loss": 4.4641, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727551702969739e-05, |
|
"loss": 4.4752, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726713108218687e-05, |
|
"loss": 4.4674, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725876151348009e-05, |
|
"loss": 4.4622, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.725037556596957e-05, |
|
"loss": 4.4703, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724198961845904e-05, |
|
"loss": 4.4605, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.723360367094852e-05, |
|
"loss": 4.4724, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.722523410224174e-05, |
|
"loss": 4.4672, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721684815473121e-05, |
|
"loss": 4.4577, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720846220722069e-05, |
|
"loss": 4.4683, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720007625971017e-05, |
|
"loss": 4.4789, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719169031219965e-05, |
|
"loss": 4.4465, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.71833371222966e-05, |
|
"loss": 4.463, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717495117478608e-05, |
|
"loss": 4.4526, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.716656522727556e-05, |
|
"loss": 4.4549, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715817927976504e-05, |
|
"loss": 4.4495, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7149793332254516e-05, |
|
"loss": 4.464, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7141407384743996e-05, |
|
"loss": 4.4593, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7133021437233476e-05, |
|
"loss": 4.4522, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7124635489722956e-05, |
|
"loss": 4.4556, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7116249542212436e-05, |
|
"loss": 4.4476, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7107863594701916e-05, |
|
"loss": 4.4602, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7099494025995125e-05, |
|
"loss": 4.4401, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7091108078484605e-05, |
|
"loss": 4.4399, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.708272213097409e-05, |
|
"loss": 4.4537, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.707433618346357e-05, |
|
"loss": 4.4514, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.706595023595305e-05, |
|
"loss": 4.4435, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.705756428844253e-05, |
|
"loss": 4.4392, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704917834093201e-05, |
|
"loss": 4.4477, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704079239342149e-05, |
|
"loss": 4.4303, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70324228247147e-05, |
|
"loss": 4.4551, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702403687720418e-05, |
|
"loss": 4.439, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.701565092969366e-05, |
|
"loss": 4.4472, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.700726498218314e-05, |
|
"loss": 4.4412, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699889541347635e-05, |
|
"loss": 4.4306, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699050946596583e-05, |
|
"loss": 4.4325, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6982139897259045e-05, |
|
"loss": 4.4406, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6973753949748525e-05, |
|
"loss": 4.4217, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6965368002238005e-05, |
|
"loss": 4.4287, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6956982054727485e-05, |
|
"loss": 4.4388, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6948612486020694e-05, |
|
"loss": 4.4302, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6940226538510174e-05, |
|
"loss": 4.4158, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6931840590999654e-05, |
|
"loss": 4.4181, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6923454643489134e-05, |
|
"loss": 4.4162, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6915068695978614e-05, |
|
"loss": 4.4424, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6906682748468094e-05, |
|
"loss": 4.4303, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.68983131797613e-05, |
|
"loss": 4.4207, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688992723225078e-05, |
|
"loss": 4.4291, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688154128474026e-05, |
|
"loss": 4.4251, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.687315533722974e-05, |
|
"loss": 4.4287, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.686480214732669e-05, |
|
"loss": 4.4129, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.685641619981617e-05, |
|
"loss": 4.4276, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.684803025230565e-05, |
|
"loss": 4.4204, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683964430479513e-05, |
|
"loss": 4.4213, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683125835728461e-05, |
|
"loss": 4.4146, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.682287240977409e-05, |
|
"loss": 4.4177, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.681448646226357e-05, |
|
"loss": 4.4211, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.680610051475305e-05, |
|
"loss": 4.4231, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.679773094604626e-05, |
|
"loss": 4.4079, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678934499853574e-05, |
|
"loss": 4.4133, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678097542982895e-05, |
|
"loss": 4.4108, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.677258948231843e-05, |
|
"loss": 4.4175, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676420353480791e-05, |
|
"loss": 4.4158, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.675581758729739e-05, |
|
"loss": 4.397, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.674743163978687e-05, |
|
"loss": 4.4098, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6739045692276346e-05, |
|
"loss": 4.4053, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6730659744765826e-05, |
|
"loss": 4.4155, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6722273797255305e-05, |
|
"loss": 4.3952, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671392060735225e-05, |
|
"loss": 4.4046, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670553465984173e-05, |
|
"loss": 4.4156, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669714871233121e-05, |
|
"loss": 4.4032, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668876276482069e-05, |
|
"loss": 4.3873, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668037681731017e-05, |
|
"loss": 4.4005, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.667199086979965e-05, |
|
"loss": 4.4062, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.666360492228913e-05, |
|
"loss": 4.4072, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665521897477861e-05, |
|
"loss": 4.3998, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664684940607182e-05, |
|
"loss": 4.3928, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66384634585613e-05, |
|
"loss": 4.3914, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663007751105078e-05, |
|
"loss": 4.3985, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.662169156354026e-05, |
|
"loss": 4.3828, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.661332199483347e-05, |
|
"loss": 4.3894, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.660493604732295e-05, |
|
"loss": 4.3959, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.659655009981243e-05, |
|
"loss": 4.3946, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.658816415230191e-05, |
|
"loss": 4.3771, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657977820479139e-05, |
|
"loss": 4.3911, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6571408636084604e-05, |
|
"loss": 4.379, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6563022688574084e-05, |
|
"loss": 4.3901, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6554636741063564e-05, |
|
"loss": 4.3855, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6546250793553044e-05, |
|
"loss": 4.3776, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6537864846042524e-05, |
|
"loss": 4.3917, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652949527733573e-05, |
|
"loss": 4.3914, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652110932982521e-05, |
|
"loss": 4.3794, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651272338231469e-05, |
|
"loss": 4.3851, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.650433743480417e-05, |
|
"loss": 4.3853, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.649595148729365e-05, |
|
"loss": 4.3855, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.648756553978313e-05, |
|
"loss": 4.3723, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647919597107634e-05, |
|
"loss": 4.3868, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647081002356582e-05, |
|
"loss": 4.3805, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.646242407605531e-05, |
|
"loss": 4.3813, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.645403812854479e-05, |
|
"loss": 4.3768, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.644565218103427e-05, |
|
"loss": 4.3836, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.643728261232748e-05, |
|
"loss": 4.3634, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642889666481696e-05, |
|
"loss": 4.3777, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642051071730644e-05, |
|
"loss": 4.358, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.641212476979592e-05, |
|
"loss": 4.3882, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64037388222854e-05, |
|
"loss": 4.379, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.639535287477488e-05, |
|
"loss": 4.3786, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6386983306068086e-05, |
|
"loss": 4.3635, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6378597358557566e-05, |
|
"loss": 4.3795, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6370211411047046e-05, |
|
"loss": 4.3821, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6361825463536526e-05, |
|
"loss": 4.3709, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6353439516026006e-05, |
|
"loss": 4.3677, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6345053568515486e-05, |
|
"loss": 4.376, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6336667621004966e-05, |
|
"loss": 4.3697, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6328281673494446e-05, |
|
"loss": 4.3769, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6319912104787655e-05, |
|
"loss": 4.3581, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6311526157277135e-05, |
|
"loss": 4.3689, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6303140209766615e-05, |
|
"loss": 4.367, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6294754262256095e-05, |
|
"loss": 4.36, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6286368314745575e-05, |
|
"loss": 4.379, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6277998746038784e-05, |
|
"loss": 4.3548, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6269612798528264e-05, |
|
"loss": 4.3656, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6261226851017744e-05, |
|
"loss": 4.3688, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.625284090350723e-05, |
|
"loss": 4.3616, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.390535354614258, |
|
"eval_runtime": 309.1781, |
|
"eval_samples_per_second": 1234.211, |
|
"eval_steps_per_second": 38.57, |
|
"step": 228960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624445495599671e-05, |
|
"loss": 4.3619, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.623606900848619e-05, |
|
"loss": 4.3586, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.622768306097567e-05, |
|
"loss": 4.3688, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621929711346515e-05, |
|
"loss": 4.3645, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621094392356209e-05, |
|
"loss": 4.3632, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.620255797605157e-05, |
|
"loss": 4.3512, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.619417202854105e-05, |
|
"loss": 4.3577, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.618578608103053e-05, |
|
"loss": 4.3412, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.617740013352001e-05, |
|
"loss": 4.3647, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616901418600949e-05, |
|
"loss": 4.3509, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616062823849897e-05, |
|
"loss": 4.3606, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.615224229098845e-05, |
|
"loss": 4.3617, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.614390547988913e-05, |
|
"loss": 4.3501, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.61355195323786e-05, |
|
"loss": 4.3451, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.612713358486808e-05, |
|
"loss": 4.3406, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611874763735756e-05, |
|
"loss": 4.3426, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611036168984704e-05, |
|
"loss": 4.3497, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.610197574233652e-05, |
|
"loss": 4.3393, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6093589794826e-05, |
|
"loss": 4.342, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.608520384731548e-05, |
|
"loss": 4.365, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.607681789980496e-05, |
|
"loss": 4.3468, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606843195229444e-05, |
|
"loss": 4.3462, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606004600478392e-05, |
|
"loss": 4.3521, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.60516600572734e-05, |
|
"loss": 4.3476, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.604327410976288e-05, |
|
"loss": 4.3424, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.603488816225236e-05, |
|
"loss": 4.332, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.602650221474184e-05, |
|
"loss": 4.3468, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.601813264603506e-05, |
|
"loss": 4.3322, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.600974669852454e-05, |
|
"loss": 4.3356, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.600136075101402e-05, |
|
"loss": 4.3365, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.599297480350349e-05, |
|
"loss": 4.3327, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.598458885599297e-05, |
|
"loss": 4.3469, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.597620290848245e-05, |
|
"loss": 4.3376, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.596781696097193e-05, |
|
"loss": 4.3297, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595944739226514e-05, |
|
"loss": 4.3409, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595106144475462e-05, |
|
"loss": 4.3491, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.59426754972441e-05, |
|
"loss": 4.327, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5934289549733587e-05, |
|
"loss": 4.3376, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5925903602223066e-05, |
|
"loss": 4.3237, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5917517654712546e-05, |
|
"loss": 4.3376, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5909131707202026e-05, |
|
"loss": 4.3221, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5900762138495235e-05, |
|
"loss": 4.3409, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5892376190984715e-05, |
|
"loss": 4.3348, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5883990243474195e-05, |
|
"loss": 4.3297, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5875604295963675e-05, |
|
"loss": 4.3344, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5867218348453155e-05, |
|
"loss": 4.3246, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5858832400942635e-05, |
|
"loss": 4.3377, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5850446453432115e-05, |
|
"loss": 4.3206, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5842076884725324e-05, |
|
"loss": 4.3166, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5833690937214804e-05, |
|
"loss": 4.3336, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.582532136850802e-05, |
|
"loss": 4.3274, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.58169354209975e-05, |
|
"loss": 4.3286, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580854947348698e-05, |
|
"loss": 4.3156, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580016352597646e-05, |
|
"loss": 4.3279, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.579177757846594e-05, |
|
"loss": 4.3121, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.578339163095542e-05, |
|
"loss": 4.3341, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.57750056834449e-05, |
|
"loss": 4.3193, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.576661973593438e-05, |
|
"loss": 4.331, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.575823378842385e-05, |
|
"loss": 4.3242, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.574984784091333e-05, |
|
"loss": 4.3162, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.574146189340281e-05, |
|
"loss": 4.3129, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.573309232469602e-05, |
|
"loss": 4.3232, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.572470637718551e-05, |
|
"loss": 4.3068, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.571632042967499e-05, |
|
"loss": 4.3149, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.570793448216447e-05, |
|
"loss": 4.3203, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569954853465395e-05, |
|
"loss": 4.3192, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569116258714343e-05, |
|
"loss": 4.2983, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.568277663963291e-05, |
|
"loss": 4.3053, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.567439069212239e-05, |
|
"loss": 4.3011, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.56660211234156e-05, |
|
"loss": 4.3307, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.565765155470881e-05, |
|
"loss": 4.3155, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564928198600202e-05, |
|
"loss": 4.3107, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5640896038491496e-05, |
|
"loss": 4.3162, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5632510090980976e-05, |
|
"loss": 4.3112, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.562412414347046e-05, |
|
"loss": 4.3236, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561573819595994e-05, |
|
"loss": 4.2978, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560735224844942e-05, |
|
"loss": 4.3194, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55989663009389e-05, |
|
"loss": 4.3067, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559058035342838e-05, |
|
"loss": 4.3104, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558219440591786e-05, |
|
"loss": 4.3086, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557380845840734e-05, |
|
"loss": 4.3049, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.556542251089682e-05, |
|
"loss": 4.3138, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55570365633863e-05, |
|
"loss": 4.3098, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554866699467951e-05, |
|
"loss": 4.2979, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554028104716899e-05, |
|
"loss": 4.3081, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553189509965847e-05, |
|
"loss": 4.2969, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552350915214795e-05, |
|
"loss": 4.3125, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551513958344116e-05, |
|
"loss": 4.3055, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550675363593064e-05, |
|
"loss": 4.2895, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5498384067223856e-05, |
|
"loss": 4.3056, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5489998119713336e-05, |
|
"loss": 4.2966, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5481612172202816e-05, |
|
"loss": 4.3059, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5473226224692296e-05, |
|
"loss": 4.2914, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5464856655985505e-05, |
|
"loss": 4.2989, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5456470708474985e-05, |
|
"loss": 4.3084, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5448084760964465e-05, |
|
"loss": 4.295, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5439698813453945e-05, |
|
"loss": 4.2869, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431312865943425e-05, |
|
"loss": 4.2944, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5422926918432905e-05, |
|
"loss": 4.2938, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414540970922385e-05, |
|
"loss": 4.3072, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406171402215594e-05, |
|
"loss": 4.2941, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539778545470508e-05, |
|
"loss": 4.2917, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538939950719456e-05, |
|
"loss": 4.2891, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538101355968404e-05, |
|
"loss": 4.2975, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5372627612173514e-05, |
|
"loss": 4.2756, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5364241664662994e-05, |
|
"loss": 4.2902, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535587209595621e-05, |
|
"loss": 4.2913, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534748614844569e-05, |
|
"loss": 4.2937, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533910020093516e-05, |
|
"loss": 4.2715, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533071425342464e-05, |
|
"loss": 4.2916, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532232830591412e-05, |
|
"loss": 4.2846, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53139423584036e-05, |
|
"loss": 4.2822, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530555641089308e-05, |
|
"loss": 4.289, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52971868421863e-05, |
|
"loss": 4.2737, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528880089467578e-05, |
|
"loss": 4.296, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528043132596899e-05, |
|
"loss": 4.29, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.527204537845847e-05, |
|
"loss": 4.2812, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526365943094795e-05, |
|
"loss": 4.2888, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.525527348343743e-05, |
|
"loss": 4.2823, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.524688753592691e-05, |
|
"loss": 4.2887, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523850158841639e-05, |
|
"loss": 4.271, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523011564090587e-05, |
|
"loss": 4.2928, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522172969339535e-05, |
|
"loss": 4.2794, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5213376503492285e-05, |
|
"loss": 4.2817, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520499055598177e-05, |
|
"loss": 4.2806, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519660460847125e-05, |
|
"loss": 4.2875, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518821866096073e-05, |
|
"loss": 4.2726, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517983271345021e-05, |
|
"loss": 4.2764, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517144676593969e-05, |
|
"loss": 4.2649, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516306081842917e-05, |
|
"loss": 4.2892, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.515467487091865e-05, |
|
"loss": 4.2832, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.514632168101559e-05, |
|
"loss": 4.2832, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.513793573350507e-05, |
|
"loss": 4.2689, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512954978599455e-05, |
|
"loss": 4.2828, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512116383848403e-05, |
|
"loss": 4.2855, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511277789097351e-05, |
|
"loss": 4.2759, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.510439194346299e-05, |
|
"loss": 4.2721, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509600599595247e-05, |
|
"loss": 4.2811, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5087620048441956e-05, |
|
"loss": 4.2792, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5079234100931436e-05, |
|
"loss": 4.2807, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5070864532224645e-05, |
|
"loss": 4.2649, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062478584714125e-05, |
|
"loss": 4.2765, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5054092637203605e-05, |
|
"loss": 4.273, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5045706689693085e-05, |
|
"loss": 4.2722, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5037337120986294e-05, |
|
"loss": 4.2801, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5028967552279503e-05, |
|
"loss": 4.266, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5020581604768983e-05, |
|
"loss": 4.2743, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501219565725846e-05, |
|
"loss": 4.2713, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500380970974794e-05, |
|
"loss": 4.2752, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.3080854415893555, |
|
"eval_runtime": 304.2229, |
|
"eval_samples_per_second": 1254.314, |
|
"eval_steps_per_second": 39.198, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.499542376223742e-05, |
|
"loss": 4.2653, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.498703781472691e-05, |
|
"loss": 4.2706, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497865186721639e-05, |
|
"loss": 4.2789, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497026591970587e-05, |
|
"loss": 4.2699, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.496187997219535e-05, |
|
"loss": 4.2773, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.495349402468482e-05, |
|
"loss": 4.2591, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49451080771743e-05, |
|
"loss": 4.2668, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.493672212966378e-05, |
|
"loss": 4.2546, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.492833618215326e-05, |
|
"loss": 4.2738, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491995023464274e-05, |
|
"loss": 4.2671, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491156428713222e-05, |
|
"loss": 4.2681, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49031783396217e-05, |
|
"loss": 4.2699, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.489479239211118e-05, |
|
"loss": 4.2627, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.488640644460066e-05, |
|
"loss": 4.2572, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.487802049709014e-05, |
|
"loss": 4.2591, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.486963454957962e-05, |
|
"loss": 4.2483, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.486126498087284e-05, |
|
"loss": 4.2645, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.485289541216605e-05, |
|
"loss": 4.2522, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.484450946465553e-05, |
|
"loss": 4.2545, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.483612351714501e-05, |
|
"loss": 4.2834, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.482773756963449e-05, |
|
"loss": 4.2561, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481935162212397e-05, |
|
"loss": 4.2623, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481096567461345e-05, |
|
"loss": 4.2655, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480257972710293e-05, |
|
"loss": 4.26, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479419377959241e-05, |
|
"loss": 4.2588, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.478580783208188e-05, |
|
"loss": 4.2482, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4777438263375096e-05, |
|
"loss": 4.2638, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4769052315864576e-05, |
|
"loss": 4.2422, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4760666368354056e-05, |
|
"loss": 4.2549, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4752280420843536e-05, |
|
"loss": 4.2478, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4743894473333016e-05, |
|
"loss": 4.2493, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473552490462623e-05, |
|
"loss": 4.2625, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472713895711571e-05, |
|
"loss": 4.2554, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4718753009605185e-05, |
|
"loss": 4.25, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4710367062094665e-05, |
|
"loss": 4.2574, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.470199749338788e-05, |
|
"loss": 4.2643, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4693611545877354e-05, |
|
"loss": 4.2468, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4685225598366834e-05, |
|
"loss": 4.2553, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4676839650856314e-05, |
|
"loss": 4.241, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4668453703345794e-05, |
|
"loss": 4.2555, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466006775583528e-05, |
|
"loss": 4.2351, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.465168180832476e-05, |
|
"loss": 4.2617, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.464329586081424e-05, |
|
"loss": 4.2525, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.463492629210745e-05, |
|
"loss": 4.2485, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462654034459693e-05, |
|
"loss": 4.2536, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461817077589014e-05, |
|
"loss": 4.2452, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460978482837962e-05, |
|
"loss": 4.2554, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.46013988808691e-05, |
|
"loss": 4.241, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.459301293335858e-05, |
|
"loss": 4.2359, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.458464336465179e-05, |
|
"loss": 4.2533, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4576273795945004e-05, |
|
"loss": 4.2476, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4567887848434484e-05, |
|
"loss": 4.2479, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559501900923964e-05, |
|
"loss": 4.2372, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551115953413444e-05, |
|
"loss": 4.245, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4542730005902923e-05, |
|
"loss": 4.2373, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4534344058392403e-05, |
|
"loss": 4.2514, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4525958110881883e-05, |
|
"loss": 4.2385, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451757216337136e-05, |
|
"loss": 4.2531, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450918621586084e-05, |
|
"loss": 4.2454, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450080026835032e-05, |
|
"loss": 4.2371, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44924143208398e-05, |
|
"loss": 4.2371, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448402837332928e-05, |
|
"loss": 4.2445, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.447565880462249e-05, |
|
"loss": 4.2269, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.446727285711197e-05, |
|
"loss": 4.2382, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445888690960145e-05, |
|
"loss": 4.2408, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445050096209093e-05, |
|
"loss": 4.245, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444213139338415e-05, |
|
"loss": 4.2199, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.443374544587363e-05, |
|
"loss": 4.2303, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.442535949836311e-05, |
|
"loss": 4.2225, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.441698992965632e-05, |
|
"loss": 4.2495, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44086039821458e-05, |
|
"loss": 4.2372, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440021803463528e-05, |
|
"loss": 4.2372, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.439183208712476e-05, |
|
"loss": 4.2379, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4383462518417966e-05, |
|
"loss": 4.2345, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4375076570907446e-05, |
|
"loss": 4.2512, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4366690623396926e-05, |
|
"loss": 4.2205, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4358304675886406e-05, |
|
"loss": 4.2399, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4349918728375886e-05, |
|
"loss": 4.2354, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4341532780865366e-05, |
|
"loss": 4.234, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4333146833354846e-05, |
|
"loss": 4.2357, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.432477726464806e-05, |
|
"loss": 4.231, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.431639131713754e-05, |
|
"loss": 4.2362, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4308005369627015e-05, |
|
"loss": 4.2368, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4299619422116495e-05, |
|
"loss": 4.2227, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4291233474605975e-05, |
|
"loss": 4.236, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428286390589919e-05, |
|
"loss": 4.2235, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.42744943371924e-05, |
|
"loss": 4.2372, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426610838968188e-05, |
|
"loss": 4.2295, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.425772244217136e-05, |
|
"loss": 4.2179, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424933649466084e-05, |
|
"loss": 4.2311, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424095054715032e-05, |
|
"loss": 4.2242, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.42325645996398e-05, |
|
"loss": 4.2338, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.422417865212928e-05, |
|
"loss": 4.2154, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.421579270461876e-05, |
|
"loss": 4.23, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.420742313591197e-05, |
|
"loss": 4.241, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419903718840145e-05, |
|
"loss": 4.2208, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419065124089093e-05, |
|
"loss": 4.2103, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.418226529338041e-05, |
|
"loss": 4.2213, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.417387934586989e-05, |
|
"loss": 4.2252, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.416549339835937e-05, |
|
"loss": 4.2317, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.415710745084885e-05, |
|
"loss": 4.2254, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414872150333833e-05, |
|
"loss": 4.2197, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414033555582781e-05, |
|
"loss": 4.2159, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4131965987121024e-05, |
|
"loss": 4.2269, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4123580039610504e-05, |
|
"loss": 4.2033, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4115194092099984e-05, |
|
"loss": 4.2193, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410682452339319e-05, |
|
"loss": 4.2197, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409843857588267e-05, |
|
"loss": 4.2242, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409005262837215e-05, |
|
"loss": 4.2015, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408166668086163e-05, |
|
"loss": 4.221, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407329711215484e-05, |
|
"loss": 4.2137, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406491116464432e-05, |
|
"loss": 4.216, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.40565252171338e-05, |
|
"loss": 4.2153, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404813926962328e-05, |
|
"loss": 4.2079, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403975332211276e-05, |
|
"loss": 4.2216, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403138375340598e-05, |
|
"loss": 4.2218, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.402299780589546e-05, |
|
"loss": 4.2105, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.401461185838494e-05, |
|
"loss": 4.2204, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.400622591087442e-05, |
|
"loss": 4.2106, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39978399633639e-05, |
|
"loss": 4.2212, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398945401585338e-05, |
|
"loss": 4.2046, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398106806834285e-05, |
|
"loss": 4.2261, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3972698499636066e-05, |
|
"loss": 4.2055, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3964312552125546e-05, |
|
"loss": 4.2154, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3955926604615026e-05, |
|
"loss": 4.2068, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39475406571045e-05, |
|
"loss": 4.2202, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393915470959398e-05, |
|
"loss": 4.208, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3930785140887195e-05, |
|
"loss": 4.2106, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3922399193376675e-05, |
|
"loss": 4.1938, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3914013245866155e-05, |
|
"loss": 4.2255, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3905627298355635e-05, |
|
"loss": 4.2103, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3897241350845115e-05, |
|
"loss": 4.2129, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3888871782138324e-05, |
|
"loss": 4.1996, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3880485834627804e-05, |
|
"loss": 4.217, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3872099887117284e-05, |
|
"loss": 4.2212, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3863713939606764e-05, |
|
"loss": 4.2061, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3855327992096244e-05, |
|
"loss": 4.2041, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.384695842338945e-05, |
|
"loss": 4.2137, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383857247587893e-05, |
|
"loss": 4.2145, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383018652836842e-05, |
|
"loss": 4.2155, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.38218005808579e-05, |
|
"loss": 4.1964, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.381341463334738e-05, |
|
"loss": 4.2098, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.380502868583686e-05, |
|
"loss": 4.2047, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.379664273832634e-05, |
|
"loss": 4.2081, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.378825679081582e-05, |
|
"loss": 4.2135, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377988722210903e-05, |
|
"loss": 4.1974, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377150127459851e-05, |
|
"loss": 4.2139, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.376311532708799e-05, |
|
"loss": 4.2026, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.375472937957747e-05, |
|
"loss": 4.2147, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.25103235244751, |
|
"eval_runtime": 313.3193, |
|
"eval_samples_per_second": 1217.898, |
|
"eval_steps_per_second": 38.06, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.374634343206695e-05, |
|
"loss": 4.1995, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.373795748455643e-05, |
|
"loss": 4.2076, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372957153704591e-05, |
|
"loss": 4.2101, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372120196833912e-05, |
|
"loss": 4.2042, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.37128160208286e-05, |
|
"loss": 4.2105, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3704430073318084e-05, |
|
"loss": 4.1967, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3696044125807564e-05, |
|
"loss": 4.2013, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.368765817829704e-05, |
|
"loss": 4.1911, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367927223078652e-05, |
|
"loss": 4.2092, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3670886283276e-05, |
|
"loss": 4.2066, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.366251671456921e-05, |
|
"loss": 4.2014, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3654130767058686e-05, |
|
"loss": 4.207, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.36457611983519e-05, |
|
"loss": 4.1957, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.363737525084138e-05, |
|
"loss": 4.1964, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362898930333086e-05, |
|
"loss": 4.1954, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3620603355820335e-05, |
|
"loss": 4.1864, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.361221740830982e-05, |
|
"loss": 4.1983, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.36038314607993e-05, |
|
"loss": 4.1884, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.359544551328878e-05, |
|
"loss": 4.1919, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.358705956577826e-05, |
|
"loss": 4.2272, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357867361826774e-05, |
|
"loss": 4.1905, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357030404956095e-05, |
|
"loss": 4.2008, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.356191810205043e-05, |
|
"loss": 4.2017, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.355353215453991e-05, |
|
"loss": 4.1999, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.354514620702939e-05, |
|
"loss": 4.1919, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.353676025951887e-05, |
|
"loss": 4.1905, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352837431200835e-05, |
|
"loss": 4.1992, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351998836449783e-05, |
|
"loss": 4.1802, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351160241698731e-05, |
|
"loss": 4.1901, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.350323284828052e-05, |
|
"loss": 4.1863, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3494846900770006e-05, |
|
"loss": 4.191, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3486477332063216e-05, |
|
"loss": 4.1973, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3478091384552696e-05, |
|
"loss": 4.1977, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3469705437042175e-05, |
|
"loss": 4.1899, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3461319489531655e-05, |
|
"loss": 4.1963, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3452949920824865e-05, |
|
"loss": 4.2009, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3444563973314344e-05, |
|
"loss": 4.1851, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3436178025803824e-05, |
|
"loss": 4.1988, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3427792078293304e-05, |
|
"loss": 4.1765, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3419406130782784e-05, |
|
"loss": 4.1999, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3411020183272264e-05, |
|
"loss": 4.1714, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3402634235761744e-05, |
|
"loss": 4.2034, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3394248288251224e-05, |
|
"loss": 4.1913, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.338587871954444e-05, |
|
"loss": 4.1893, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.337750915083765e-05, |
|
"loss": 4.1917, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336912320332713e-05, |
|
"loss": 4.1877, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336073725581661e-05, |
|
"loss": 4.191, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.335235130830609e-05, |
|
"loss": 4.18, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.334396536079557e-05, |
|
"loss": 4.1706, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.333557941328505e-05, |
|
"loss": 4.1992, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.332719346577452e-05, |
|
"loss": 4.1889, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3318807518264e-05, |
|
"loss": 4.1932, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331043794955722e-05, |
|
"loss": 4.1783, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.33020520020467e-05, |
|
"loss": 4.1861, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.329366605453618e-05, |
|
"loss": 4.1793, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.328528010702566e-05, |
|
"loss": 4.1905, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.327689415951514e-05, |
|
"loss": 4.1805, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326850821200462e-05, |
|
"loss": 4.1941, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.32601222644941e-05, |
|
"loss": 4.1922, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.325175269578731e-05, |
|
"loss": 4.1748, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.324336674827679e-05, |
|
"loss": 4.18, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.323498080076627e-05, |
|
"loss": 4.1884, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.322659485325575e-05, |
|
"loss": 4.1706, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3218225284548956e-05, |
|
"loss": 4.1772, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3209839337038436e-05, |
|
"loss": 4.184, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3201453389527916e-05, |
|
"loss": 4.1837, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3193067442017396e-05, |
|
"loss": 4.165, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3184681494506876e-05, |
|
"loss": 4.172, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.317631192580009e-05, |
|
"loss": 4.1657, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.316792597828957e-05, |
|
"loss": 4.1912, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315954003077905e-05, |
|
"loss": 4.181, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315115408326853e-05, |
|
"loss": 4.18, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.314276813575801e-05, |
|
"loss": 4.1849, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.313439856705122e-05, |
|
"loss": 4.1757, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.312602899834443e-05, |
|
"loss": 4.1951, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.311764305083391e-05, |
|
"loss": 4.1662, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310925710332339e-05, |
|
"loss": 4.1823, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310087115581287e-05, |
|
"loss": 4.1803, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.309248520830235e-05, |
|
"loss": 4.1776, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.308409926079183e-05, |
|
"loss": 4.182, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3075713313281316e-05, |
|
"loss": 4.1744, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3067327365770796e-05, |
|
"loss": 4.1783, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3058941418260276e-05, |
|
"loss": 4.1833, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3050571849553485e-05, |
|
"loss": 4.1647, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3042185902042965e-05, |
|
"loss": 4.1828, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3033799954532445e-05, |
|
"loss": 4.1687, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3025430385825654e-05, |
|
"loss": 4.1831, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3017044438315134e-05, |
|
"loss": 4.1713, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3008658490804614e-05, |
|
"loss": 4.1661, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3000272543294094e-05, |
|
"loss": 4.173, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2991886595783574e-05, |
|
"loss": 4.1741, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2983500648273054e-05, |
|
"loss": 4.1742, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2975114700762534e-05, |
|
"loss": 4.1605, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2966728753252014e-05, |
|
"loss": 4.1764, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.295835918454523e-05, |
|
"loss": 4.1847, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294997323703471e-05, |
|
"loss": 4.1699, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294158728952418e-05, |
|
"loss": 4.1553, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.293320134201366e-05, |
|
"loss": 4.1688, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.292483177330688e-05, |
|
"loss": 4.1712, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.291644582579636e-05, |
|
"loss": 4.1743, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.290805987828583e-05, |
|
"loss": 4.1751, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289967393077531e-05, |
|
"loss": 4.1671, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289128798326479e-05, |
|
"loss": 4.1627, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.288290203575427e-05, |
|
"loss": 4.1742, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.287453246704749e-05, |
|
"loss": 4.1506, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.286614651953697e-05, |
|
"loss": 4.1662, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.285776057202645e-05, |
|
"loss": 4.1647, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284937462451593e-05, |
|
"loss": 4.1716, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284098867700541e-05, |
|
"loss": 4.1516, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.283260272949489e-05, |
|
"loss": 4.1636, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.282421678198437e-05, |
|
"loss": 4.1634, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.281583083447385e-05, |
|
"loss": 4.161, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2807461265767056e-05, |
|
"loss": 4.1641, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2799075318256536e-05, |
|
"loss": 4.1534, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2790689370746016e-05, |
|
"loss": 4.172, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2782319802039225e-05, |
|
"loss": 4.1683, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2773933854528705e-05, |
|
"loss": 4.1572, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.276554790701819e-05, |
|
"loss": 4.1715, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.275716195950767e-05, |
|
"loss": 4.1564, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274877601199715e-05, |
|
"loss": 4.1716, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274039006448663e-05, |
|
"loss": 4.1566, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.273200411697611e-05, |
|
"loss": 4.1695, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.272361816946559e-05, |
|
"loss": 4.154, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.27152486007588e-05, |
|
"loss": 4.1647, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.270687903205201e-05, |
|
"loss": 4.1573, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269849308454149e-05, |
|
"loss": 4.1671, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269010713703097e-05, |
|
"loss": 4.1541, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.268172118952045e-05, |
|
"loss": 4.1612, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.267333524200993e-05, |
|
"loss": 4.1405, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.266494929449941e-05, |
|
"loss": 4.1775, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.265656334698889e-05, |
|
"loss": 4.1565, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.264817739947837e-05, |
|
"loss": 4.164, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2639807830771585e-05, |
|
"loss": 4.149, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2631421883261065e-05, |
|
"loss": 4.1679, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2623035935750545e-05, |
|
"loss": 4.1687, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.261464998824002e-05, |
|
"loss": 4.1563, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.26062640407295e-05, |
|
"loss": 4.1567, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2597894472022714e-05, |
|
"loss": 4.1635, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258950852451219e-05, |
|
"loss": 4.1647, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258112257700167e-05, |
|
"loss": 4.173, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.257273662949115e-05, |
|
"loss": 4.1437, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.256435068198063e-05, |
|
"loss": 4.1655, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.255596473447011e-05, |
|
"loss": 4.1523, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2547578786959594e-05, |
|
"loss": 4.1629, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2539192839449074e-05, |
|
"loss": 4.1598, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253082327074228e-05, |
|
"loss": 4.1461, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.252243732323176e-05, |
|
"loss": 4.1657, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.251405137572124e-05, |
|
"loss": 4.1575, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.250568180701445e-05, |
|
"loss": 4.1606, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.2099761962890625, |
|
"eval_runtime": 300.7093, |
|
"eval_samples_per_second": 1268.97, |
|
"eval_steps_per_second": 39.656, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.249729585950393e-05, |
|
"loss": 4.1547, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.248890991199341e-05, |
|
"loss": 4.1545, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.248052396448289e-05, |
|
"loss": 4.165, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.24721543957761e-05, |
|
"loss": 4.1579, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.246376844826558e-05, |
|
"loss": 4.1607, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.245538250075506e-05, |
|
"loss": 4.1528, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.244699655324455e-05, |
|
"loss": 4.1464, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.243861060573403e-05, |
|
"loss": 4.1466, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.243022465822351e-05, |
|
"loss": 4.1561, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.242183871071299e-05, |
|
"loss": 4.1603, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.241345276320247e-05, |
|
"loss": 4.1534, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.240508319449568e-05, |
|
"loss": 4.1592, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2396713625788886e-05, |
|
"loss": 4.1449, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2388327678278366e-05, |
|
"loss": 4.1509, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2379941730767846e-05, |
|
"loss": 4.1495, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2371555783257326e-05, |
|
"loss": 4.1373, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2363169835746806e-05, |
|
"loss": 4.1524, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2354783888236286e-05, |
|
"loss": 4.1371, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2346397940725765e-05, |
|
"loss": 4.1446, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.233802837201898e-05, |
|
"loss": 4.1805, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.232964242450846e-05, |
|
"loss": 4.1443, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.232125647699794e-05, |
|
"loss": 4.1508, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.231287052948742e-05, |
|
"loss": 4.1525, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.23044845819769e-05, |
|
"loss": 4.1598, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2296098634466374e-05, |
|
"loss": 4.1364, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2287712686955854e-05, |
|
"loss": 4.1449, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2279326739445334e-05, |
|
"loss": 4.1476, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2270940791934814e-05, |
|
"loss": 4.139, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2262554844424294e-05, |
|
"loss": 4.1447, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.2254168896913774e-05, |
|
"loss": 4.1379, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.2245782949403254e-05, |
|
"loss": 4.1455, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.223741338069647e-05, |
|
"loss": 4.1518, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.222902743318595e-05, |
|
"loss": 4.1495, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.222064148567543e-05, |
|
"loss": 4.1425, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.221227191696864e-05, |
|
"loss": 4.1482, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.220388596945812e-05, |
|
"loss": 4.154, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.219551640075133e-05, |
|
"loss": 4.1384, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.218713045324081e-05, |
|
"loss": 4.1565, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.217874450573029e-05, |
|
"loss": 4.1297, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.217035855821977e-05, |
|
"loss": 4.151, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.216197261070925e-05, |
|
"loss": 4.1278, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.215358666319873e-05, |
|
"loss": 4.1583, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.214520071568821e-05, |
|
"loss": 4.1427, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.213681476817769e-05, |
|
"loss": 4.1473, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.212842882066717e-05, |
|
"loss": 4.1445, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.212004287315665e-05, |
|
"loss": 4.1453, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.2111656925646134e-05, |
|
"loss": 4.1478, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.2103270978135614e-05, |
|
"loss": 4.1384, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.2094901409428823e-05, |
|
"loss": 4.1214, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.20865154619183e-05, |
|
"loss": 4.1566, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.207812951440778e-05, |
|
"loss": 4.1413, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.206974356689726e-05, |
|
"loss": 4.1519, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.206137399819047e-05, |
|
"loss": 4.1315, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.205298805067995e-05, |
|
"loss": 4.1403, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.204460210316943e-05, |
|
"loss": 4.1316, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.203621615565891e-05, |
|
"loss": 4.1465, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.2027830208148385e-05, |
|
"loss": 4.1407, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.201944426063787e-05, |
|
"loss": 4.1492, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.201107469193109e-05, |
|
"loss": 4.1465, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.200268874442057e-05, |
|
"loss": 4.1293, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.199430279691004e-05, |
|
"loss": 4.1339, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.198591684939952e-05, |
|
"loss": 4.1505, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1977530901889e-05, |
|
"loss": 4.1258, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.196914495437848e-05, |
|
"loss": 4.1333, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.196075900686796e-05, |
|
"loss": 4.1373, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.195237305935744e-05, |
|
"loss": 4.1463, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1944019869454386e-05, |
|
"loss": 4.1203, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.193563392194386e-05, |
|
"loss": 4.1284, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.192724797443334e-05, |
|
"loss": 4.1277, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1918862026922826e-05, |
|
"loss": 4.1361, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1910476079412306e-05, |
|
"loss": 4.1413, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1902090131901786e-05, |
|
"loss": 4.1373, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1893704184391266e-05, |
|
"loss": 4.137, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1885318236880746e-05, |
|
"loss": 4.1302, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1876948668173955e-05, |
|
"loss": 4.1539, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1868579099467164e-05, |
|
"loss": 4.1225, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.1860193151956644e-05, |
|
"loss": 4.1372, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.185182358324986e-05, |
|
"loss": 4.1379, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.184343763573933e-05, |
|
"loss": 4.1331, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.183505168822881e-05, |
|
"loss": 4.1388, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.182666574071829e-05, |
|
"loss": 4.1354, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.181827979320778e-05, |
|
"loss": 4.1299, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.180989384569726e-05, |
|
"loss": 4.1427, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.180150789818674e-05, |
|
"loss": 4.1229, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.179312195067622e-05, |
|
"loss": 4.1363, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.17847360031657e-05, |
|
"loss": 4.13, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.177635005565518e-05, |
|
"loss": 4.1402, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.176796410814466e-05, |
|
"loss": 4.1274, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.175957816063414e-05, |
|
"loss": 4.1228, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.175120859192735e-05, |
|
"loss": 4.1294, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.174282264441683e-05, |
|
"loss": 4.1335, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.173445307571004e-05, |
|
"loss": 4.1331, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.172606712819952e-05, |
|
"loss": 4.118, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1717681180689e-05, |
|
"loss": 4.1332, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.170929523317848e-05, |
|
"loss": 4.1402, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1700909285667964e-05, |
|
"loss": 4.1302, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1692523338157444e-05, |
|
"loss": 4.1142, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.168415376945065e-05, |
|
"loss": 4.1265, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.167576782194013e-05, |
|
"loss": 4.1243, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.166738187442961e-05, |
|
"loss": 4.1358, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.165899592691909e-05, |
|
"loss": 4.135, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.165060997940857e-05, |
|
"loss": 4.1215, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1642224031898046e-05, |
|
"loss": 4.1212, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1633838084387526e-05, |
|
"loss": 4.1325, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1625452136877006e-05, |
|
"loss": 4.1102, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.161708256817022e-05, |
|
"loss": 4.1274, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.160871299946343e-05, |
|
"loss": 4.1182, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.160032705195292e-05, |
|
"loss": 4.1349, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.159195748324613e-05, |
|
"loss": 4.1094, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.158357153573561e-05, |
|
"loss": 4.1213, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1575185588225087e-05, |
|
"loss": 4.1201, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1566799640714567e-05, |
|
"loss": 4.1267, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1558413693204047e-05, |
|
"loss": 4.1193, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.155002774569352e-05, |
|
"loss": 4.1147, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1541641798183e-05, |
|
"loss": 4.1271, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.153325585067248e-05, |
|
"loss": 4.129, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1524902660769425e-05, |
|
"loss": 4.1155, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1516516713258905e-05, |
|
"loss": 4.1322, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1508130765748385e-05, |
|
"loss": 4.1158, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1499744818237864e-05, |
|
"loss": 4.1247, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1491358870727344e-05, |
|
"loss": 4.1181, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1482972923216824e-05, |
|
"loss": 4.1256, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1474586975706304e-05, |
|
"loss": 4.1143, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1466201028195784e-05, |
|
"loss": 4.1229, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1457831459488993e-05, |
|
"loss": 4.1252, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.144944551197847e-05, |
|
"loss": 4.1215, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.144105956446795e-05, |
|
"loss": 4.1129, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.143267361695743e-05, |
|
"loss": 4.1227, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.142428766944691e-05, |
|
"loss": 4.1039, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.141591810074012e-05, |
|
"loss": 4.1336, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.14075321532296e-05, |
|
"loss": 4.1194, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.139914620571909e-05, |
|
"loss": 4.1225, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.139076025820857e-05, |
|
"loss": 4.1109, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.138237431069805e-05, |
|
"loss": 4.1321, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.137398836318753e-05, |
|
"loss": 4.1228, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.136560241567701e-05, |
|
"loss": 4.1169, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.135721646816649e-05, |
|
"loss": 4.1162, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.134883052065597e-05, |
|
"loss": 4.1236, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.134046095194918e-05, |
|
"loss": 4.1225, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.133207500443866e-05, |
|
"loss": 4.1327, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.132370543573187e-05, |
|
"loss": 4.1032, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.131531948822135e-05, |
|
"loss": 4.1238, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.130693354071083e-05, |
|
"loss": 4.1139, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.129854759320031e-05, |
|
"loss": 4.1251, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.129016164568979e-05, |
|
"loss": 4.1168, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1281775698179273e-05, |
|
"loss": 4.1111, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.1273389750668753e-05, |
|
"loss": 4.1251, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.126500380315823e-05, |
|
"loss": 4.1208, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.125663423445144e-05, |
|
"loss": 4.121, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"eval_loss": 4.180113792419434, |
|
"eval_runtime": 298.6947, |
|
"eval_samples_per_second": 1277.529, |
|
"eval_steps_per_second": 39.924, |
|
"step": 534240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.124824828694092e-05, |
|
"loss": 4.1185, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.12398623394304e-05, |
|
"loss": 4.1143, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123147639191988e-05, |
|
"loss": 4.1244, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1223090444409356e-05, |
|
"loss": 4.1223, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1214704496898835e-05, |
|
"loss": 4.1245, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1206318549388315e-05, |
|
"loss": 4.1102, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1197932601877795e-05, |
|
"loss": 4.1073, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1189546654367275e-05, |
|
"loss": 4.1069, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1181160706856755e-05, |
|
"loss": 4.1193, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.117277475934624e-05, |
|
"loss": 4.1254, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.116438881183572e-05, |
|
"loss": 4.1096, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11560028643252e-05, |
|
"loss": 4.1215, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.114763329561841e-05, |
|
"loss": 4.1129, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113924734810789e-05, |
|
"loss": 4.1112, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113086140059737e-05, |
|
"loss": 4.1087, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.112247545308685e-05, |
|
"loss": 4.1035, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.111408950557633e-05, |
|
"loss": 4.1117, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.110570355806581e-05, |
|
"loss": 4.1045, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.109731761055529e-05, |
|
"loss": 4.1035, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108893166304477e-05, |
|
"loss": 4.1378, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1080545715534244e-05, |
|
"loss": 4.1125, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1072159768023724e-05, |
|
"loss": 4.1116, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.106377382051321e-05, |
|
"loss": 4.1145, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.105538787300269e-05, |
|
"loss": 4.1179, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.10470183042959e-05, |
|
"loss": 4.1009, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1038648735589115e-05, |
|
"loss": 4.1095, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1030262788078595e-05, |
|
"loss": 4.1121, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.102187684056807e-05, |
|
"loss": 4.1062, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.101349089305755e-05, |
|
"loss": 4.1026, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.100510494554703e-05, |
|
"loss": 4.1014, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.099671899803651e-05, |
|
"loss": 4.1088, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.098833305052599e-05, |
|
"loss": 4.1162, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.09799634818192e-05, |
|
"loss": 4.1119, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097157753430868e-05, |
|
"loss": 4.1042, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.096319158679816e-05, |
|
"loss": 4.1115, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0954805639287644e-05, |
|
"loss": 4.1148, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0946419691777124e-05, |
|
"loss": 4.1074, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0938033744266604e-05, |
|
"loss": 4.1138, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0929647796756084e-05, |
|
"loss": 4.0968, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0921261849245564e-05, |
|
"loss": 4.1115, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.091289228053877e-05, |
|
"loss": 4.0931, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.090450633302825e-05, |
|
"loss": 4.117, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.089613676432146e-05, |
|
"loss": 4.1102, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.088775081681094e-05, |
|
"loss": 4.11, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.087936486930042e-05, |
|
"loss": 4.1115, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.08709789217899e-05, |
|
"loss": 4.1049, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.086259297427938e-05, |
|
"loss": 4.1115, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.08542234055726e-05, |
|
"loss": 4.1018, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.084583745806208e-05, |
|
"loss": 4.0807, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.083745151055156e-05, |
|
"loss": 4.1199, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.082906556304104e-05, |
|
"loss": 4.1047, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.082067961553052e-05, |
|
"loss": 4.1171, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.081229366802e-05, |
|
"loss": 4.0986, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.080390772050948e-05, |
|
"loss": 4.103, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.079553815180269e-05, |
|
"loss": 4.0926, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.078715220429217e-05, |
|
"loss": 4.112, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0778766256781647e-05, |
|
"loss": 4.1037, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0770380309271127e-05, |
|
"loss": 4.1093, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0761994361760607e-05, |
|
"loss": 4.1162, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0753624793053816e-05, |
|
"loss": 4.0952, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0745238845543296e-05, |
|
"loss": 4.0971, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.073685289803278e-05, |
|
"loss": 4.1155, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0728466950522255e-05, |
|
"loss": 4.092, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0720081003011735e-05, |
|
"loss": 4.0938, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0711695055501215e-05, |
|
"loss": 4.1037, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.070332548679443e-05, |
|
"loss": 4.1082, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0694939539283904e-05, |
|
"loss": 4.0886, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0686553591773384e-05, |
|
"loss": 4.0937, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0678167644262864e-05, |
|
"loss": 4.0875, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0669781696752344e-05, |
|
"loss": 4.102, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0661395749241824e-05, |
|
"loss": 4.1077, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0653009801731304e-05, |
|
"loss": 4.1024, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.064464023302452e-05, |
|
"loss": 4.0992, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0636254285514e-05, |
|
"loss": 4.0997, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.062786833800348e-05, |
|
"loss": 4.1174, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.061948239049296e-05, |
|
"loss": 4.0883, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.061111282178617e-05, |
|
"loss": 4.1054, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.060272687427565e-05, |
|
"loss": 4.102, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.059434092676513e-05, |
|
"loss": 4.0938, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.058595497925461e-05, |
|
"loss": 4.1079, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.057756903174409e-05, |
|
"loss": 4.0995, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.056918308423357e-05, |
|
"loss": 4.0973, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.056079713672305e-05, |
|
"loss": 4.1071, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.055241118921253e-05, |
|
"loss": 4.0877, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.054402524170201e-05, |
|
"loss": 4.0999, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.053563929419149e-05, |
|
"loss": 4.0958, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.05272697254847e-05, |
|
"loss": 4.1054, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0518883777974184e-05, |
|
"loss": 4.0917, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0510497830463664e-05, |
|
"loss": 4.0933, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0502111882953144e-05, |
|
"loss": 4.0891, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.049372593544262e-05, |
|
"loss": 4.101, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0485356366735833e-05, |
|
"loss": 4.0995, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0476970419225313e-05, |
|
"loss": 4.0831, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.046858447171479e-05, |
|
"loss": 4.1031, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0460198524204267e-05, |
|
"loss": 4.1041, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.045182895549748e-05, |
|
"loss": 4.0946, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.044344300798696e-05, |
|
"loss": 4.0792, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.043505706047644e-05, |
|
"loss": 4.0916, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.042667111296592e-05, |
|
"loss": 4.0889, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.04182851654554e-05, |
|
"loss": 4.104, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.040989921794488e-05, |
|
"loss": 4.1, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.040152964923809e-05, |
|
"loss": 4.0896, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.039314370172757e-05, |
|
"loss": 4.0896, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.038475775421705e-05, |
|
"loss": 4.0919, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.037637180670653e-05, |
|
"loss": 4.0836, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.036798585919601e-05, |
|
"loss": 4.0906, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035959991168549e-05, |
|
"loss": 4.0851, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035121396417497e-05, |
|
"loss": 4.1032, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.034282801666445e-05, |
|
"loss": 4.0766, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.033444206915393e-05, |
|
"loss": 4.0874, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.032607250044714e-05, |
|
"loss": 4.0857, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.031768655293662e-05, |
|
"loss": 4.0952, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030930060542611e-05, |
|
"loss": 4.0834, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0300931036719316e-05, |
|
"loss": 4.0825, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0292545089208796e-05, |
|
"loss": 4.0965, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0284159141698276e-05, |
|
"loss": 4.0959, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0275773194187756e-05, |
|
"loss": 4.0805, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0267403625480965e-05, |
|
"loss": 4.0966, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0259017677970445e-05, |
|
"loss": 4.0865, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0250631730459925e-05, |
|
"loss": 4.0925, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0242245782949405e-05, |
|
"loss": 4.0869, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0233859835438885e-05, |
|
"loss": 4.0908, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0225473887928365e-05, |
|
"loss": 4.0837, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0217087940417845e-05, |
|
"loss": 4.092, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0208701992907324e-05, |
|
"loss": 4.0942, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.020033242420054e-05, |
|
"loss": 4.0892, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.019194647669002e-05, |
|
"loss": 4.0822, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.018357690798323e-05, |
|
"loss": 4.0879, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.017519096047271e-05, |
|
"loss": 4.072, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.016680501296219e-05, |
|
"loss": 4.1044, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015841906545167e-05, |
|
"loss": 4.0813, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015003311794115e-05, |
|
"loss": 4.0914, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.014164717043063e-05, |
|
"loss": 4.0778, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.013327760172384e-05, |
|
"loss": 4.0983, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.012489165421332e-05, |
|
"loss": 4.0923, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01165057067028e-05, |
|
"loss": 4.0841, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.010811975919228e-05, |
|
"loss": 4.0878, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009973381168176e-05, |
|
"loss": 4.0869, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009134786417124e-05, |
|
"loss": 4.0895, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.008296191666072e-05, |
|
"loss": 4.1051, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.00745759691502e-05, |
|
"loss": 4.0674, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.006620640044341e-05, |
|
"loss": 4.0965, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.005782045293289e-05, |
|
"loss": 4.0816, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.004943450542237e-05, |
|
"loss": 4.0916, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.004104855791185e-05, |
|
"loss": 4.0818, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.003266261040133e-05, |
|
"loss": 4.085, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0024293041694536e-05, |
|
"loss": 4.0915, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.001592347298775e-05, |
|
"loss": 4.089, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000753752547723e-05, |
|
"loss": 4.0874, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.156889915466309, |
|
"eval_runtime": 280.1064, |
|
"eval_samples_per_second": 1362.307, |
|
"eval_steps_per_second": 42.573, |
|
"step": 610560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.999915157796671e-05, |
|
"loss": 4.0867, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.999076563045619e-05, |
|
"loss": 4.0824, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.998237968294567e-05, |
|
"loss": 4.09, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.997399373543515e-05, |
|
"loss": 4.0883, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.996560778792463e-05, |
|
"loss": 4.0944, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.995723821921784e-05, |
|
"loss": 4.0782, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.994885227170732e-05, |
|
"loss": 4.0774, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.99404663241968e-05, |
|
"loss": 4.0796, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.993208037668628e-05, |
|
"loss": 4.085, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.992369442917576e-05, |
|
"loss": 4.0928, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.991532486046897e-05, |
|
"loss": 4.0797, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.990693891295845e-05, |
|
"loss": 4.0871, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.98985857230554e-05, |
|
"loss": 4.0847, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9890199775544875e-05, |
|
"loss": 4.0767, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9881813828034354e-05, |
|
"loss": 4.0804, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9873427880523834e-05, |
|
"loss": 4.0684, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9865041933013314e-05, |
|
"loss": 4.0813, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9856655985502794e-05, |
|
"loss": 4.0702, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9848270037992274e-05, |
|
"loss": 4.075, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9839884090481754e-05, |
|
"loss": 4.1024, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9831498142971234e-05, |
|
"loss": 4.0866, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9823112195460714e-05, |
|
"loss": 4.0801, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.981474262675392e-05, |
|
"loss": 4.0817, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.98063566792434e-05, |
|
"loss": 4.0892, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.979798711053662e-05, |
|
"loss": 4.0646, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.97896011630261e-05, |
|
"loss": 4.0817, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.978121521551558e-05, |
|
"loss": 4.0831, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.977282926800506e-05, |
|
"loss": 4.0708, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.976444332049454e-05, |
|
"loss": 4.0706, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.975605737298402e-05, |
|
"loss": 4.0727, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.97476714254735e-05, |
|
"loss": 4.077, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.973928547796298e-05, |
|
"loss": 4.0862, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.973091590925619e-05, |
|
"loss": 4.0801, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.972252996174567e-05, |
|
"loss": 4.0765, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.971416039303888e-05, |
|
"loss": 4.0803, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.970577444552836e-05, |
|
"loss": 4.0818, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.969738849801784e-05, |
|
"loss": 4.078, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9689002550507324e-05, |
|
"loss": 4.0816, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9680616602996804e-05, |
|
"loss": 4.0663, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9672230655486283e-05, |
|
"loss": 4.0787, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.966386108677949e-05, |
|
"loss": 4.0665, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.965547513926897e-05, |
|
"loss": 4.0874, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.964708919175845e-05, |
|
"loss": 4.0798, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.963870324424793e-05, |
|
"loss": 4.0763, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9630317296737406e-05, |
|
"loss": 4.0822, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9621931349226886e-05, |
|
"loss": 4.0762, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9613545401716366e-05, |
|
"loss": 4.0852, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9605159454205845e-05, |
|
"loss": 4.072, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.959678988549906e-05, |
|
"loss": 4.0464, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.958840393798854e-05, |
|
"loss": 4.0882, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.958001799047802e-05, |
|
"loss": 4.0749, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.95716320429675e-05, |
|
"loss": 4.0882, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.956324609545698e-05, |
|
"loss": 4.0702, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.955486014794646e-05, |
|
"loss": 4.07, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.954647420043594e-05, |
|
"loss": 4.0636, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.953808825292542e-05, |
|
"loss": 4.0789, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952971868421863e-05, |
|
"loss": 4.0777, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952133273670811e-05, |
|
"loss": 4.0761, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.951294678919759e-05, |
|
"loss": 4.0884, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.950456084168707e-05, |
|
"loss": 4.0679, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.949617489417655e-05, |
|
"loss": 4.0639, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.948780532546976e-05, |
|
"loss": 4.0855, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9479419377959246e-05, |
|
"loss": 4.0616, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9471033430448726e-05, |
|
"loss": 4.0664, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9462647482938206e-05, |
|
"loss": 4.0709, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9454261535427686e-05, |
|
"loss": 4.0811, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9445875587917166e-05, |
|
"loss": 4.0586, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9437489640406646e-05, |
|
"loss": 4.0656, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9429103692896126e-05, |
|
"loss": 4.0592, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9420734124189335e-05, |
|
"loss": 4.0705, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9412364555482544e-05, |
|
"loss": 4.0794, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9403978607972024e-05, |
|
"loss": 4.0708, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9395592660461504e-05, |
|
"loss": 4.0715, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9387206712950984e-05, |
|
"loss": 4.0703, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9378820765440464e-05, |
|
"loss": 4.0925, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9370434817929943e-05, |
|
"loss": 4.061, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.936206524922316e-05, |
|
"loss": 4.0728, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.935367930171264e-05, |
|
"loss": 4.0754, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.934529335420212e-05, |
|
"loss": 4.0669, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.933690740669159e-05, |
|
"loss": 4.0758, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.932852145918107e-05, |
|
"loss": 4.0741, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.932013551167055e-05, |
|
"loss": 4.0695, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.931174956416003e-05, |
|
"loss": 4.0753, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.930336361664951e-05, |
|
"loss": 4.0634, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.929499404794272e-05, |
|
"loss": 4.0672, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.92866081004322e-05, |
|
"loss": 4.0699, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.927822215292168e-05, |
|
"loss": 4.0769, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.926983620541116e-05, |
|
"loss": 4.0624, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.926145025790065e-05, |
|
"loss": 4.0646, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.925306431039013e-05, |
|
"loss": 4.0607, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.924467836287961e-05, |
|
"loss": 4.0751, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.923630879417282e-05, |
|
"loss": 4.0702, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.92279228466623e-05, |
|
"loss": 4.0572, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.921953689915178e-05, |
|
"loss": 4.0725, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.921115095164126e-05, |
|
"loss": 4.0758, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9202781382934466e-05, |
|
"loss": 4.067, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9194395435423946e-05, |
|
"loss": 4.0501, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9186009487913426e-05, |
|
"loss": 4.0643, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9177623540402906e-05, |
|
"loss": 4.0587, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9169253971696115e-05, |
|
"loss": 4.0752, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.91608680241856e-05, |
|
"loss": 4.0694, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.915248207667508e-05, |
|
"loss": 4.0684, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.914409612916456e-05, |
|
"loss": 4.0623, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.913571018165404e-05, |
|
"loss": 4.0618, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.912732423414352e-05, |
|
"loss": 4.0568, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9118938286633e-05, |
|
"loss": 4.0614, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.911056871792621e-05, |
|
"loss": 4.0563, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.910218277041569e-05, |
|
"loss": 4.0739, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.909379682290517e-05, |
|
"loss": 4.0503, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.908541087539465e-05, |
|
"loss": 4.0542, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.907702492788413e-05, |
|
"loss": 4.06, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9068638980373604e-05, |
|
"loss": 4.0615, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9060253032863083e-05, |
|
"loss": 4.061, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.905186708535257e-05, |
|
"loss": 4.0555, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.904348113784205e-05, |
|
"loss": 4.0664, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.903511156913526e-05, |
|
"loss": 4.0717, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9026742000428475e-05, |
|
"loss": 4.0522, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9018372431721684e-05, |
|
"loss": 4.0652, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9009986484211164e-05, |
|
"loss": 4.0594, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9001600536700644e-05, |
|
"loss": 4.0657, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8993214589190124e-05, |
|
"loss": 4.0614, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8984828641679604e-05, |
|
"loss": 4.0574, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.897644269416908e-05, |
|
"loss": 4.0567, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.896805674665856e-05, |
|
"loss": 4.0634, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.895967079914804e-05, |
|
"loss": 4.0664, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8951284851637524e-05, |
|
"loss": 4.0602, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8942898904127004e-05, |
|
"loss": 4.0548, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8934512956616484e-05, |
|
"loss": 4.0592, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.892614338790969e-05, |
|
"loss": 4.046, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.891775744039917e-05, |
|
"loss": 4.0747, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.890937149288865e-05, |
|
"loss": 4.0533, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.890098554537813e-05, |
|
"loss": 4.0683, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.889259959786761e-05, |
|
"loss": 4.0483, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.888421365035709e-05, |
|
"loss": 4.0715, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.887582770284657e-05, |
|
"loss": 4.0648, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.886744175533605e-05, |
|
"loss": 4.0574, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.885907218662926e-05, |
|
"loss": 4.0599, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.885068623911874e-05, |
|
"loss": 4.0606, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.884230029160822e-05, |
|
"loss": 4.0635, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.88339143440977e-05, |
|
"loss": 4.0753, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.882554477539092e-05, |
|
"loss": 4.0431, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.88171588278804e-05, |
|
"loss": 4.0695, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.880877288036988e-05, |
|
"loss": 4.0504, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.880038693285936e-05, |
|
"loss": 4.0653, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8792017364152566e-05, |
|
"loss": 4.0559, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8783647795445775e-05, |
|
"loss": 4.0553, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8775261847935255e-05, |
|
"loss": 4.0628, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8766875900424735e-05, |
|
"loss": 4.0648, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8758489952914215e-05, |
|
"loss": 4.0594, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.136818885803223, |
|
"eval_runtime": 280.1538, |
|
"eval_samples_per_second": 1362.077, |
|
"eval_steps_per_second": 42.566, |
|
"step": 686880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8750104005403695e-05, |
|
"loss": 4.0586, |
|
"step": 687104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8741718057893175e-05, |
|
"loss": 4.0578, |
|
"step": 687616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8733332110382655e-05, |
|
"loss": 4.0641, |
|
"step": 688128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.872494616287214e-05, |
|
"loss": 4.0591, |
|
"step": 688640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8716560215361615e-05, |
|
"loss": 4.065, |
|
"step": 689152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8708174267851095e-05, |
|
"loss": 4.0541, |
|
"step": 689664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8699788320340575e-05, |
|
"loss": 4.052, |
|
"step": 690176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8691402372830055e-05, |
|
"loss": 4.0535, |
|
"step": 690688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8683016425319535e-05, |
|
"loss": 4.057, |
|
"step": 691200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8674630477809015e-05, |
|
"loss": 4.0648, |
|
"step": 691712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8666244530298495e-05, |
|
"loss": 4.0568, |
|
"step": 692224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8657858582787975e-05, |
|
"loss": 4.0582, |
|
"step": 692736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8649489014081184e-05, |
|
"loss": 4.0609, |
|
"step": 693248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8641103066570664e-05, |
|
"loss": 4.0488, |
|
"step": 693760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8632717119060144e-05, |
|
"loss": 4.0543, |
|
"step": 694272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8624331171549624e-05, |
|
"loss": 4.044, |
|
"step": 694784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.861594522403911e-05, |
|
"loss": 4.0507, |
|
"step": 695296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.860755927652859e-05, |
|
"loss": 4.0471, |
|
"step": 695808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859917332901807e-05, |
|
"loss": 4.0495, |
|
"step": 696320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859078738150755e-05, |
|
"loss": 4.0716, |
|
"step": 696832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.858240143399703e-05, |
|
"loss": 4.0617, |
|
"step": 697344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8574015486486503e-05, |
|
"loss": 4.0566, |
|
"step": 697856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8565629538975983e-05, |
|
"loss": 4.0551, |
|
"step": 698368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.85572599702692e-05, |
|
"loss": 4.0642, |
|
"step": 698880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.854887402275868e-05, |
|
"loss": 4.0394, |
|
"step": 699392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.854048807524815e-05, |
|
"loss": 4.054, |
|
"step": 699904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.853210212773763e-05, |
|
"loss": 4.0565, |
|
"step": 700416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.852371618022711e-05, |
|
"loss": 4.0403, |
|
"step": 700928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.851533023271659e-05, |
|
"loss": 4.0508, |
|
"step": 701440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.850694428520608e-05, |
|
"loss": 4.0444, |
|
"step": 701952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.849855833769556e-05, |
|
"loss": 4.0511, |
|
"step": 702464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.849018876898877e-05, |
|
"loss": 4.0542, |
|
"step": 702976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.848180282147825e-05, |
|
"loss": 4.0531, |
|
"step": 703488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.847341687396773e-05, |
|
"loss": 4.0578, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.846503092645721e-05, |
|
"loss": 4.0479, |
|
"step": 704512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.845664497894669e-05, |
|
"loss": 4.0564, |
|
"step": 705024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.84482754102399e-05, |
|
"loss": 4.0595, |
|
"step": 705536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.843988946272938e-05, |
|
"loss": 4.0481, |
|
"step": 706048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.843150351521886e-05, |
|
"loss": 4.0419, |
|
"step": 706560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8423133946512066e-05, |
|
"loss": 4.0529, |
|
"step": 707072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8414747999001546e-05, |
|
"loss": 4.042, |
|
"step": 707584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.840636205149103e-05, |
|
"loss": 4.0623, |
|
"step": 708096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.839797610398051e-05, |
|
"loss": 4.0553, |
|
"step": 708608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.838959015646999e-05, |
|
"loss": 4.0486, |
|
"step": 709120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.838120420895947e-05, |
|
"loss": 4.0564, |
|
"step": 709632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.837281826144895e-05, |
|
"loss": 4.0483, |
|
"step": 710144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.836443231393843e-05, |
|
"loss": 4.0574, |
|
"step": 710656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.835604636642791e-05, |
|
"loss": 4.0535, |
|
"step": 711168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.834766041891739e-05, |
|
"loss": 4.0183, |
|
"step": 711680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8339274471406866e-05, |
|
"loss": 4.0621, |
|
"step": 712192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8330888523896346e-05, |
|
"loss": 4.0505, |
|
"step": 712704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.832253533399329e-05, |
|
"loss": 4.0576, |
|
"step": 713216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.831414938648277e-05, |
|
"loss": 4.0441, |
|
"step": 713728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.830576343897225e-05, |
|
"loss": 4.0444, |
|
"step": 714240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.829737749146173e-05, |
|
"loss": 4.0387, |
|
"step": 714752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.828899154395121e-05, |
|
"loss": 4.053, |
|
"step": 715264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.82806055964407e-05, |
|
"loss": 4.0516, |
|
"step": 715776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.827221964893017e-05, |
|
"loss": 4.0481, |
|
"step": 716288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.826383370141965e-05, |
|
"loss": 4.0669, |
|
"step": 716800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8255464132712866e-05, |
|
"loss": 4.045, |
|
"step": 717312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.824707818520234e-05, |
|
"loss": 4.0348, |
|
"step": 717824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.823869223769182e-05, |
|
"loss": 4.0639, |
|
"step": 718336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.82303062901813e-05, |
|
"loss": 4.0357, |
|
"step": 718848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8221936721474515e-05, |
|
"loss": 4.0335, |
|
"step": 719360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.821355077396399e-05, |
|
"loss": 4.0511, |
|
"step": 719872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8205181205257204e-05, |
|
"loss": 4.0523, |
|
"step": 720384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8196795257746684e-05, |
|
"loss": 4.0367, |
|
"step": 720896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8188409310236164e-05, |
|
"loss": 4.0384, |
|
"step": 721408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8180023362725644e-05, |
|
"loss": 4.0313, |
|
"step": 721920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8171637415215124e-05, |
|
"loss": 4.0437, |
|
"step": 722432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8163251467704604e-05, |
|
"loss": 4.0547, |
|
"step": 722944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8154865520194084e-05, |
|
"loss": 4.0456, |
|
"step": 723456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8146479572683564e-05, |
|
"loss": 4.0476, |
|
"step": 723968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8138093625173044e-05, |
|
"loss": 4.0408, |
|
"step": 724480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8129707677662524e-05, |
|
"loss": 4.0637, |
|
"step": 724992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.812133810895573e-05, |
|
"loss": 4.0378, |
|
"step": 725504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.811295216144521e-05, |
|
"loss": 4.0512, |
|
"step": 726016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.810456621393469e-05, |
|
"loss": 4.0472, |
|
"step": 726528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.809618026642417e-05, |
|
"loss": 4.0436, |
|
"step": 727040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.808779431891365e-05, |
|
"loss": 4.049, |
|
"step": 727552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.807940837140313e-05, |
|
"loss": 4.0528, |
|
"step": 728064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.807102242389262e-05, |
|
"loss": 4.0422, |
|
"step": 728576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.80626364763821e-05, |
|
"loss": 4.0528, |
|
"step": 729088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.805426690767531e-05, |
|
"loss": 4.0404, |
|
"step": 729600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.804588096016479e-05, |
|
"loss": 4.0421, |
|
"step": 730112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8037511391458e-05, |
|
"loss": 4.0447, |
|
"step": 730624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8029141822751207e-05, |
|
"loss": 4.0503, |
|
"step": 731136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8020772254044416e-05, |
|
"loss": 4.037, |
|
"step": 731648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8012386306533896e-05, |
|
"loss": 4.042, |
|
"step": 732160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8004000359023376e-05, |
|
"loss": 4.0381, |
|
"step": 732672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7995614411512856e-05, |
|
"loss": 4.0464, |
|
"step": 733184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.798722846400234e-05, |
|
"loss": 4.0463, |
|
"step": 733696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.797884251649182e-05, |
|
"loss": 4.0299, |
|
"step": 734208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.79704565689813e-05, |
|
"loss": 4.0488, |
|
"step": 734720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.796207062147078e-05, |
|
"loss": 4.0462, |
|
"step": 735232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.795368467396026e-05, |
|
"loss": 4.0488, |
|
"step": 735744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.794529872644974e-05, |
|
"loss": 4.0256, |
|
"step": 736256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.793691277893922e-05, |
|
"loss": 4.0369, |
|
"step": 736768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.79285268314287e-05, |
|
"loss": 4.0351, |
|
"step": 737280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7920140883918175e-05, |
|
"loss": 4.0508, |
|
"step": 737792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.791177131521139e-05, |
|
"loss": 4.0445, |
|
"step": 738304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.790338536770087e-05, |
|
"loss": 4.0469, |
|
"step": 738816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.789499942019035e-05, |
|
"loss": 4.0413, |
|
"step": 739328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7886613472679824e-05, |
|
"loss": 4.0335, |
|
"step": 739840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.787824390397304e-05, |
|
"loss": 4.031, |
|
"step": 740352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.786985795646253e-05, |
|
"loss": 4.04, |
|
"step": 740864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7861472008952e-05, |
|
"loss": 4.0323, |
|
"step": 741376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.785308606144148e-05, |
|
"loss": 4.0428, |
|
"step": 741888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.784470011393096e-05, |
|
"loss": 4.0316, |
|
"step": 742400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.783631416642044e-05, |
|
"loss": 4.0281, |
|
"step": 742912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.782794459771365e-05, |
|
"loss": 4.0351, |
|
"step": 743424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.781955865020313e-05, |
|
"loss": 4.0396, |
|
"step": 743936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.781117270269261e-05, |
|
"loss": 4.0354, |
|
"step": 744448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.780278675518209e-05, |
|
"loss": 4.0301, |
|
"step": 744960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.779440080767157e-05, |
|
"loss": 4.0453, |
|
"step": 745472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.778601486016105e-05, |
|
"loss": 4.0465, |
|
"step": 745984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7777645291454264e-05, |
|
"loss": 4.0313, |
|
"step": 746496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7769259343943744e-05, |
|
"loss": 4.0395, |
|
"step": 747008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7760873396433224e-05, |
|
"loss": 4.0389, |
|
"step": 747520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7752487448922704e-05, |
|
"loss": 4.039, |
|
"step": 748032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7744101501412184e-05, |
|
"loss": 4.0391, |
|
"step": 748544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7735715553901664e-05, |
|
"loss": 4.0347, |
|
"step": 749056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7727329606391144e-05, |
|
"loss": 4.0356, |
|
"step": 749568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7718943658880624e-05, |
|
"loss": 4.0359, |
|
"step": 750080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.771057409017383e-05, |
|
"loss": 4.0429, |
|
"step": 750592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.770218814266331e-05, |
|
"loss": 4.0374, |
|
"step": 751104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.769381857395652e-05, |
|
"loss": 4.028, |
|
"step": 751616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7685432626446e-05, |
|
"loss": 4.0377, |
|
"step": 752128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.767704667893548e-05, |
|
"loss": 4.0213, |
|
"step": 752640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.766866073142496e-05, |
|
"loss": 4.051, |
|
"step": 753152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.766027478391444e-05, |
|
"loss": 4.0302, |
|
"step": 753664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.765188883640393e-05, |
|
"loss": 4.045, |
|
"step": 754176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.764350288889341e-05, |
|
"loss": 4.0256, |
|
"step": 754688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.763511694138289e-05, |
|
"loss": 4.0473, |
|
"step": 755200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.76267473726761e-05, |
|
"loss": 4.0435, |
|
"step": 755712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.761836142516558e-05, |
|
"loss": 4.0313, |
|
"step": 756224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.760999185645879e-05, |
|
"loss": 4.0367, |
|
"step": 756736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.760160590894827e-05, |
|
"loss": 4.0334, |
|
"step": 757248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.759321996143775e-05, |
|
"loss": 4.0422, |
|
"step": 757760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.758483401392723e-05, |
|
"loss": 4.0478, |
|
"step": 758272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.757644806641671e-05, |
|
"loss": 4.0222, |
|
"step": 758784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7568078497709916e-05, |
|
"loss": 4.0406, |
|
"step": 759296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7559692550199396e-05, |
|
"loss": 4.0323, |
|
"step": 759808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.755130660268888e-05, |
|
"loss": 4.0426, |
|
"step": 760320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.754292065517836e-05, |
|
"loss": 4.0326, |
|
"step": 760832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7534534707667836e-05, |
|
"loss": 4.0366, |
|
"step": 761344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.752616513896105e-05, |
|
"loss": 4.0367, |
|
"step": 761856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.751777919145053e-05, |
|
"loss": 4.0402, |
|
"step": 762368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.750939324394001e-05, |
|
"loss": 4.0388, |
|
"step": 762880 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.1215620040893555, |
|
"eval_runtime": 294.8761, |
|
"eval_samples_per_second": 1294.072, |
|
"eval_steps_per_second": 40.441, |
|
"step": 763200 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 2.939484666167586e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|