{
  "best_metric": 4.044595241546631,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-domain/lstm/3/checkpoints/checkpoint-915840",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 915840,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8198, "step": 1 },
    { "epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5508, "step": 512 },
    { "epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0546, "step": 1024 },
    { "epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 6.9887, "step": 1536 },
    { "epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.9426, "step": 2048 },
    { "epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.915, "step": 2560 },
    { "epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7347, "step": 3072 },
    { "epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 6.6264, "step": 3584 },
    { "epoch": 0.0, "learning_rate": 4.993291241991584e-05, "loss": 6.5281, "step": 4096 },
    { "epoch": 0.0, "learning_rate": 4.992452647240532e-05, "loss": 6.4559, "step": 4608 },
    { "epoch": 0.0, "learning_rate": 4.99161405248948e-05, "loss": 6.3935, "step": 5120 },
    { "epoch": 0.0, "learning_rate": 4.990775457738428e-05, "loss": 6.3291, "step": 5632 },
    { "epoch": 0.0, "learning_rate": 4.989936862987376e-05, "loss": 6.2547, "step": 6144 },
    { "epoch": 0.0, "learning_rate": 4.989098268236324e-05, "loss": 6.1878, "step": 6656 },
    { "epoch": 0.0, "learning_rate": 4.988259673485272e-05, "loss": 6.1296, "step": 7168 },
    { "epoch": 0.0, "learning_rate": 4.98742107873422e-05, "loss": 6.0676, "step": 7680 },
    { "epoch": 0.0, "learning_rate": 4.986582483983168e-05, "loss": 6.0186, "step": 8192 },
    { "epoch": 0.0, "learning_rate": 4.985743889232116e-05, "loss": 5.9742, "step": 8704 },
    { "epoch": 0.0, "learning_rate": 4.984905294481064e-05, "loss": 5.9331, "step": 9216 },
    { "epoch": 0.0, "learning_rate": 4.984066699730012e-05, "loss": 5.8941, "step": 9728 },
    { "epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.8576, "step": 10240 },
    { "epoch": 0.0, "learning_rate": 4.982391148108281e-05, "loss": 5.814, "step": 10752 },
    { "epoch": 0.0, "learning_rate": 4.981552553357229e-05, "loss": 5.7914, "step": 11264 },
    { "epoch": 0.0, "learning_rate": 4.980713958606178e-05, "loss": 5.7546, "step": 11776 },
    { "epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7329, "step": 12288 },
    { "epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.7053, "step": 12800 },
    { "epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.6822, "step": 13312 },
    { "epoch": 0.0, "learning_rate": 4.9773612174823426e-05, "loss": 5.6519, "step": 13824 },
    { "epoch": 0.0, "learning_rate": 4.97652262273129e-05, "loss": 5.6275, "step": 14336 },
    { "epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 5.6097, "step": 14848 },
    { "epoch": 0.01, "learning_rate": 4.9748470711095595e-05, "loss": 5.5838, "step": 15360 },
    { "epoch": 0.01, "learning_rate": 4.974008476358507e-05, "loss": 5.5794, "step": 15872 },
    { "epoch": 0.01, "learning_rate": 4.973169881607455e-05, "loss": 5.5486, "step": 16384 },
    { "epoch": 0.01, "learning_rate": 4.972331286856403e-05, "loss": 5.5386, "step": 16896 },
    { "epoch": 0.01, "learning_rate": 4.9714943299857244e-05, "loss": 5.5314, "step": 17408 },
    { "epoch": 0.01, "learning_rate": 4.9706557352346724e-05, "loss": 5.5107, "step": 17920 },
    { "epoch": 0.01, "learning_rate": 4.9698171404836204e-05, "loss": 5.4979, "step": 18432 },
    { "epoch": 0.01, "learning_rate": 4.9689785457325684e-05, "loss": 5.4634, "step": 18944 },
    { "epoch": 0.01, "learning_rate": 4.9681399509815164e-05, "loss": 5.4573, "step": 19456 },
    { "epoch": 0.01, "learning_rate": 4.967302994110837e-05, "loss": 5.437, "step": 19968 },
    { "epoch": 0.01, "learning_rate": 4.966464399359785e-05, "loss": 5.4312, "step": 20480 },
    { "epoch": 0.01, "learning_rate": 4.965625804608733e-05, "loss": 5.4135, "step": 20992 },
    { "epoch": 0.01, "learning_rate": 4.964787209857681e-05, "loss": 5.4049, "step": 21504 },
    { "epoch": 0.01, "learning_rate": 4.963948615106629e-05, "loss": 5.3742, "step": 22016 },
    { "epoch": 0.01, "learning_rate": 4.963110020355577e-05, "loss": 5.3865, "step": 22528 },
    { "epoch": 0.01, "learning_rate": 4.962273063484898e-05, "loss": 5.3751, "step": 23040 },
    { "epoch": 0.01, "learning_rate": 4.961434468733847e-05, "loss": 5.3665, "step": 23552 },
    { "epoch": 0.01, "learning_rate": 4.960595873982795e-05, "loss": 5.3533, "step": 24064 },
    { "epoch": 0.01, "learning_rate": 4.959757279231743e-05, "loss": 5.3256, "step": 24576 },
    { "epoch": 0.01, "learning_rate": 4.958918684480691e-05, "loss": 5.3191, "step": 25088 },
    { "epoch": 0.01, "learning_rate": 4.958081727610012e-05, "loss": 5.3207, "step": 25600 },
    { "epoch": 0.01, "learning_rate": 4.95724313285896e-05, "loss": 5.3065, "step": 26112 },
    { "epoch": 0.01, "learning_rate": 4.956406175988281e-05, "loss": 5.2952, "step": 26624 },
    { "epoch": 0.01, "learning_rate": 4.955567581237229e-05, "loss": 5.2703, "step": 27136 },
    { "epoch": 0.01, "learning_rate": 4.954728986486177e-05, "loss": 5.2708, "step": 27648 },
    { "epoch": 0.01, "learning_rate": 4.953890391735125e-05, "loss": 5.2579, "step": 28160 },
    { "epoch": 0.01, "learning_rate": 4.9530517969840727e-05, "loss": 5.2837, "step": 28672 },
    { "epoch": 0.01, "learning_rate": 4.9522132022330207e-05, "loss": 5.2368, "step": 29184 },
    { "epoch": 0.01, "learning_rate": 4.9513746074819686e-05, "loss": 5.2413, "step": 29696 },
    { "epoch": 0.01, "learning_rate": 4.9505360127309166e-05, "loss": 5.2355, "step": 30208 },
    { "epoch": 0.01, "learning_rate": 4.9496974179798646e-05, "loss": 5.2152, "step": 30720 },
    { "epoch": 0.01, "learning_rate": 4.948858823228813e-05, "loss": 5.2171, "step": 31232 },
    { "epoch": 0.01, "learning_rate": 4.948021866358134e-05, "loss": 5.2008, "step": 31744 },
    { "epoch": 0.01, "learning_rate": 4.947183271607082e-05, "loss": 5.1848, "step": 32256 },
    { "epoch": 0.01, "learning_rate": 4.94634467685603e-05, "loss": 5.1828, "step": 32768 },
    { "epoch": 0.01, "learning_rate": 4.945506082104978e-05, "loss": 5.2, "step": 33280 },
    { "epoch": 0.01, "learning_rate": 4.9446674873539255e-05, "loss": 5.1739, "step": 33792 },
    { "epoch": 0.01, "learning_rate": 4.9438288926028735e-05, "loss": 5.165, "step": 34304 },
    { "epoch": 0.01, "learning_rate": 4.9429902978518215e-05, "loss": 5.1481, "step": 34816 },
    { "epoch": 0.01, "learning_rate": 4.9421517031007695e-05, "loss": 5.1484, "step": 35328 },
    { "epoch": 0.01, "learning_rate": 4.9413147462300904e-05, "loss": 5.1574, "step": 35840 },
    { "epoch": 0.01, "learning_rate": 4.940477789359412e-05, "loss": 5.1536, "step": 36352 },
    { "epoch": 0.01, "learning_rate": 4.93963919460836e-05, "loss": 5.1402, "step": 36864 },
    { "epoch": 0.01, "learning_rate": 4.938800599857309e-05, "loss": 5.1481, "step": 37376 },
    { "epoch": 0.01, "learning_rate": 4.937962005106256e-05, "loss": 5.141, "step": 37888 },
    { "epoch": 0.01, "learning_rate": 4.937123410355204e-05, "loss": 5.129, "step": 38400 },
    { "epoch": 0.01, "learning_rate": 4.9362864534845256e-05, "loss": 5.1102, "step": 38912 },
    { "epoch": 0.01, "learning_rate": 4.935447858733473e-05, "loss": 5.0985, "step": 39424 },
    { "epoch": 0.01, "learning_rate": 4.934609263982421e-05, "loss": 5.0972, "step": 39936 },
    { "epoch": 0.01, "learning_rate": 4.933770669231369e-05, "loss": 5.0896, "step": 40448 },
    { "epoch": 0.01, "learning_rate": 4.9329337123606905e-05, "loss": 5.0857, "step": 40960 },
    { "epoch": 0.01, "learning_rate": 4.932095117609638e-05, "loss": 5.0819, "step": 41472 },
    { "epoch": 0.01, "learning_rate": 4.931256522858586e-05, "loss": 5.0842, "step": 41984 },
    { "epoch": 0.01, "learning_rate": 4.930417928107534e-05, "loss": 5.0763, "step": 42496 },
    { "epoch": 0.01, "learning_rate": 4.9295809712368554e-05, "loss": 5.05, "step": 43008 },
    { "epoch": 0.01, "learning_rate": 4.9287423764858034e-05, "loss": 5.0449, "step": 43520 },
    { "epoch": 0.01, "learning_rate": 4.9279037817347514e-05, "loss": 5.0553, "step": 44032 },
    { "epoch": 0.01, "learning_rate": 4.9270651869836994e-05, "loss": 5.0557, "step": 44544 },
    { "epoch": 0.01, "learning_rate": 4.92622823011302e-05, "loss": 5.0322, "step": 45056 },
    { "epoch": 0.01, "learning_rate": 4.925389635361968e-05, "loss": 5.0348, "step": 45568 },
    { "epoch": 0.02, "learning_rate": 4.924551040610916e-05, "loss": 5.0363, "step": 46080 },
    { "epoch": 0.02, "learning_rate": 4.923712445859864e-05, "loss": 5.0208, "step": 46592 },
    { "epoch": 0.02, "learning_rate": 4.922875488989185e-05, "loss": 5.0149, "step": 47104 },
    { "epoch": 0.02, "learning_rate": 4.922036894238133e-05, "loss": 5.0145, "step": 47616 },
    { "epoch": 0.02, "learning_rate": 4.921199937367455e-05, "loss": 5.0026, "step": 48128 },
    { "epoch": 0.02, "learning_rate": 4.920361342616403e-05, "loss": 5.0076, "step": 48640 },
    { "epoch": 0.02, "learning_rate": 4.919522747865351e-05, "loss": 4.9971, "step": 49152 },
    { "epoch": 0.02, "learning_rate": 4.918684153114299e-05, "loss": 4.9818, "step": 49664 },
    { "epoch": 0.02, "learning_rate": 4.917845558363247e-05, "loss": 4.9816, "step": 50176 },
    { "epoch": 0.02, "learning_rate": 4.917006963612195e-05, "loss": 4.981, "step": 50688 },
    { "epoch": 0.02, "learning_rate": 4.916168368861143e-05, "loss": 4.9758, "step": 51200 },
    { "epoch": 0.02, "learning_rate": 4.915329774110091e-05, "loss": 4.9761, "step": 51712 },
    { "epoch": 0.02, "learning_rate": 4.914491179359039e-05, "loss": 4.9668, "step": 52224 },
    { "epoch": 0.02, "learning_rate": 4.913652584607987e-05, "loss": 4.9643, "step": 52736 },
    { "epoch": 0.02, "learning_rate": 4.9128156277373076e-05, "loss": 4.9535, "step": 53248 },
    { "epoch": 0.02, "learning_rate": 4.9119770329862556e-05, "loss": 4.9354, "step": 53760 },
    { "epoch": 0.02, "learning_rate": 4.9111384382352036e-05, "loss": 4.94, "step": 54272 },
    { "epoch": 0.02, "learning_rate": 4.9102998434841516e-05, "loss": 4.9388, "step": 54784 },
    { "epoch": 0.02, "learning_rate": 4.9094612487330996e-05, "loss": 4.9354, "step": 55296 },
    { "epoch": 0.02, "learning_rate": 4.908624291862421e-05, "loss": 4.9258, "step": 55808 },
    { "epoch": 0.02, "learning_rate": 4.907785697111369e-05, "loss": 4.9232, "step": 56320 },
    { "epoch": 0.02, "learning_rate": 4.906947102360317e-05, "loss": 4.9143, "step": 56832 },
    { "epoch": 0.02, "learning_rate": 4.906108507609265e-05, "loss": 4.9155, "step": 57344 },
    { "epoch": 0.02, "learning_rate": 4.905269912858213e-05, "loss": 4.9094, "step": 57856 },
    { "epoch": 0.02, "learning_rate": 4.904431318107161e-05, "loss": 4.9097, "step": 58368 },
    { "epoch": 0.02, "learning_rate": 4.903592723356109e-05, "loss": 4.9011, "step": 58880 },
    { "epoch": 0.02, "learning_rate": 4.9027541286050565e-05, "loss": 4.9005, "step": 59392 },
    { "epoch": 0.02, "learning_rate": 4.901917171734378e-05, "loss": 4.901, "step": 59904 },
    { "epoch": 0.02, "learning_rate": 4.901078576983326e-05, "loss": 4.8919, "step": 60416 },
    { "epoch": 0.02, "learning_rate": 4.900239982232274e-05, "loss": 4.8984, "step": 60928 },
    { "epoch": 0.02, "learning_rate": 4.899403025361595e-05, "loss": 4.8966, "step": 61440 },
    { "epoch": 0.02, "learning_rate": 4.898564430610543e-05, "loss": 4.8811, "step": 61952 },
    { "epoch": 0.02, "learning_rate": 4.897725835859491e-05, "loss": 4.8873, "step": 62464 },
    { "epoch": 0.02, "learning_rate": 4.8968872411084396e-05, "loss": 4.8701, "step": 62976 },
    { "epoch": 0.02, "learning_rate": 4.8960502842377605e-05, "loss": 4.875, "step": 63488 },
    { "epoch": 0.02, "learning_rate": 4.8952133273670815e-05, "loss": 4.8508, "step": 64000 },
    { "epoch": 0.02, "learning_rate": 4.8943747326160294e-05, "loss": 4.8636, "step": 64512 },
    { "epoch": 0.02, "learning_rate": 4.8935361378649774e-05, "loss": 4.849, "step": 65024 },
    { "epoch": 0.02, "learning_rate": 4.8926975431139254e-05, "loss": 4.8519, "step": 65536 },
    { "epoch": 0.02, "learning_rate": 4.8918589483628734e-05, "loss": 4.8331, "step": 66048 },
    { "epoch": 0.02, "learning_rate": 4.8910203536118214e-05, "loss": 4.8643, "step": 66560 },
    { "epoch": 0.02, "learning_rate": 4.890181758860769e-05, "loss": 4.8515, "step": 67072 },
    { "epoch": 0.02, "learning_rate": 4.889343164109717e-05, "loss": 4.8391, "step": 67584 },
    { "epoch": 0.02, "learning_rate": 4.888506207239038e-05, "loss": 4.8366, "step": 68096 },
    { "epoch": 0.02, "learning_rate": 4.887667612487986e-05, "loss": 4.8324, "step": 68608 },
    { "epoch": 0.02, "learning_rate": 4.886830655617308e-05, "loss": 4.8419, "step": 69120 },
    { "epoch": 0.02, "learning_rate": 4.885992060866256e-05, "loss": 4.8351, "step": 69632 },
    { "epoch": 0.02, "learning_rate": 4.885153466115204e-05, "loss": 4.8236, "step": 70144 },
    { "epoch": 0.02, "learning_rate": 4.884314871364151e-05, "loss": 4.8287, "step": 70656 },
    { "epoch": 0.02, "learning_rate": 4.883476276613099e-05, "loss": 4.8155, "step": 71168 },
    { "epoch": 0.02, "learning_rate": 4.882637681862047e-05, "loss": 4.8247, "step": 71680 },
    { "epoch": 0.02, "learning_rate": 4.881799087110995e-05, "loss": 4.808, "step": 72192 },
    { "epoch": 0.02, "learning_rate": 4.880960492359943e-05, "loss": 4.8093, "step": 72704 },
    { "epoch": 0.02, "learning_rate": 4.880125173369638e-05, "loss": 4.8077, "step": 73216 },
    { "epoch": 0.02, "learning_rate": 4.879286578618586e-05, "loss": 4.797, "step": 73728 },
    { "epoch": 0.02, "learning_rate": 4.878447983867534e-05, "loss": 4.798, "step": 74240 },
    { "epoch": 0.02, "learning_rate": 4.877609389116482e-05, "loss": 4.7987, "step": 74752 },
    { "epoch": 0.02, "learning_rate": 4.87677079436543e-05, "loss": 4.7928, "step": 75264 },
    { "epoch": 0.02, "learning_rate": 4.875933837494751e-05, "loss": 4.7986, "step": 75776 },
    { "epoch": 0.02, "learning_rate": 4.8750952427436986e-05, "loss": 4.7862, "step": 76288 },
    { "epoch": 0.03, "eval_loss": 4.749021053314209, "eval_runtime": 316.6809, "eval_samples_per_second": 1204.97, "eval_steps_per_second": 37.656, "step": 76320 },
    { "epoch": 1.0, "learning_rate": 4.8742566479926466e-05, "loss": 4.7758, "step": 76800 },
    { "epoch": 1.0, "learning_rate": 4.8734180532415946e-05, "loss": 4.7701, "step": 77312 },
    { "epoch": 1.0, "learning_rate": 4.8725794584905426e-05, "loss": 4.7875, "step": 77824 },
    { "epoch": 1.0, "learning_rate": 4.8717425016198635e-05, "loss": 4.7705, "step": 78336 },
    { "epoch": 1.0, "learning_rate": 4.8709039068688115e-05, "loss": 4.7862, "step": 78848 },
    { "epoch": 1.0, "learning_rate": 4.8700653121177595e-05, "loss": 4.7582, "step": 79360 },
    { "epoch": 1.0, "learning_rate": 4.8692267173667075e-05, "loss": 4.7692, "step": 79872 },
    { "epoch": 1.0, "learning_rate": 4.8683881226156555e-05, "loss": 4.7475, "step": 80384 },
    { "epoch": 1.0, "learning_rate": 4.867551165744977e-05, "loss": 4.7603, "step": 80896 },
    { "epoch": 1.0, "learning_rate": 4.866712570993925e-05, "loss": 4.7568, "step": 81408 },
    { "epoch": 1.0, "learning_rate": 4.865873976242873e-05, "loss": 4.7611, "step": 81920 },
    { "epoch": 1.0, "learning_rate": 4.865035381491821e-05, "loss": 4.7563, "step": 82432 },
    { "epoch": 1.0, "learning_rate": 4.864198424621142e-05, "loss": 4.7388, "step": 82944 },
    { "epoch": 1.0, "learning_rate": 4.86335982987009e-05, "loss": 4.7396, "step": 83456 },
    { "epoch": 1.0, "learning_rate": 4.862521235119038e-05, "loss": 4.7337, "step": 83968 },
    { "epoch": 1.0, "learning_rate": 4.861682640367986e-05, "loss": 4.7303, "step": 84480 },
    { "epoch": 1.0, "learning_rate": 4.860844045616934e-05, "loss": 4.7409, "step": 84992 },
    { "epoch": 1.0, "learning_rate": 4.860005450865882e-05, "loss": 4.7344, "step": 85504 },
    { "epoch": 1.0, "learning_rate": 4.85916685611483e-05, "loss": 4.7301, "step": 86016 },
    { "epoch": 1.0, "learning_rate": 4.858328261363778e-05, "loss": 4.7454, "step": 86528 },
    { "epoch": 1.0, "learning_rate": 4.857489666612726e-05, "loss": 4.7206, "step": 87040 },
    { "epoch": 1.0, "learning_rate": 4.8566527097420475e-05, "loss": 4.7289, "step": 87552 },
    { "epoch": 1.0, "learning_rate": 4.8558141149909955e-05, "loss": 4.722, "step": 88064 },
    { "epoch": 1.0, "learning_rate": 4.8549755202399435e-05, "loss": 4.7316, "step": 88576 },
    { "epoch": 1.0, "learning_rate": 4.8541369254888915e-05, "loss": 4.7079, "step": 89088 },
    { "epoch": 1.0, "learning_rate": 4.8532999686182124e-05, "loss": 4.7144, "step": 89600 },
    { "epoch": 1.0, "learning_rate": 4.8524613738671604e-05, "loss": 4.706, "step": 90112 },
    { "epoch": 1.0, "learning_rate": 4.8516227791161084e-05, "loss": 4.7147, "step": 90624 },
    { "epoch": 1.0, "learning_rate": 4.8507841843650564e-05, "loss": 4.6943, "step": 91136 },
    { "epoch": 1.01, "learning_rate": 4.8499455896140044e-05, "loss": 4.6995, "step": 91648 },
    { "epoch": 1.01, "learning_rate": 4.849108632743325e-05, "loss": 4.7082, "step": 92160 },
    { "epoch": 1.01, "learning_rate": 4.848270037992273e-05, "loss": 4.6993, "step": 92672 },
    { "epoch": 1.01, "learning_rate": 4.847431443241221e-05, "loss": 4.6997, "step": 93184 },
    { "epoch": 1.01, "learning_rate": 4.846592848490169e-05, "loss": 4.7053, "step": 93696 },
    { "epoch": 1.01, "learning_rate": 4.845755891619491e-05, "loss": 4.6986, "step": 94208 },
    { "epoch": 1.01, "learning_rate": 4.844917296868439e-05, "loss": 4.6983, "step": 94720 },
    { "epoch": 1.01, "learning_rate": 4.844078702117387e-05, "loss": 4.6799, "step": 95232 },
    { "epoch": 1.01, "learning_rate": 4.843240107366335e-05, "loss": 4.6899, "step": 95744 },
    { "epoch": 1.01, "learning_rate": 4.842401512615282e-05, "loss": 4.6733, "step": 96256 },
    { "epoch": 1.01, "learning_rate": 4.84156291786423e-05, "loss": 4.6829, "step": 96768 },
    { "epoch": 1.01, "learning_rate": 4.840725960993552e-05, "loss": 4.6803, "step": 97280 },
    { "epoch": 1.01, "learning_rate": 4.8398873662425e-05, "loss": 4.6772, "step": 97792 },
    { "epoch": 1.01, "learning_rate": 4.839048771491447e-05, "loss": 4.6633, "step": 98304 },
    { "epoch": 1.01, "learning_rate": 4.838210176740395e-05, "loss": 4.6781, "step": 98816 },
    { "epoch": 1.01, "learning_rate": 4.837371581989343e-05, "loss": 4.6769, "step": 99328 },
    { "epoch": 1.01, "learning_rate": 4.836532987238292e-05, "loss": 4.6699, "step": 99840 },
    { "epoch": 1.01, "learning_rate": 4.83569439248724e-05, "loss": 4.6766, "step": 100352 },
    { "epoch": 1.01, "learning_rate": 4.834855797736188e-05, "loss": 4.6471, "step": 100864 },
    { "epoch": 1.01, "learning_rate": 4.8340188408655086e-05, "loss": 4.6481, "step": 101376 },
    { "epoch": 1.01, "learning_rate": 4.8331802461144566e-05, "loss": 4.667, "step": 101888 },
    { "epoch": 1.01, "learning_rate": 4.8323416513634046e-05, "loss": 4.6551, "step": 102400 },
    { "epoch": 1.01, "learning_rate": 4.8315030566123526e-05, "loss": 4.6527, "step": 102912 },
    { "epoch": 1.01, "learning_rate": 4.8306644618613006e-05, "loss": 4.6356, "step": 103424 },
    { "epoch": 1.01, "learning_rate": 4.8298291428709944e-05, "loss": 4.6404, "step": 103936 },
    { "epoch": 1.01, "learning_rate": 4.8289905481199424e-05, "loss": 4.6352, "step": 104448 },
    { "epoch": 1.01, "learning_rate": 4.828153591249264e-05, "loss": 4.6609, "step": 104960 },
    { "epoch": 1.01, "learning_rate": 4.827314996498212e-05, "loss": 4.6298, "step": 105472 },
    { "epoch": 1.01, "learning_rate": 4.82647640174716e-05, "loss": 4.6401, "step": 105984 },
    { "epoch": 1.01, "learning_rate": 4.825637806996108e-05, "loss": 4.6436, "step": 106496 },
    { "epoch": 1.01, "learning_rate": 4.824799212245056e-05, "loss": 4.6251, "step": 107008 },
    { "epoch": 1.01, "learning_rate": 4.823960617494004e-05, "loss": 4.6304, "step": 107520 },
    { "epoch": 1.01, "learning_rate": 4.823122022742952e-05, "loss": 4.6242, "step": 108032 },
    { "epoch": 1.01, "learning_rate": 4.8222834279919e-05, "loss": 4.6148, "step": 108544 },
    { "epoch": 1.01, "learning_rate": 4.821446471121221e-05, "loss": 4.6207, "step": 109056 },
    { "epoch": 1.01, "learning_rate": 4.820607876370169e-05, "loss": 4.6346, "step": 109568 },
    { "epoch": 1.01, "learning_rate": 4.819769281619117e-05, "loss": 4.623, "step": 110080 },
    { "epoch": 1.01, "learning_rate": 4.818930686868065e-05, "loss": 4.6109, "step": 110592 },
    { "epoch": 1.01, "learning_rate": 4.818092092117013e-05, "loss": 4.6113, "step": 111104 },
    { "epoch": 1.01, "learning_rate": 4.817253497365961e-05, "loss": 4.6102, "step": 111616 },
    { "epoch": 1.01, "learning_rate": 4.816416540495282e-05, "loss": 4.6266, "step": 112128 },
    { "epoch": 1.01, "learning_rate": 4.8155779457442305e-05, "loss": 4.6284, "step": 112640 },
    { "epoch": 1.01, "learning_rate": 4.8147393509931785e-05, "loss": 4.6172, "step": 113152 },
    { "epoch": 1.01, "learning_rate": 4.8139007562421265e-05, "loss": 4.6309, "step": 113664 },
    { "epoch": 1.01, "learning_rate": 4.8130621614910745e-05, "loss": 4.6272, "step": 114176 },
    { "epoch": 1.01, "learning_rate": 4.8122252046203954e-05, "loss": 4.6228, "step": 114688 },
    { "epoch": 1.01, "learning_rate": 4.811388247749716e-05, "loss": 4.6077, "step": 115200 },
    { "epoch": 1.01, "learning_rate": 4.810549652998664e-05, "loss": 4.6066, "step": 115712 },
    { "epoch": 1.01, "learning_rate": 4.809711058247612e-05, "loss": 4.6022, "step": 116224 },
    { "epoch": 1.01, "learning_rate": 4.80887246349656e-05, "loss": 4.6022, "step": 116736 },
    { "epoch": 1.01, "learning_rate": 4.808033868745508e-05, "loss": 4.5984, "step": 117248 },
    { "epoch": 1.01, "learning_rate": 4.807195273994456e-05, "loss": 4.6026, "step": 117760 },
    { "epoch": 1.01, "learning_rate": 4.806356679243404e-05, "loss": 4.6112, "step": 118272 },
    { "epoch": 1.01, "learning_rate": 4.805518084492352e-05, "loss": 4.601, "step": 118784 },
    { "epoch": 1.01, "learning_rate": 4.8046794897413e-05, "loss": 4.5852, "step": 119296 },
    { "epoch": 1.01, "learning_rate": 4.803840894990248e-05, "loss": 4.5855, "step": 119808 },
    { "epoch": 1.01, "learning_rate": 4.80300393811957e-05, "loss": 4.5902, "step": 120320 },
    { "epoch": 1.01, "learning_rate": 4.802165343368518e-05, "loss": 4.6024, "step": 120832 },
    { "epoch": 1.01, "learning_rate": 4.801326748617466e-05, "loss": 4.5828, "step": 121344 },
    { "epoch": 1.01, "learning_rate": 4.800488153866413e-05, "loss": 4.5892, "step": 121856 },
    { "epoch": 1.02, "learning_rate": 4.799649559115361e-05, "loss": 4.5932, "step": 122368 },
    { "epoch": 1.02, "learning_rate": 4.798810964364309e-05, "loss": 4.5839, "step": 122880 },
    { "epoch": 1.02, "learning_rate": 4.7979756453740036e-05, "loss": 4.5776, "step": 123392 },
    { "epoch": 1.02, "learning_rate": 4.7971370506229516e-05, "loss": 4.5794, "step": 123904 },
    { "epoch": 1.02, "learning_rate": 4.7962984558718996e-05, "loss": 4.5782, "step": 124416 },
    { "epoch": 1.02, "learning_rate": 4.7954598611208476e-05, "loss": 4.5796, "step": 124928 },
    { "epoch": 1.02, "learning_rate": 4.7946212663697956e-05, "loss": 4.5815, "step": 125440 },
    { "epoch": 1.02, "learning_rate": 4.7937826716187436e-05, "loss": 4.5649, "step": 125952 },
    { "epoch": 1.02, "learning_rate": 4.7929440768676916e-05, "loss": 4.5726, "step": 126464 },
    { "epoch": 1.02, "learning_rate": 4.7921054821166396e-05, "loss": 4.568, "step": 126976 },
    { "epoch": 1.02, "learning_rate": 4.7912668873655876e-05, "loss": 4.5768, "step": 127488 },
    { "epoch": 1.02, "learning_rate": 4.7904299304949085e-05, "loss": 4.5703, "step": 128000 },
    { "epoch": 1.02, "learning_rate": 4.7895913357438565e-05, "loss": 4.571, "step": 128512 },
    { "epoch": 1.02, "learning_rate": 4.7887527409928045e-05, "loss": 4.5673, "step": 129024 },
    { "epoch": 1.02, "learning_rate": 4.7879141462417525e-05, "loss": 4.5631, "step": 129536 },
    { "epoch": 1.02, "learning_rate": 4.7870755514907005e-05, "loss": 4.5479, "step": 130048 },
    { "epoch": 1.02, "learning_rate": 4.7862369567396485e-05, "loss": 4.5519, "step": 130560 },
    { "epoch": 1.02, "learning_rate": 4.7853983619885965e-05, "loss": 4.5569, "step": 131072 },
    { "epoch": 1.02, "learning_rate": 4.7845597672375445e-05, "loss": 4.5538, "step": 131584 },
    { "epoch": 1.02, "learning_rate": 4.783722810366866e-05, "loss": 4.5502, "step": 132096 },
    { "epoch": 1.02, "learning_rate": 4.782885853496187e-05, "loss": 4.5474, "step": 132608 },
    { "epoch": 1.02, "learning_rate": 4.782047258745135e-05, "loss": 4.5426, "step": 133120 },
    { "epoch": 1.02, "learning_rate": 4.781208663994083e-05, "loss": 4.547, "step": 133632 },
    { "epoch": 1.02, "learning_rate": 4.780370069243031e-05, "loss": 4.5445, "step": 134144 },
    { "epoch": 1.02, "learning_rate": 4.779531474491979e-05, "loss": 4.5444, "step": 134656 },
    { "epoch": 1.02, "learning_rate": 4.778692879740927e-05, "loss": 4.5472, "step": 135168 },
    { "epoch": 1.02, "learning_rate": 4.777854284989875e-05, "loss": 4.5454, "step": 135680 },
    { "epoch": 1.02, "learning_rate": 4.777018965999569e-05, "loss": 4.547, "step": 136192 },
    { "epoch": 1.02, "learning_rate": 4.776180371248517e-05, "loss": 4.5417, "step": 136704 },
    { "epoch": 1.02, "learning_rate": 4.775341776497465e-05, "loss": 4.5503, "step": 137216 },
    { "epoch": 1.02, "learning_rate": 4.7745031817464134e-05, "loss": 4.5494, "step": 137728 },
    { "epoch": 1.02, "learning_rate": 4.7736645869953614e-05, "loss": 4.5365, "step": 138240 },
    { "epoch": 1.02, "learning_rate": 4.7728259922443094e-05, "loss": 4.5471, "step": 138752 },
    { "epoch": 1.02, "learning_rate": 4.7719873974932574e-05, "loss": 4.5342, "step": 139264 },
    { "epoch": 1.02, "learning_rate": 4.7711488027422054e-05, "loss": 4.5418, "step": 139776 },
    { "epoch": 1.02, "learning_rate": 4.770311845871526e-05, "loss": 4.5176, "step": 140288 },
    { "epoch": 1.02, "learning_rate": 4.769473251120474e-05, "loss": 4.5354, "step": 140800 },
    { "epoch": 1.02, "learning_rate": 4.768634656369422e-05, "loss": 4.5167, "step": 141312 },
    { "epoch": 1.02, "learning_rate": 4.767797699498743e-05, "loss": 4.5251, "step": 141824 },
    { "epoch": 1.02, "learning_rate": 4.766959104747691e-05, "loss": 4.5117, "step": 142336 },
    { "epoch": 1.02, "learning_rate": 4.766120509996639e-05, "loss": 4.5418, "step": 142848 },
    { "epoch": 1.02, "learning_rate": 4.765281915245587e-05, "loss": 4.5325, "step": 143360 },
    { "epoch": 1.02, "learning_rate": 4.764443320494535e-05, "loss": 4.5257, "step": 143872 },
    { "epoch": 1.02, "learning_rate": 4.763604725743483e-05, "loss": 4.5186, "step": 144384 },
    { "epoch": 1.02, "learning_rate": 4.762766130992431e-05, "loss": 4.5225, "step": 144896 },
    { "epoch": 1.02, "learning_rate": 4.761927536241379e-05, "loss": 4.5318, "step": 145408 },
    { "epoch": 1.02, "learning_rate": 4.761088941490327e-05, "loss": 4.5252, "step": 145920 },
    { "epoch": 1.02, "learning_rate": 4.760251984619649e-05, "loss": 4.5188, "step": 146432 },
    { "epoch": 1.02, "learning_rate": 4.759413389868596e-05, "loss": 4.5229, "step": 146944 },
    { "epoch": 1.02, "learning_rate": 4.758574795117544e-05, "loss": 4.5147, "step": 147456 },
    { "epoch": 1.02, "learning_rate": 4.757736200366492e-05, "loss": 4.5276, "step": 147968 },
    { "epoch": 1.02, "learning_rate": 4.75689760561544e-05, "loss": 4.5084, "step": 148480 },
    { "epoch": 1.02, "learning_rate": 4.756059010864388e-05, "loss": 4.5144, "step": 148992 },
    { "epoch": 1.02, "learning_rate": 4.755222053993709e-05, "loss": 4.5098, "step": 149504 },
    { "epoch": 1.02, "learning_rate": 4.754383459242657e-05, "loss": 4.5056, "step": 150016 },
    { "epoch": 1.02, "learning_rate": 4.753544864491605e-05, "loss": 4.5103, "step": 150528 },
    { "epoch": 1.02, "learning_rate": 4.7527062697405536e-05, "loss": 4.5085, "step": 151040 },
    { "epoch": 1.02, "learning_rate": 4.7518693128698745e-05, "loss": 4.5084, "step": 151552 },
    { "epoch": 1.02, "learning_rate": 4.7510307181188225e-05, "loss": 4.5083, "step": 152064 },
    { "epoch": 1.02, "learning_rate": 4.7501921233677705e-05, "loss": 4.5033, "step": 152576 },
    { "epoch": 1.03, "eval_loss": 4.471827507019043, "eval_runtime": 306.1033, "eval_samples_per_second": 1246.609, "eval_steps_per_second": 38.957, "step": 152640 },
    { "epoch": 0.0, "learning_rate": 4.7493535286167185e-05, "loss": 4.4952, "step": 153088 },
    { "epoch": 0.0, "learning_rate": 4.7485149338656665e-05, "loss": 4.4875, "step": 153600 },
    { "epoch": 0.0, "learning_rate": 4.7476763391146145e-05, "loss": 4.5094, "step": 154112 },
    { "epoch": 0.0, "learning_rate": 4.7468377443635625e-05, "loss": 4.4926, "step": 154624 },
    { "epoch": 0.0, "learning_rate": 4.7459991496125105e-05, "loss": 4.5133, "step": 155136 },
    { "epoch": 0.0, "learning_rate": 4.7451605548614585e-05, "loss": 4.4833, "step": 155648 },
    { "epoch": 0.0, "learning_rate": 4.7443219601104065e-05, "loss": 4.4959, "step": 156160 },
    { "epoch": 0.0, "learning_rate": 4.7434833653593545e-05, "loss": 4.4784, "step": 156672 },
    { "epoch": 0.0, "learning_rate": 4.7426447706083025e-05, "loss": 4.4947, "step": 157184 },
    { "epoch": 0.0, "learning_rate": 4.7418061758572505e-05, "loss": 4.4905, "step": 157696 },
    { "epoch": 0.0, "learning_rate": 4.7409675811061985e-05, "loss": 4.4959, "step": 158208 },
    { "epoch": 0.0, "learning_rate": 4.7401289863551465e-05, "loss": 4.4917, "step": 158720 },
    { "epoch": 0.0, "learning_rate": 4.739293667364841e-05, "loss": 4.4758, "step": 159232 },
    { "epoch": 0.0, "learning_rate": 4.738455072613789e-05, "loss": 4.4835, "step": 159744 },
    { "epoch": 0.0, "learning_rate": 4.737616477862737e-05, "loss": 4.4784, "step": 160256 },
    { "epoch": 0.0, "learning_rate": 4.736777883111685e-05, "loss": 4.4749, "step": 160768 },
    { "epoch": 0.0, "learning_rate": 4.735939288360632e-05, "loss": 4.4788, "step": 161280 },
    { "epoch": 0.0, "learning_rate": 4.73510069360958e-05, "loss": 4.4817, "step": 161792 },
    { "epoch": 0.0, "learning_rate": 4.734262098858528e-05, "loss": 4.4763, "step": 162304 },
    { "epoch": 0.0, "learning_rate": 4.733423504107476e-05, "loss": 4.4964, "step": 162816 },
    { "epoch": 0.0, "learning_rate": 4.732584909356424e-05, "loss": 4.4702, "step": 163328 },
    { "epoch": 0.0, "learning_rate": 4.731746314605372e-05, "loss": 4.4782, "step": 163840 },
    { "epoch": 0.0, "learning_rate": 4.73090771985432e-05, "loss": 4.4757, "step": 164352 },
    { "epoch": 0.0, "learning_rate": 4.730069125103269e-05, "loss": 4.4855, "step": 164864 },
    { "epoch": 0.0, "learning_rate": 4.729230530352217e-05, "loss": 4.4662, "step": 165376 },
    { "epoch": 0.0, "learning_rate": 4.728393573481538e-05, "loss": 4.4764, "step": 165888 },
    { "epoch": 0.0, "learning_rate": 4.727554978730486e-05, "loss": 4.465, "step": 166400 },
    { "epoch": 0.0, "learning_rate": 4.726716383979434e-05, "loss": 4.4723, "step": 166912 },
    { "epoch": 0.0, "learning_rate": 4.725877789228382e-05, "loss": 4.4574, "step": 167424 },
    { "epoch": 0.01, "learning_rate": 4.72503919447733e-05, "loss": 4.4607, "step": 167936 },
    { "epoch": 0.01, "learning_rate": 4.724202237606651e-05, "loss": 4.4694, "step": 168448 },
    { "epoch": 0.01, "learning_rate": 4.723363642855599e-05, "loss": 4.4679, "step": 168960 },
    { "epoch": 0.01, "learning_rate": 4.722525048104547e-05, "loss": 4.4676, "step": 169472 },
    { "epoch": 0.01, "learning_rate": 4.721686453353495e-05, "loss": 4.4705, "step": 169984 },
    { "epoch": 0.01, "learning_rate": 4.720847858602443e-05, "loss": 4.4645, "step": 170496 },
    { "epoch": 0.01, "learning_rate": 4.720009263851391e-05, "loss": 4.4704, "step": 171008 },
    { "epoch": 0.01, "learning_rate": 4.719172306980712e-05, "loss": 4.45, "step": 171520 },
    { "epoch": 0.01, "learning_rate": 4.71833371222966e-05, "loss": 4.4659, "step": 172032 },
    { "epoch": 0.01, "learning_rate": 4.717495117478608e-05, "loss": 4.4478, "step": 172544 },
    { "epoch": 0.01, "learning_rate": 4.716656522727556e-05, "loss": 4.4584, "step": 173056 },
    { "epoch": 0.01, "learning_rate": 4.715819565856877e-05, "loss": 4.4552, "step": 173568 },
    { "epoch": 0.01, "learning_rate": 4.714980971105825e-05, "loss": 4.4585, "step": 174080 },
    { "epoch": 0.01, "learning_rate": 4.714142376354773e-05, "loss": 4.4514, "step": 174592 },
    { "epoch": 0.01, "learning_rate": 4.713303781603721e-05, "loss": 4.4521, "step": 175104 },
    { "epoch": 0.01, "learning_rate": 4.7124651868526685e-05, "loss": 4.4592, "step": 175616 },
    { "epoch": 0.01, "learning_rate": 4.7116265921016165e-05, "loss": 4.4526, "step": 176128 },
    { "epoch": 0.01, "learning_rate": 4.7107879973505645e-05, "loss": 4.4565, "step": 176640 },
    { "epoch": 0.01, "learning_rate": 4.7099494025995125e-05, "loss": 4.4369, "step": 177152 },
    { "epoch": 0.01, "learning_rate": 4.709112445728834e-05, "loss": 4.4305, "step": 177664 },
    { "epoch": 0.01, "learning_rate": 4.708273850977782e-05, "loss": 4.4484, "step": 178176 },
    { "epoch": 0.01, "learning_rate": 4.7074368941071037e-05, "loss": 4.4439, "step": 178688 },
    { "epoch": 0.01, "learning_rate": 4.7065982993560517e-05, "loss": 4.4431, "step": 179200 },
    { "epoch": 0.01, "learning_rate": 4.705759704604999e-05, "loss": 4.4257, "step": 179712 },
    { "epoch": 0.01, "learning_rate": 4.704921109853947e-05, "loss": 4.4315, "step": 180224 },
    { "epoch": 0.01, "learning_rate": 4.704082515102895e-05, "loss": 4.4328, "step": 180736 },
    { "epoch": 0.01, "learning_rate": 4.703243920351843e-05, "loss": 4.4454, "step": 181248 },
    { "epoch": 0.01, "learning_rate": 4.702405325600791e-05, "loss": 4.4235, "step": 181760 },
    { "epoch": 0.01, "learning_rate": 4.701566730849739e-05, "loss": 4.4355, "step": 182272 },
    { "epoch": 0.01, "learning_rate": 4.70072977397906e-05, "loss": 4.4382, "step": 182784 },
    { "epoch": 0.01, "learning_rate": 4.6998928171083814e-05, "loss": 4.4242, "step": 183296 },
    { "epoch": 0.01, "learning_rate": 4.6990542223573294e-05, "loss": 4.4292, "step": 183808 },
    { "epoch": 0.01, "learning_rate": 4.6982156276062774e-05, "loss": 4.425, "step": 184320 },
    { "epoch": 0.01, "learning_rate": 4.6973770328552254e-05, "loss": 4.4129, "step": 184832 },
    { "epoch": 0.01, "learning_rate": 4.6965384381041734e-05, "loss": 4.427, "step": 185344 },
    { "epoch": 0.01, "learning_rate": 4.6956998433531214e-05, "loss": 4.4293, "step": 185856 },
    { "epoch": 0.01, "learning_rate": 4.6948612486020694e-05, "loss": 4.4326, "step": 186368 },
    { "epoch": 0.01, "learning_rate": 4.6940226538510174e-05, "loss": 4.4101, "step": 186880 },
    { "epoch": 0.01, "learning_rate": 4.6931840590999654e-05, "loss": 4.4181, "step": 187392 },
    { "epoch": 0.01, "learning_rate": 4.6923454643489134e-05, "loss": 4.4141, "step": 187904 },
    { "epoch": 0.01, "learning_rate": 4.6915068695978614e-05, "loss": 4.4404, "step": 188416 },
    { "epoch": 0.01, "learning_rate": 4.690669912727182e-05, "loss": 4.4269, "step": 188928 },
    { "epoch": 0.01, "learning_rate": 4.68983131797613e-05, "loss": 4.4277, "step": 189440 },
    { "epoch": 0.01, "learning_rate": 4.688992723225078e-05, "loss": 4.4389, "step": 189952 },
    { "epoch": 0.01, "learning_rate": 4.688154128474026e-05, "loss": 4.4385, "step": 190464 },
    { "epoch": 0.01, "learning_rate": 4.687315533722974e-05, "loss": 4.4315, "step": 190976 },
    { "epoch": 0.01, "learning_rate": 4.686476938971923e-05, "loss": 4.4199, "step": 191488 },
    { "epoch": 0.01, "learning_rate": 4.685639982101244e-05, "loss": 4.4203, "step": 192000 },
    { "epoch": 0.01, "learning_rate": 4.684801387350192e-05, "loss": 4.4184, "step": 192512 },
    { "epoch": 0.01, "learning_rate": 4.68396279259914e-05, "loss": 4.4145, "step": 193024 },
    { "epoch": 0.01, "learning_rate": 4.683125835728461e-05, "loss": 4.4159, "step": 193536 },
    { "epoch": 0.01, "learning_rate": 4.682287240977409e-05, "loss": 4.4166, "step": 194048 },
    { "epoch": 0.01, "learning_rate": 4.681448646226357e-05, "loss": 4.4271, "step": 194560 },
    { "epoch": 0.01, "learning_rate": 4.680610051475305e-05, "loss": 4.4157, "step": 195072 },
    { "epoch": 0.01, "learning_rate": 4.679771456724252e-05, "loss": 4.4069, "step": 195584 },
    { "epoch": 0.01, "learning_rate": 4.6789328619732e-05, "loss": 4.4023, "step": 196096 },
    { "epoch": 0.01, "learning_rate": 4.678094267222148e-05, "loss": 4.4114, "step": 196608 },
    { "epoch": 0.01, "learning_rate": 4.677255672471097e-05, "loss": 4.419, "step": 197120 },
    { "epoch": 0.01, "learning_rate": 4.6764187156004177e-05, "loss": 4.4074, "step": 197632 },
    { "epoch": 0.01, "learning_rate": 4.6755801208493657e-05, "loss": 4.4088, "step": 198144 },
    { "epoch": 0.02, "learning_rate": 4.6747415260983136e-05, "loss": 4.4181, "step": 198656 },
    { "epoch": 0.02, "learning_rate": 4.6739029313472616e-05, "loss": 4.3978, "step": 199168 },
    { "epoch": 0.02, "learning_rate": 4.6730659744765826e-05, "loss": 4.4055, "step": 199680 },
    { "epoch": 0.02, "learning_rate": 4.6722273797255305e-05, "loss": 4.4009, "step": 200192 },
    { "epoch": 0.02, "learning_rate": 4.6713887849744785e-05, "loss": 4.4085, "step": 200704 },
    { "epoch": 0.02, "learning_rate": 4.6705501902234265e-05, "loss": 4.407, "step": 201216 },
    { "epoch": 0.02, "learning_rate": 4.6697115954723745e-05, "loss": 4.4033, "step": 201728 },
    { "epoch": 0.02, "learning_rate": 4.6688730007213225e-05, "loss": 4.3946, "step": 202240 },
    { "epoch": 0.02, "learning_rate": 4.6680344059702705e-05, "loss": 4.403, "step": 202752 },
    { "epoch": 0.02, "learning_rate": 4.6671958112192185e-05, "loss": 4.394, "step": 203264 },
    { "epoch": 0.02, "learning_rate": 4.6663572164681665e-05, "loss": 4.4105, "step": 203776 },
    { "epoch": 0.02, "learning_rate": 4.665520259597488e-05, "loss": 4.4018, "step": 204288 },
    { "epoch": 0.02, "learning_rate": 4.664681664846436e-05, "loss": 4.4014, "step": 204800 },
    { "epoch": 0.02, "learning_rate": 4.663843070095384e-05, "loss": 4.3934, "step": 205312 },
    { "epoch": 0.02, "learning_rate": 4.663006113224705e-05, "loss": 4.4029, "step": 205824 },
    { "epoch": 0.02, "learning_rate": 4.662167518473653e-05, "loss": 4.3793, "step": 206336 },
    { "epoch": 0.02, "learning_rate": 4.661328923722601e-05, "loss": 4.3864, "step": 206848 },
    { "epoch": 0.02, "learning_rate": 4.660490328971549e-05, "loss": 4.3896, "step": 207360 },
    { "epoch": 0.02, "learning_rate": 4.659651734220497e-05, "loss": 4.391, "step": 207872 },
    { "epoch": 0.02, "learning_rate": 4.658813139469445e-05, "loss": 4.3832, "step": 208384 },
    { "epoch": 0.02, "learning_rate": 4.657976182598766e-05, "loss": 4.3861, "step": 208896 },
    { "epoch": 0.02, "learning_rate": 4.657137587847714e-05, "loss": 4.379, "step": 209408 },
    { "epoch": 0.02, "learning_rate": 4.656298993096662e-05, "loss": 4.3864, "step": 209920 },
    { "epoch": 0.02, "learning_rate": 4.65546039834561e-05, "loss": 4.3823, "step": 210432 },
    { "epoch": 0.02, "learning_rate": 4.6546218035945586e-05, "loss": 4.3795, "step": 210944 },
    { "epoch": 0.02, "learning_rate": 4.6537832088435065e-05, "loss": 4.3902, "step": 211456 },
    { "epoch": 0.02, "learning_rate": 4.6529462519728275e-05, "loss": 4.3886, "step": 211968 },
    { "epoch": 0.02, "learning_rate": 4.6521076572217755e-05, "loss": 4.3905, "step": 212480 },
    { "epoch": 0.02, "learning_rate": 4.6512690624707234e-05, "loss": 4.38, "step": 212992 },
    { "epoch": 0.02, "learning_rate": 4.650430467719671e-05, "loss": 4.3912, "step": 213504 },
    { "epoch": 0.02, "learning_rate": 4.649591872968619e-05, "loss": 4.3882, "step": 214016 },
    { "epoch": 0.02, "learning_rate": 4.648753278217567e-05, "loss": 4.3797, "step": 214528 },
    { "epoch": 0.02, "learning_rate": 4.647914683466515e-05, "loss": 4.3973, "step": 215040 },
    { "epoch": 0.02, "learning_rate": 4.647077726595836e-05, "loss": 4.3752, "step": 215552 },
    { "epoch": 0.02, "learning_rate": 4.6462391318447837e-05, "loss": 4.3864, "step": 216064 },
    { "epoch": 0.02, "learning_rate": 4.645400537093732e-05, "loss": 4.3654, "step": 216576 },
    { "epoch": 0.02, "learning_rate": 4.64456194234268e-05, "loss": 4.3817, "step": 217088 },
    { "epoch": 0.02, "learning_rate": 4.643723347591628e-05, "loss": 4.3617, "step": 217600 },
    { "epoch": 0.02, "learning_rate": 4.642888028601323e-05, "loss": 4.3735, "step": 218112 },
    { "epoch": 0.02, "learning_rate": 4.642049433850271e-05, "loss": 4.3608, "step": 218624 },
    { "epoch": 0.02, "learning_rate": 4.641210839099218e-05, "loss": 4.3883, "step": 219136 },
    { "epoch": 0.02, "learning_rate": 4.640372244348166e-05, "loss": 4.3774, "step": 219648 },
    { "epoch": 0.02, "learning_rate": 4.639533649597114e-05, "loss": 4.3763, "step": 220160 },
    { "epoch": 0.02, "learning_rate": 4.638695054846062e-05, "loss": 4.3689, "step": 220672 },
    { "epoch": 0.02, "learning_rate": 4.63785646009501e-05, "loss": 4.3716, "step": 221184 },
    { "epoch": 0.02, "learning_rate": 4.637017865343958e-05, "loss": 4.3871, "step": 221696 },
    { "epoch": 0.02, "learning_rate": 4.636179270592906e-05, "loss": 4.3754, "step": 222208 },
    { "epoch": 0.02, "learning_rate": 4.635342313722228e-05, "loss": 4.3689, "step": 222720 },
    { "epoch": 0.02, "learning_rate": 4.634503718971176e-05, "loss": 4.3794, "step": 223232 },
    { "epoch": 0.02, "learning_rate": 4.6336667621004966e-05, "loss": 4.3687, "step": 223744 },
    { "epoch": 0.02, "learning_rate": 4.6328281673494446e-05, "loss": 4.3777, "step": 224256 },
    { "epoch": 0.02, "learning_rate": 4.6319895725983926e-05, "loss": 4.3625, "step": 224768 },
    { "epoch": 0.02, "learning_rate": 4.6311509778473406e-05, "loss": 4.3712, "step": 225280 },
    { "epoch": 0.02, "learning_rate": 4.6303123830962886e-05, "loss": 4.3653, "step": 225792 },
    { "epoch": 0.02, "learning_rate": 4.6294737883452366e-05, "loss": 4.3657, "step": 226304 },
    { "epoch": 0.02, "learning_rate": 4.6286351935941846e-05, "loss": 4.3629, "step": 226816 },
    { "epoch": 0.02, "learning_rate": 4.6277965988431326e-05, "loss": 4.3689, "step": 227328 },
    { "epoch": 0.02, "learning_rate": 4.6269580040920806e-05, "loss": 4.3594, "step": 227840 },
    { "epoch": 0.02, "learning_rate": 4.6261194093410286e-05, "loss": 4.3657, "step": 228352 },
    { "epoch": 0.02, "learning_rate": 4.6252808145899766e-05, "loss": 4.3621, "step": 228864 },
    { "epoch": 0.03, "eval_loss": 4.3374714851379395, "eval_runtime": 295.9677, "eval_samples_per_second": 1289.299, "eval_steps_per_second": 40.292, "step": 228960 },
    { "epoch": 1.0, "learning_rate": 4.6244438577192975e-05, "loss": 4.3592, "step": 229376 },
    { "epoch": 1.0, "learning_rate": 4.623605262968246e-05, "loss": 4.3484, "step": 229888 },
    { "epoch": 1.0, "learning_rate": 4.622766668217194e-05, "loss": 4.3675, "step": 230400 },
    { "epoch": 1.0, "learning_rate": 4.621928073466142e-05, "loss": 4.3507, "step": 230912 },
    { "epoch": 1.0, "learning_rate": 4.6210894787150895e-05, "loss": 4.3766, "step": 231424 },
    { "epoch": 1.0, "learning_rate": 4.620252521844411e-05, "loss": 4.3435, "step": 231936 },
    { "epoch": 1.0, "learning_rate": 4.619413927093359e-05, "loss": 4.356, "step": 232448 },
    { "epoch": 1.0, "learning_rate": 4.618575332342307e-05, "loss": 4.3399, "step": 232960 },
    { "epoch": 1.0, "learning_rate": 4.6177367375912543e-05, "loss": 4.3572, "step": 233472 },
    { "epoch": 1.0, "learning_rate": 4.616899780720576e-05, "loss": 4.3558, "step": 233984 },
    { "epoch": 1.0, "learning_rate": 4.616061185969524e-05, "loss": 4.3544, "step": 234496 },
    { "epoch": 1.0, "learning_rate": 4.615224229098845e-05, "loss": 4.3563, "step": 235008 },
    { "epoch": 1.0, "learning_rate": 4.614385634347793e-05, "loss": 4.3384, "step": 235520 },
    { "epoch": 1.0, "learning_rate": 4.6135470395967415e-05, "loss": 4.35, "step": 236032 },
    { "epoch": 1.0, "learning_rate": 4.6127084448456895e-05, "loss": 4.3452, "step": 236544 },
    { "epoch": 1.0, "learning_rate": 4.611869850094637e-05, "loss": 4.3437, "step": 237056 },
    { "epoch": 1.0, "learning_rate": 4.611031255343585e-05, "loss": 4.3439, "step": 237568 },
    { "epoch": 1.0, "learning_rate": 4.610192660592533e-05, "loss": 4.3472, "step": 238080 },
    { "epoch": 1.0, "learning_rate": 4.609354065841481e-05, "loss": 4.3462, "step": 238592 },
    { "epoch": 1.0, "learning_rate": 4.608517108970802e-05, "loss": 4.3603, "step": 239104 },
    { "epoch": 1.0, "learning_rate": 4.60767851421975e-05, "loss": 4.3407, "step": 239616 },
    { "epoch": 1.0, "learning_rate": 4.606839919468698e-05, "loss": 4.3459, "step": 240128 },
    { "epoch": 1.0, "learning_rate": 4.606001324717646e-05, "loss": 4.3463, "step": 240640 },
    { "epoch": 1.0, "learning_rate": 4.605162729966594e-05, "loss": 4.3573, "step": 241152 },
    { "epoch": 1.0, "learning_rate": 4.604325773095915e-05, "loss": 4.336, "step": 241664 },
    { "epoch": 1.0, "learning_rate": 4.603487178344863e-05, "loss": 4.3477, "step": 242176 },
    { "epoch": 1.0, "learning_rate": 4.602648583593811e-05, "loss": 4.3364, "step": 242688 },
    { "epoch": 1.0, "learning_rate": 4.601809988842759e-05, "loss": 4.3402, "step": 243200 },
    { "epoch": 1.0, "learning_rate": 4.600971394091707e-05, "loss": 4.3278, "step": 243712 },
    { "epoch": 1.01, "learning_rate": 4.600134437221028e-05, "loss": 4.3392, "step": 244224 },
    { "epoch": 1.01, "learning_rate": 4.599295842469976e-05, "loss": 4.3391, "step": 244736 },
    { "epoch": 1.01, "learning_rate": 4.598457247718924e-05, "loss": 4.3431, "step": 245248 },
    { "epoch": 1.01, "learning_rate": 4.597618652967872e-05, "loss": 4.3381, "step": 245760 },
    { "epoch": 1.01, "learning_rate": 4.59678005821682e-05, "loss": 4.3451, "step": 246272 },
    { "epoch": 1.01, "learning_rate": 4.595943101346141e-05, "loss": 4.3372, "step": 246784 },
    { "epoch": 1.01, "learning_rate": 4.595104506595089e-05, "loss": 4.3442, "step": 247296 },
    { "epoch": 1.01, "learning_rate": 4.59426754972441e-05, "loss": 4.3306, "step": 247808 },
    { "epoch": 1.01, "learning_rate": 4.5934289549733587e-05, "loss": 4.3398, "step": 248320 },
    { "epoch": 1.01, "learning_rate": 4.5925903602223066e-05, "loss": 4.3241, "step": 248832 },
    { "epoch": 1.01, "learning_rate": 4.5917517654712546e-05, "loss": 4.3337, "step": 249344 },
    { "epoch": 1.01, "learning_rate": 4.5909131707202026e-05, "loss": 4.3332, "step": 249856 },
    { "epoch": 1.01, "learning_rate": 4.5900745759691506e-05, "loss": 4.3377, "step": 250368 },
    { "epoch": 1.01, "learning_rate": 4.5892359812180986e-05, "loss": 4.3285, "step": 250880 },
    { "epoch": 1.01, "learning_rate": 4.5883973864670466e-05, "loss": 4.331, "step": 251392 },
    { "epoch": 1.01, "learning_rate": 4.5875587917159946e-05, "loss": 4.3354, "step": 251904 },
    { "epoch": 1.01, "learning_rate": 4.5867201969649426e-05, "loss": 4.3323, "step": 252416 },
    { "epoch": 1.01, "learning_rate": 4.5858832400942635e-05, "loss": 4.3343, "step": 252928 },
    { "epoch": 1.01, "learning_rate": 4.5850446453432115e-05, "loss": 4.324, "step": 253440 },
    { "epoch": 1.01, "learning_rate": 4.5842060505921595e-05, "loss": 4.3058, "step": 253952 },
    { "epoch": 1.01, "learning_rate": 4.5833674558411075e-05, "loss": 4.3301, "step": 254464 },
    { "epoch": 1.01, "learning_rate": 4.5825288610900555e-05, "loss": 4.3252, "step": 254976 },
    { "epoch": 1.01, "learning_rate": 4.5816902663390035e-05, "loss": 4.3204, "step": 255488 },
    { "epoch": 1.01, "learning_rate": 4.5808516715879515e-05, "loss": 4.3142, "step": 256000 },
    { "epoch": 1.01, "learning_rate": 4.580014714717273e-05, "loss": 4.3126, "step": 256512 },
    { "epoch": 1.01, "learning_rate": 4.5791761199662204e-05, "loss": 4.3116, "step": 257024 },
    { "epoch": 1.01, "learning_rate": 4.5783375252151684e-05, "loss": 4.3282, "step": 257536 },
    { "epoch": 1.01, "learning_rate": 4.5774989304641164e-05, "loss": 4.3098, "step": 258048 },
    { "epoch": 1.01, "learning_rate": 4.576661973593438e-05, "loss": 4.3165, "step": 258560 },
    { "epoch": 1.01, "learning_rate": 4.575823378842385e-05, "loss": 4.3256, "step": 259072 },
    { "epoch": 1.01, "learning_rate": 4.574986421971707e-05, "loss": 4.3089, "step": 259584 },
    { "epoch": 1.01, "learning_rate": 4.574147827220655e-05, "loss": 4.3108, "step": 260096 },
    { "epoch": 1.01, "learning_rate": 4.573309232469602e-05, "loss": 4.3159, "step": 260608 },
    { "epoch": 1.01, "learning_rate": 4.572470637718551e-05, "loss": 4.2949, "step": 261120 },
    { "epoch": 1.01, "learning_rate": 4.5716336808478725e-05, "loss": 4.3117, "step": 261632 },
    { "epoch": 1.01, "learning_rate": 4.5707950860968205e-05, "loss": 4.3172, "step": 262144 },
    { "epoch": 1.01, "learning_rate": 4.569956491345768e-05, "loss": 4.3168, "step": 262656 },
    { "epoch": 1.01, "learning_rate": 4.569117896594716e-05, "loss": 4.2996, "step": 263168 },
    { "epoch": 1.01, "learning_rate": 4.568279301843664e-05, "loss": 4.3072, "step": 263680 },
    { "epoch": 1.01, "learning_rate": 4.567440707092612e-05, "loss": 4.2995, "step": 264192 },
    { "epoch": 1.01, "learning_rate": 4.56660211234156e-05, "loss": 4.322, "step": 264704 },
    { "epoch": 1.01, "learning_rate": 4.565763517590508e-05, "loss": 4.3164, "step": 265216 },
    { "epoch": 1.01, "learning_rate": 4.564926560719829e-05,
|
"loss": 4.3177, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5640879659687767e-05, |
|
"loss": 4.3247, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5632493712177247e-05, |
|
"loss": 4.3285, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5624107764666726e-05, |
|
"loss": 4.3256, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561573819595994e-05, |
|
"loss": 4.303, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560735224844942e-05, |
|
"loss": 4.3117, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559898267974263e-05, |
|
"loss": 4.3138, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559059673223211e-05, |
|
"loss": 4.3031, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558221078472159e-05, |
|
"loss": 4.3123, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557382483721107e-05, |
|
"loss": 4.3045, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.556543888970055e-05, |
|
"loss": 4.316, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.555705294219003e-05, |
|
"loss": 4.3117, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554866699467951e-05, |
|
"loss": 4.2926, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554028104716899e-05, |
|
"loss": 4.2989, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553189509965847e-05, |
|
"loss": 4.3072, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5523541909755416e-05, |
|
"loss": 4.3093, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5515155962244896e-05, |
|
"loss": 4.3017, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5506770014734376e-05, |
|
"loss": 4.3007, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5498384067223856e-05, |
|
"loss": 4.3099, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5490014498517065e-05, |
|
"loss": 4.2952, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5481644929810274e-05, |
|
"loss": 4.2969, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5473258982299754e-05, |
|
"loss": 4.3009, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5464873034789234e-05, |
|
"loss": 4.3064, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5456487087278714e-05, |
|
"loss": 4.3043, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5448101139768194e-05, |
|
"loss": 4.2979, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5439715192257674e-05, |
|
"loss": 4.2866, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431329244747154e-05, |
|
"loss": 4.3033, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5422943297236634e-05, |
|
"loss": 4.2891, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414557349726114e-05, |
|
"loss": 4.3097, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.540618778101933e-05, |
|
"loss": 4.3005, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539780183350881e-05, |
|
"loss": 4.2976, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538941588599829e-05, |
|
"loss": 4.292, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538102993848777e-05, |
|
"loss": 4.3031, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.537264399097725e-05, |
|
"loss": 4.2785, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.536425804346673e-05, |
|
"loss": 4.2893, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535588847475994e-05, |
|
"loss": 4.2837, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534750252724942e-05, |
|
"loss": 4.291, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53391165797389e-05, |
|
"loss": 4.2833, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533073063222838e-05, |
|
"loss": 4.2887, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532236106352159e-05, |
|
"loss": 4.2832, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53139914948148e-05, |
|
"loss": 4.279, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530560554730428e-05, |
|
"loss": 4.2879, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529721959979376e-05, |
|
"loss": 4.2777, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528883365228324e-05, |
|
"loss": 4.2938, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528044770477272e-05, |
|
"loss": 4.2902, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52720617572622e-05, |
|
"loss": 4.2915, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526367580975168e-05, |
|
"loss": 4.2836, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.525528986224116e-05, |
|
"loss": 4.2934, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5246903914730636e-05, |
|
"loss": 4.2942, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5238517967220116e-05, |
|
"loss": 4.2834, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5230132019709596e-05, |
|
"loss": 4.2973, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5221762451002805e-05, |
|
"loss": 4.2804, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5213376503492285e-05, |
|
"loss": 4.2887, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520499055598177e-05, |
|
"loss": 4.2753, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519660460847125e-05, |
|
"loss": 4.2856, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518821866096073e-05, |
|
"loss": 4.2674, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517983271345021e-05, |
|
"loss": 4.2796, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517146314474342e-05, |
|
"loss": 4.2684, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.51630771972329e-05, |
|
"loss": 4.2896, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.515469124972238e-05, |
|
"loss": 4.2807, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.514630530221186e-05, |
|
"loss": 4.2845, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.513791935470134e-05, |
|
"loss": 4.2746, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512953340719082e-05, |
|
"loss": 4.2807, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.51211474596803e-05, |
|
"loss": 4.2903, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511277789097351e-05, |
|
"loss": 4.2812, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.510439194346299e-05, |
|
"loss": 4.2781, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509600599595247e-05, |
|
"loss": 4.2872, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5087620048441956e-05, |
|
"loss": 4.2748, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5079234100931436e-05, |
|
"loss": 4.2879, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5070864532224645e-05, |
|
"loss": 4.2679, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062478584714125e-05, |
|
"loss": 4.2806, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5054109016007334e-05, |
|
"loss": 4.2747, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5045723068496814e-05, |
|
"loss": 4.2731, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5037337120986294e-05, |
|
"loss": 4.2775, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5028951173475774e-05, |
|
"loss": 4.2706, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5020565225965254e-05, |
|
"loss": 4.2712, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501219565725846e-05, |
|
"loss": 4.2727, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500380970974794e-05, |
|
"loss": 4.2771, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.2549028396606445, |
|
"eval_runtime": 293.1686, |
|
"eval_samples_per_second": 1301.609, |
|
"eval_steps_per_second": 40.676, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.499542376223742e-05, |
|
"loss": 4.2664, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.498703781472691e-05, |
|
"loss": 4.258, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497865186721639e-05, |
|
"loss": 4.2781, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497026591970587e-05, |
|
"loss": 4.2638, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.496187997219535e-05, |
|
"loss": 4.2843, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.495349402468482e-05, |
|
"loss": 4.2638, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49451080771743e-05, |
|
"loss": 4.2622, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.493672212966378e-05, |
|
"loss": 4.2508, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.492833618215326e-05, |
|
"loss": 4.2698, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491995023464274e-05, |
|
"loss": 4.2726, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491156428713222e-05, |
|
"loss": 4.2654, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49031783396217e-05, |
|
"loss": 4.2654, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.489479239211118e-05, |
|
"loss": 4.2545, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.488640644460066e-05, |
|
"loss": 4.2619, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.487802049709014e-05, |
|
"loss": 4.2615, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.486963454957962e-05, |
|
"loss": 4.2526, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.48612486020691e-05, |
|
"loss": 4.2588, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.485287903336232e-05, |
|
"loss": 4.2648, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.48444930858518e-05, |
|
"loss": 4.2587, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.483610713834128e-05, |
|
"loss": 4.2714, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.482772119083076e-05, |
|
"loss": 4.2614, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481933524332024e-05, |
|
"loss": 4.264, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481096567461345e-05, |
|
"loss": 4.2601, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480257972710293e-05, |
|
"loss": 4.2702, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479419377959241e-05, |
|
"loss": 4.2514, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.478580783208188e-05, |
|
"loss": 4.2597, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4777438263375096e-05, |
|
"loss": 4.2568, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476906869466831e-05, |
|
"loss": 4.2535, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476069912596152e-05, |
|
"loss": 4.2456, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4752313178451e-05, |
|
"loss": 4.2537, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474392723094048e-05, |
|
"loss": 4.2593, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473554128342996e-05, |
|
"loss": 4.2594, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472715533591944e-05, |
|
"loss": 4.2595, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471876938840892e-05, |
|
"loss": 4.2589, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.47103834408984e-05, |
|
"loss": 4.2547, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.470199749338788e-05, |
|
"loss": 4.2597, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4693611545877354e-05, |
|
"loss": 4.2522, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4685225598366834e-05, |
|
"loss": 4.2539, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4676839650856314e-05, |
|
"loss": 4.2457, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4668453703345794e-05, |
|
"loss": 4.2532, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466006775583528e-05, |
|
"loss": 4.2521, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.465168180832476e-05, |
|
"loss": 4.257, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.464329586081424e-05, |
|
"loss": 4.2488, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.463490991330372e-05, |
|
"loss": 4.2506, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462654034459693e-05, |
|
"loss": 4.259, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461815439708641e-05, |
|
"loss": 4.2495, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460976844957589e-05, |
|
"loss": 4.2516, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460138250206537e-05, |
|
"loss": 4.2447, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.459301293335858e-05, |
|
"loss": 4.226, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.458464336465179e-05, |
|
"loss": 4.254, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.457625741714127e-05, |
|
"loss": 4.2456, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.456787146963075e-05, |
|
"loss": 4.2436, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559485522120234e-05, |
|
"loss": 4.229, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551099574609714e-05, |
|
"loss": 4.237, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4542713627099194e-05, |
|
"loss": 4.2301, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4534344058392403e-05, |
|
"loss": 4.2519, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4525958110881883e-05, |
|
"loss": 4.2316, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451757216337136e-05, |
|
"loss": 4.2357, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450918621586084e-05, |
|
"loss": 4.2476, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450081664715405e-05, |
|
"loss": 4.2307, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.449243069964353e-05, |
|
"loss": 4.2354, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448404475213301e-05, |
|
"loss": 4.2399, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.447565880462249e-05, |
|
"loss": 4.2178, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.446727285711197e-05, |
|
"loss": 4.2367, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445890328840519e-05, |
|
"loss": 4.2352, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445051734089467e-05, |
|
"loss": 4.2425, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444213139338415e-05, |
|
"loss": 4.2243, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.443374544587363e-05, |
|
"loss": 4.2312, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.442535949836311e-05, |
|
"loss": 4.2204, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.441698992965632e-05, |
|
"loss": 4.247, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44086039821458e-05, |
|
"loss": 4.2391, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440021803463528e-05, |
|
"loss": 4.2371, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.439183208712476e-05, |
|
"loss": 4.2521, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.438344613961424e-05, |
|
"loss": 4.2464, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.437506019210372e-05, |
|
"loss": 4.2564, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.436667424459319e-05, |
|
"loss": 4.2295, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4358304675886406e-05, |
|
"loss": 4.2344, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4349918728375886e-05, |
|
"loss": 4.2394, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4341532780865366e-05, |
|
"loss": 4.2254, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4333146833354846e-05, |
|
"loss": 4.2403, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4324760885844326e-05, |
|
"loss": 4.2292, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4316374938333806e-05, |
|
"loss": 4.2406, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4307988990823286e-05, |
|
"loss": 4.2368, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4299619422116495e-05, |
|
"loss": 4.221, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4291233474605975e-05, |
|
"loss": 4.2343, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4282847527095455e-05, |
|
"loss": 4.2229, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4274461579584935e-05, |
|
"loss": 4.2381, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4266092010878144e-05, |
|
"loss": 4.2307, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4257706063367624e-05, |
|
"loss": 4.2266, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4249320115857104e-05, |
|
"loss": 4.2357, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424093416834659e-05, |
|
"loss": 4.2286, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.423254822083607e-05, |
|
"loss": 4.2202, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.422416227332555e-05, |
|
"loss": 4.2252, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.421577632581503e-05, |
|
"loss": 4.2358, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.420739037830451e-05, |
|
"loss": 4.2314, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419902080959772e-05, |
|
"loss": 4.229, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.41906348620872e-05, |
|
"loss": 4.2141, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.418226529338041e-05, |
|
"loss": 4.2295, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.417387934586989e-05, |
|
"loss": 4.2189, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.416549339835937e-05, |
|
"loss": 4.2358, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.415710745084885e-05, |
|
"loss": 4.231, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414872150333833e-05, |
|
"loss": 4.226, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414033555582781e-05, |
|
"loss": 4.2206, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.413194960831729e-05, |
|
"loss": 4.2344, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4123563660806775e-05, |
|
"loss": 4.2034, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4115194092099984e-05, |
|
"loss": 4.2201, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410682452339319e-05, |
|
"loss": 4.2115, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409843857588267e-05, |
|
"loss": 4.2258, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409005262837215e-05, |
|
"loss": 4.2107, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408166668086163e-05, |
|
"loss": 4.218, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407328073335111e-05, |
|
"loss": 4.2145, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406491116464432e-05, |
|
"loss": 4.2111, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.40565252171338e-05, |
|
"loss": 4.2165, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404813926962328e-05, |
|
"loss": 4.2131, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403975332211276e-05, |
|
"loss": 4.2251, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403136737460224e-05, |
|
"loss": 4.2227, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.402298142709173e-05, |
|
"loss": 4.2212, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.40145954795812e-05, |
|
"loss": 4.2145, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.400620953207068e-05, |
|
"loss": 4.2256, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.399782358456016e-05, |
|
"loss": 4.2203, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398945401585338e-05, |
|
"loss": 4.2138, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398106806834285e-05, |
|
"loss": 4.2313, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.397268212083233e-05, |
|
"loss": 4.2117, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3964312552125546e-05, |
|
"loss": 4.2183, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3955926604615026e-05, |
|
"loss": 4.2064, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39475406571045e-05, |
|
"loss": 4.2176, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393915470959398e-05, |
|
"loss": 4.2031, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3930768762083466e-05, |
|
"loss": 4.2091, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3922399193376675e-05, |
|
"loss": 4.2061, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3914013245866155e-05, |
|
"loss": 4.2211, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3905627298355635e-05, |
|
"loss": 4.214, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3897241350845115e-05, |
|
"loss": 4.2166, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3888855403334595e-05, |
|
"loss": 4.2062, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3880485834627804e-05, |
|
"loss": 4.2186, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3872099887117284e-05, |
|
"loss": 4.225, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3863713939606764e-05, |
|
"loss": 4.2154, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3855327992096244e-05, |
|
"loss": 4.2124, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3846942044585724e-05, |
|
"loss": 4.2207, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383858885468267e-05, |
|
"loss": 4.2107, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383020290717215e-05, |
|
"loss": 4.22, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.382181695966163e-05, |
|
"loss": 4.2037, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.381343101215111e-05, |
|
"loss": 4.2146, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.380504506464059e-05, |
|
"loss": 4.207, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.379665911713007e-05, |
|
"loss": 4.2101, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.378827316961955e-05, |
|
"loss": 4.2107, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377988722210903e-05, |
|
"loss": 4.2056, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377151765340224e-05, |
|
"loss": 4.2089, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.376313170589172e-05, |
|
"loss": 4.2033, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.37547457583812e-05, |
|
"loss": 4.2156, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.198291778564453, |
|
"eval_runtime": 291.6012, |
|
"eval_samples_per_second": 1308.606, |
|
"eval_steps_per_second": 40.895, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.374635981087068e-05, |
|
"loss": 4.2059, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.373799024216389e-05, |
|
"loss": 4.1931, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3729604294653374e-05, |
|
"loss": 4.2155, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372123472594658e-05, |
|
"loss": 4.2004, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.371284877843606e-05, |
|
"loss": 4.217, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.370446283092554e-05, |
|
"loss": 4.2042, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.369607688341502e-05, |
|
"loss": 4.1907, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.36876909359045e-05, |
|
"loss": 4.1958, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367932136719771e-05, |
|
"loss": 4.1988, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367093541968719e-05, |
|
"loss": 4.2109, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.366254947217667e-05, |
|
"loss": 4.202, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.365416352466615e-05, |
|
"loss": 4.2041, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.364577757715563e-05, |
|
"loss": 4.1944, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.363739162964511e-05, |
|
"loss": 4.1952, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362900568213459e-05, |
|
"loss": 4.2032, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362061973462407e-05, |
|
"loss": 4.1891, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.361223378711355e-05, |
|
"loss": 4.1944, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.360384783960304e-05, |
|
"loss": 4.2001, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.359547827089625e-05, |
|
"loss": 4.2013, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.358709232338573e-05, |
|
"loss": 4.2083, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357870637587521e-05, |
|
"loss": 4.2024, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357032042836469e-05, |
|
"loss": 4.202, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.356193448085416e-05, |
|
"loss": 4.199, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.355354853334364e-05, |
|
"loss": 4.2049, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3545178964636856e-05, |
|
"loss": 4.1892, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3536793017126336e-05, |
|
"loss": 4.2047, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352840706961581e-05, |
|
"loss": 4.1895, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352002112210529e-05, |
|
"loss": 4.1912, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3511635174594776e-05, |
|
"loss": 4.1886, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3503249227084256e-05, |
|
"loss": 4.1925, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3494863279573736e-05, |
|
"loss": 4.1972, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3486477332063216e-05, |
|
"loss": 4.1968, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3478107763356425e-05, |
|
"loss": 4.2018, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3469721815845905e-05, |
|
"loss": 4.2043, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3461335868335385e-05, |
|
"loss": 4.1916, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3452966299628594e-05, |
|
"loss": 4.201, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3444580352118074e-05, |
|
"loss": 4.1932, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3436194404607554e-05, |
|
"loss": 4.1916, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3427808457097034e-05, |
|
"loss": 4.1872, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3419422509586514e-05, |
|
"loss": 4.1886, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3411036562075993e-05, |
|
"loss": 4.1963, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.340266699336921e-05, |
|
"loss": 4.2006, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.339428104585869e-05, |
|
"loss": 4.1873, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.338589509834817e-05, |
|
"loss": 4.1927, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.337750915083765e-05, |
|
"loss": 4.1992, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336913958213086e-05, |
|
"loss": 4.1934, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336075363462034e-05, |
|
"loss": 4.1941, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.335236768710982e-05, |
|
"loss": 4.1856, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.33439817395993e-05, |
|
"loss": 4.1646, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.333559579208878e-05, |
|
"loss": 4.1987, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.332720984457826e-05, |
|
"loss": 4.1864, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331884027587147e-05, |
|
"loss": 4.1878, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331047070716468e-05, |
|
"loss": 4.1776, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.330208475965416e-05, |
|
"loss": 4.173, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.329369881214364e-05, |
|
"loss": 4.1759, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.328531286463312e-05, |
|
"loss": 4.1864, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.32769269171226e-05, |
|
"loss": 4.1773, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326854096961208e-05, |
|
"loss": 4.1817, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326015502210156e-05, |
|
"loss": 4.1895, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.325176907459104e-05, |
|
"loss": 4.1746, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.324338312708052e-05, |
|
"loss": 4.1763, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.323501355837373e-05, |
|
"loss": 4.1853, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.322662761086321e-05, |
|
"loss": 4.1657, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.321824166335269e-05, |
|
"loss": 4.1761, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3209855715842165e-05, |
|
"loss": 4.1797, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.320146976833165e-05, |
|
"loss": 4.1888, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.319308382082113e-05, |
|
"loss": 4.1652, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.318469787331061e-05, |
|
"loss": 4.1774, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.317632830460382e-05, |
|
"loss": 4.1644, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.31679423570933e-05, |
|
"loss": 4.1894, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315955640958278e-05, |
|
"loss": 4.1844, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315117046207226e-05, |
|
"loss": 4.1812, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.314278451456174e-05, |
|
"loss": 4.1931, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.313439856705122e-05, |
|
"loss": 4.1935, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.31260126195407e-05, |
|
"loss": 4.2015, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.311762667203018e-05, |
|
"loss": 4.1767, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310924072451966e-05, |
|
"loss": 4.1806, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310087115581287e-05, |
|
"loss": 4.1819, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.309248520830235e-05, |
|
"loss": 4.1706, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3084115639595565e-05, |
|
"loss": 4.1877, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3075729692085045e-05, |
|
"loss": 4.176, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3067343744574525e-05, |
|
"loss": 4.1859, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3058957797064005e-05, |
|
"loss": 4.1814, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3050571849553485e-05, |
|
"loss": 4.1668, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3042185902042965e-05, |
|
"loss": 4.1788, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3033799954532445e-05, |
|
"loss": 4.1689, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3025414007021925e-05, |
|
"loss": 4.1826, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3017044438315134e-05, |
|
"loss": 4.1796, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3008658490804614e-05, |
|
"loss": 4.1672, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3000272543294094e-05, |
|
"loss": 4.1868, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2991886595783574e-05, |
|
"loss": 4.1733, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.298351702707678e-05, |
|
"loss": 4.1678, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.297513107956627e-05, |
|
"loss": 4.1724, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.296676151085948e-05, |
|
"loss": 4.1828, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.295837556334896e-05, |
|
"loss": 4.1759, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294998961583844e-05, |
|
"loss": 4.1798, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294160366832792e-05, |
|
"loss": 4.1618, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.293323409962113e-05, |
|
"loss": 4.1757, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.292484815211061e-05, |
|
"loss": 4.1693, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.291646220460009e-05, |
|
"loss": 4.1807, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.290807625708957e-05, |
|
"loss": 4.1805, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289970668838278e-05, |
|
"loss": 4.1683, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289132074087226e-05, |
|
"loss": 4.1749, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2882934793361737e-05, |
|
"loss": 4.1797, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.287454884585122e-05, |
|
"loss": 4.1561, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28661628983407e-05, |
|
"loss": 4.1655, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.285777695083018e-05, |
|
"loss": 4.1592, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2849391003319656e-05, |
|
"loss": 4.1716, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284102143461287e-05, |
|
"loss": 4.1622, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.283263548710235e-05, |
|
"loss": 4.1681, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2824249539591825e-05, |
|
"loss": 4.1601, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2815863592081305e-05, |
|
"loss": 4.161, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2807477644570785e-05, |
|
"loss": 4.1677, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2799108075864e-05, |
|
"loss": 4.1595, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2790722128353474e-05, |
|
"loss": 4.174, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.278233618084296e-05, |
|
"loss": 4.1695, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.277395023333244e-05, |
|
"loss": 4.1719, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.276556428582192e-05, |
|
"loss": 4.1637, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.275719471711513e-05, |
|
"loss": 4.1729, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274880876960461e-05, |
|
"loss": 4.1751, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274042282209409e-05, |
|
"loss": 4.1625, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.273203687458357e-05, |
|
"loss": 4.1825, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.272365092707305e-05, |
|
"loss": 4.1569, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.271528135836626e-05, |
|
"loss": 4.169, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.270689541085574e-05, |
|
"loss": 4.1529, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269850946334522e-05, |
|
"loss": 4.1695, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.26901235158347e-05, |
|
"loss": 4.1512, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.268173756832418e-05, |
|
"loss": 4.1627, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2673367999617395e-05, |
|
"loss": 4.1515, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2664982052106875e-05, |
|
"loss": 4.1743, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2656596104596355e-05, |
|
"loss": 4.1615, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2648210157085835e-05, |
|
"loss": 4.1674, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2639840588379044e-05, |
|
"loss": 4.1557, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2631454640868524e-05, |
|
"loss": 4.1703, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2623068693358004e-05, |
|
"loss": 4.1727, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2614682745847484e-05, |
|
"loss": 4.1665, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2606296798336964e-05, |
|
"loss": 4.1661, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2597910850826443e-05, |
|
"loss": 4.167, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258954128211965e-05, |
|
"loss": 4.162, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258115533460913e-05, |
|
"loss": 4.1723, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.257276938709861e-05, |
|
"loss": 4.1559, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.256438343958809e-05, |
|
"loss": 4.1645, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.255599749207758e-05, |
|
"loss": 4.1605, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.254761154456706e-05, |
|
"loss": 4.1617, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253924197586027e-05, |
|
"loss": 4.163, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253085602834975e-05, |
|
"loss": 4.1525, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.252247008083923e-05, |
|
"loss": 4.1679, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.251408413332871e-05, |
|
"loss": 4.1498, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.250569818581819e-05, |
|
"loss": 4.1699, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.158207416534424, |
|
"eval_runtime": 295.9808, |
|
"eval_samples_per_second": 1289.243, |
|
"eval_steps_per_second": 40.29, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.249731223830766e-05, |
|
"loss": 4.1601, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.248892629079714e-05, |
|
"loss": 4.1437, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.248054034328662e-05, |
|
"loss": 4.164, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.24721543957761e-05, |
|
"loss": 4.1579, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.246376844826558e-05, |
|
"loss": 4.1667, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.245538250075506e-05, |
|
"loss": 4.1571, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.244699655324455e-05, |
|
"loss": 4.144, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.243861060573403e-05, |
|
"loss": 4.1459, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.243022465822351e-05, |
|
"loss": 4.1544, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.242183871071299e-05, |
|
"loss": 4.1616, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.241345276320247e-05, |
|
"loss": 4.155, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.240508319449568e-05, |
|
"loss": 4.154, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.239669724698516e-05, |
|
"loss": 4.1537, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2388311299474637e-05, |
|
"loss": 4.1486, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2379925351964117e-05, |
|
"loss": 4.1524, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2371539404453596e-05, |
|
"loss": 4.1398, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2363153456943076e-05, |
|
"loss": 4.1502, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.235476750943255e-05, |
|
"loss": 4.1516, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.234638156192203e-05, |
|
"loss": 4.1521, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2337995614411516e-05, |
|
"loss": 4.1613, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2329609666900996e-05, |
|
"loss": 4.1603, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2321223719390476e-05, |
|
"loss": 4.151, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2312837771879956e-05, |
|
"loss": 4.1507, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2304451824369436e-05, |
|
"loss": 4.1619, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2296065876858916e-05, |
|
"loss": 4.1391, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2287679929348396e-05, |
|
"loss": 4.1615, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2279310360641605e-05, |
|
"loss": 4.1443, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2270924413131085e-05, |
|
"loss": 4.1438, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2262538465620565e-05, |
|
"loss": 4.1417, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2254152518110045e-05, |
|
"loss": 4.1495, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2245766570599525e-05, |
|
"loss": 4.1495, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2237380623089005e-05, |
|
"loss": 4.1528, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2228994675578485e-05, |
|
"loss": 4.1514, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.22206251068717e-05, |
|
"loss": 4.1579, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.221223915936118e-05, |
|
"loss": 4.1443, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.220385321185066e-05, |
|
"loss": 4.1571, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.219546726434014e-05, |
|
"loss": 4.1532, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.218708131682962e-05, |
|
"loss": 4.1423, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2178695369319094e-05, |
|
"loss": 4.1426, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.217032580061231e-05, |
|
"loss": 4.1468, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.216193985310179e-05, |
|
"loss": 4.1459, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.215355390559126e-05, |
|
"loss": 4.1566, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.214516795808074e-05, |
|
"loss": 4.1428, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.213678201057022e-05, |
|
"loss": 4.1448, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.21283960630597e-05, |
|
"loss": 4.1573, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.212001011554918e-05, |
|
"loss": 4.1445, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.211162416803866e-05, |
|
"loss": 4.1527, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.210325459933188e-05, |
|
"loss": 4.1417, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.209486865182136e-05, |
|
"loss": 4.1117, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.208648270431084e-05, |
|
"loss": 4.1595, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.207811313560405e-05, |
|
"loss": 4.1415, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.206972718809353e-05, |
|
"loss": 4.1448, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.206134124058301e-05, |
|
"loss": 4.1338, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.205295529307249e-05, |
|
"loss": 4.1239, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2044585724365696e-05, |
|
"loss": 4.1345, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2036199776855176e-05, |
|
"loss": 4.1398, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2027813829344656e-05, |
|
"loss": 4.1361, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2019427881834136e-05, |
|
"loss": 4.1347, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.201105831312735e-05, |
|
"loss": 4.1496, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.200267236561683e-05, |
|
"loss": 4.1292, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.199428641810631e-05, |
|
"loss": 4.1296, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.198591684939952e-05, |
|
"loss": 4.1444, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1977530901889e-05, |
|
"loss": 4.1174, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196914495437848e-05, |
|
"loss": 4.1318, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196075900686796e-05, |
|
"loss": 4.1343, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.195237305935744e-05, |
|
"loss": 4.1475, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.194398711184692e-05, |
|
"loss": 4.1209, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.19356011643364e-05, |
|
"loss": 4.1372, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.192721521682588e-05, |
|
"loss": 4.117, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.191882926931536e-05, |
|
"loss": 4.147, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.191044332180484e-05, |
|
"loss": 4.1438, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.190205737429432e-05, |
|
"loss": 4.1377, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.18936714267838e-05, |
|
"loss": 4.1448, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1885301858077017e-05, |
|
"loss": 4.149, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1876915910566496e-05, |
|
"loss": 4.161, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1868546341859706e-05, |
|
"loss": 4.135, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1860160394349186e-05, |
|
"loss": 4.1393, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1851774446838665e-05, |
|
"loss": 4.141, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1843388499328145e-05, |
|
"loss": 4.1244, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1835002551817625e-05, |
|
"loss": 4.1406, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.18266166043071e-05, |
|
"loss": 4.1351, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1818247035600314e-05, |
|
"loss": 4.1408, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1809861088089794e-05, |
|
"loss": 4.1477, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1801475140579274e-05, |
|
"loss": 4.1254, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1793089193068754e-05, |
|
"loss": 4.1306, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1784703245558234e-05, |
|
"loss": 4.1287, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1776317298047714e-05, |
|
"loss": 4.1412, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1767931350537194e-05, |
|
"loss": 4.1346, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.17595617818304e-05, |
|
"loss": 4.131, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.175117583431988e-05, |
|
"loss": 4.133, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.174278988680936e-05, |
|
"loss": 4.1386, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.173440393929884e-05, |
|
"loss": 4.1285, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.172601799178832e-05, |
|
"loss": 4.1282, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.171764842308153e-05, |
|
"loss": 4.1477, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170926247557101e-05, |
|
"loss": 4.1335, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170087652806049e-05, |
|
"loss": 4.1349, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.169249058054998e-05, |
|
"loss": 4.1205, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.168410463303946e-05, |
|
"loss": 4.1331, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.167571868552894e-05, |
|
"loss": 4.1274, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.166733273801842e-05, |
|
"loss": 4.1385, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.16589467905079e-05, |
|
"loss": 4.1373, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.165057722180111e-05, |
|
"loss": 4.134, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.164219127429059e-05, |
|
"loss": 4.1337, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.163380532678007e-05, |
|
"loss": 4.1353, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.162541937926955e-05, |
|
"loss": 4.1154, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.161703343175903e-05, |
|
"loss": 4.1265, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160866386305224e-05, |
|
"loss": 4.1131, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160027791554172e-05, |
|
"loss": 4.1329, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1591891968031197e-05, |
|
"loss": 4.1217, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1583506020520677e-05, |
|
"loss": 4.125, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1575120073010156e-05, |
|
"loss": 4.1185, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.156673412549964e-05, |
|
"loss": 4.122, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.155836455679285e-05, |
|
"loss": 4.127, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154997860928233e-05, |
|
"loss": 4.1186, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154159266177181e-05, |
|
"loss": 4.129, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1533206714261285e-05, |
|
"loss": 4.1346, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1524820766750765e-05, |
|
"loss": 4.1266, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1516434819240245e-05, |
|
"loss": 4.1303, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1508048871729725e-05, |
|
"loss": 4.1284, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1499679303022934e-05, |
|
"loss": 4.1332, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1491293355512414e-05, |
|
"loss": 4.1265, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1482907408001894e-05, |
|
"loss": 4.1363, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147452146049138e-05, |
|
"loss": 4.1176, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.146613551298086e-05, |
|
"loss": 4.1306, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.145774956547034e-05, |
|
"loss": 4.1176, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144937999676355e-05, |
|
"loss": 4.1239, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144099404925303e-05, |
|
"loss": 4.1171, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.143260810174251e-05, |
|
"loss": 4.1203, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.142422215423199e-05, |
|
"loss": 4.1135, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.141583620672147e-05, |
|
"loss": 4.131, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.140745025921095e-05, |
|
"loss": 4.1235, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139908069050416e-05, |
|
"loss": 4.1276, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139069474299364e-05, |
|
"loss": 4.1168, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.138230879548312e-05, |
|
"loss": 4.1329, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.13739228479726e-05, |
|
"loss": 4.1315, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.136553690046208e-05, |
|
"loss": 4.1297, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1357150952951565e-05, |
|
"loss": 4.1274, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1348765005441045e-05, |
|
"loss": 4.1238, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1340379057930525e-05, |
|
"loss": 4.1255, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1331993110420005e-05, |
|
"loss": 4.1356, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.132360716290948e-05, |
|
"loss": 4.1136, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1315237594202694e-05, |
|
"loss": 4.1241, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1306851646692174e-05, |
|
"loss": 4.1279, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129846569918165e-05, |
|
"loss": 4.1244, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129009613047486e-05, |
|
"loss": 4.1157, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.128171018296434e-05, |
|
"loss": 4.1157, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.127332423545382e-05, |
|
"loss": 4.129, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.12649382879433e-05, |
|
"loss": 4.1155, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.125656871923652e-05, |
|
"loss": 4.1302, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.127338886260986, |
|
"eval_runtime": 294.1908, |
|
"eval_samples_per_second": 1297.087, |
|
"eval_steps_per_second": 40.535, |
|
"step": 534240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1248182771726e-05, |
|
"loss": 4.1166, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.123979682421548e-05, |
|
"loss": 4.106, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.123141087670495e-05, |
|
"loss": 4.1276, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.122304130799817e-05, |
|
"loss": 4.1118, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.121465536048765e-05, |
|
"loss": 4.1253, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.120626941297712e-05, |
|
"loss": 4.1226, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.11978834654666e-05, |
|
"loss": 4.1099, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.118949751795608e-05, |
|
"loss": 4.1092, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.118111157044556e-05, |
|
"loss": 4.1181, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.117272562293504e-05, |
|
"loss": 4.1212, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.116433967542452e-05, |
|
"loss": 4.1171, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.115597010671774e-05, |
|
"loss": 4.1141, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.114760053801095e-05, |
|
"loss": 4.1151, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1139214590500426e-05, |
|
"loss": 4.1115, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1130828642989906e-05, |
|
"loss": 4.1154, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1122442695479386e-05, |
|
"loss": 4.1049, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1114056747968866e-05, |
|
"loss": 4.1106, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1105670800458346e-05, |
|
"loss": 4.1143, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1097284852947826e-05, |
|
"loss": 4.1163, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1088898905437306e-05, |
|
"loss": 4.1188, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1080512957926786e-05, |
|
"loss": 4.1271, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1072127010416266e-05, |
|
"loss": 4.1164, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1063741062905746e-05, |
|
"loss": 4.1141, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1055355115395225e-05, |
|
"loss": 4.119, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.104700192549217e-05, |
|
"loss": 4.1082, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.103863235678538e-05, |
|
"loss": 4.1214, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.103024640927486e-05, |
|
"loss": 4.1071, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.102186046176434e-05, |
|
"loss": 4.1108, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.101347451425382e-05, |
|
"loss": 4.1075, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.10050885667433e-05, |
|
"loss": 4.1087, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.099670261923278e-05, |
|
"loss": 4.1101, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.098831667172226e-05, |
|
"loss": 4.1157, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.097993072421174e-05, |
|
"loss": 4.1147, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.097154477670122e-05, |
|
"loss": 4.1201, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.096317520799443e-05, |
|
"loss": 4.1064, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.095478926048391e-05, |
|
"loss": 4.1225, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0946419691777124e-05, |
|
"loss": 4.1157, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0938033744266604e-05, |
|
"loss": 4.1031, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0929647796756084e-05, |
|
"loss": 4.1093, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0921261849245564e-05, |
|
"loss": 4.1092, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0912875901735044e-05, |
|
"loss": 4.1087, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0904489954224524e-05, |
|
"loss": 4.117, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0896104006714004e-05, |
|
"loss": 4.1067, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0887718059203484e-05, |
|
"loss": 4.1087, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087933211169296e-05, |
|
"loss": 4.1202, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087094616418244e-05, |
|
"loss": 4.1109, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.086257659547565e-05, |
|
"loss": 4.1132, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.085419064796513e-05, |
|
"loss": 4.1103, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.084580470045461e-05, |
|
"loss": 4.0738, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.083741875294409e-05, |
|
"loss": 4.1253, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.082904918423731e-05, |
|
"loss": 4.103, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.082066323672678e-05, |
|
"loss": 4.1135, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.081227728921626e-05, |
|
"loss": 4.0949, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.080389134170574e-05, |
|
"loss": 4.0909, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.079552177299896e-05, |
|
"loss": 4.0987, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.078713582548843e-05, |
|
"loss": 4.1018, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.077874987797791e-05, |
|
"loss": 4.1034, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.077036393046739e-05, |
|
"loss": 4.0947, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.076197798295687e-05, |
|
"loss": 4.1164, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.075360841425008e-05, |
|
"loss": 4.0957, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0745222466739566e-05, |
|
"loss": 4.094, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0736836519229046e-05, |
|
"loss": 4.1097, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0728450571718526e-05, |
|
"loss": 4.083, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0720064624208006e-05, |
|
"loss": 4.0927, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0711678676697486e-05, |
|
"loss": 4.1007, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.070332548679443e-05, |
|
"loss": 4.112, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0694939539283904e-05, |
|
"loss": 4.0858, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0686553591773384e-05, |
|
"loss": 4.1029, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0678167644262864e-05, |
|
"loss": 4.0832, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0669781696752344e-05, |
|
"loss": 4.111, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0661395749241824e-05, |
|
"loss": 4.1065, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0653009801731304e-05, |
|
"loss": 4.1045, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0644623854220784e-05, |
|
"loss": 4.1068, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0636237906710264e-05, |
|
"loss": 4.1147, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.062785195919975e-05, |
|
"loss": 4.1253, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.061948239049296e-05, |
|
"loss": 4.1009, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.061109644298244e-05, |
|
"loss": 4.1018, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.060272687427565e-05, |
|
"loss": 4.1086, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.059434092676513e-05, |
|
"loss": 4.092, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.058595497925461e-05, |
|
"loss": 4.103, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.057756903174409e-05, |
|
"loss": 4.1043, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056918308423357e-05, |
|
"loss": 4.1094, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056079713672305e-05, |
|
"loss": 4.1104, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.055241118921253e-05, |
|
"loss": 4.0945, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.054404162050574e-05, |
|
"loss": 4.0924, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.053565567299522e-05, |
|
"loss": 4.0959, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.05272697254847e-05, |
|
"loss": 4.1076, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0518883777974184e-05, |
|
"loss": 4.1012, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0510497830463664e-05, |
|
"loss": 4.0957, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0502111882953144e-05, |
|
"loss": 4.0961, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.049372593544262e-05, |
|
"loss": 4.1046, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.04853399879321e-05, |
|
"loss": 4.0956, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0476970419225313e-05, |
|
"loss": 4.0935, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.046858447171479e-05, |
|
"loss": 4.1126, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0460214903008e-05, |
|
"loss": 4.0989, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.045182895549748e-05, |
|
"loss": 4.1008, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.044344300798696e-05, |
|
"loss": 4.0915, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.043507343928017e-05, |
|
"loss": 4.094, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.042668749176965e-05, |
|
"loss": 4.0923, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.041830154425914e-05, |
|
"loss": 4.107, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.040991559674862e-05, |
|
"loss": 4.1006, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.040152964923809e-05, |
|
"loss": 4.1022, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.039314370172757e-05, |
|
"loss": 4.1004, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.038475775421705e-05, |
|
"loss": 4.0996, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.037637180670653e-05, |
|
"loss": 4.0865, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.036798585919601e-05, |
|
"loss": 4.0893, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.035961629048922e-05, |
|
"loss": 4.0826, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.03512303429787e-05, |
|
"loss": 4.0998, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.034284439546818e-05, |
|
"loss": 4.0867, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.033445844795766e-05, |
|
"loss": 4.0901, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.032607250044714e-05, |
|
"loss": 4.083, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.031768655293662e-05, |
|
"loss": 4.0933, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.030930060542611e-05, |
|
"loss": 4.0948, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.030091465791559e-05, |
|
"loss": 4.0832, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0292561468012525e-05, |
|
"loss": 4.1011, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0284175520502005e-05, |
|
"loss": 4.0982, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0275805951795214e-05, |
|
"loss": 4.0916, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0267420004284694e-05, |
|
"loss": 4.0993, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0259034056774174e-05, |
|
"loss": 4.0943, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0250648109263654e-05, |
|
"loss": 4.0951, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0242262161753134e-05, |
|
"loss": 4.0986, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0233876214242614e-05, |
|
"loss": 4.099, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0225490266732094e-05, |
|
"loss": 4.0872, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0217104319221574e-05, |
|
"loss": 4.098, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020871837171106e-05, |
|
"loss": 4.089, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020033242420054e-05, |
|
"loss": 4.0902, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.019194647669002e-05, |
|
"loss": 4.0807, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.01835605291795e-05, |
|
"loss": 4.0877, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.017519096047271e-05, |
|
"loss": 4.0827, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.016680501296219e-05, |
|
"loss": 4.0965, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.015841906545167e-05, |
|
"loss": 4.0917, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.015003311794115e-05, |
|
"loss": 4.097, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.014164717043063e-05, |
|
"loss": 4.0801, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.013327760172384e-05, |
|
"loss": 4.1048, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.012489165421332e-05, |
|
"loss": 4.0943, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.01165057067028e-05, |
|
"loss": 4.0984, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.010811975919228e-05, |
|
"loss": 4.096, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.009973381168176e-05, |
|
"loss": 4.0912, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0091364242974974e-05, |
|
"loss": 4.0907, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.008299467426818e-05, |
|
"loss": 4.1018, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.007460872675766e-05, |
|
"loss": 4.0838, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.006622277924714e-05, |
|
"loss": 4.0937, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.005783683173662e-05, |
|
"loss": 4.0935, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.00494508842261e-05, |
|
"loss": 4.0902, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0041064936715576e-05, |
|
"loss": 4.0843, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0032678989205056e-05, |
|
"loss": 4.0829, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.002430942049827e-05, |
|
"loss": 4.0963, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.001592347298775e-05, |
|
"loss": 4.0866, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.000753752547723e-05, |
|
"loss": 4.0971, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.102852821350098, |
|
"eval_runtime": 293.4121, |
|
"eval_samples_per_second": 1300.529, |
|
"eval_steps_per_second": 40.642, |
|
"step": 610560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.999915157796671e-05, |
|
"loss": 4.0817, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.999076563045619e-05, |
|
"loss": 4.08, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.998237968294567e-05, |
|
"loss": 4.087, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.997399373543515e-05, |
|
"loss": 4.0861, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.996560778792463e-05, |
|
"loss": 4.0958, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.995722184041411e-05, |
|
"loss": 4.0886, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.994883589290359e-05, |
|
"loss": 4.0786, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.994044994539307e-05, |
|
"loss": 4.0792, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.993206399788255e-05, |
|
"loss": 4.0817, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.992367805037203e-05, |
|
"loss": 4.0943, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.991529210286151e-05, |
|
"loss": 4.0826, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9906906155350984e-05, |
|
"loss": 4.0818, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.98985365866442e-05, |
|
"loss": 4.092, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.989015063913368e-05, |
|
"loss": 4.0723, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.988176469162316e-05, |
|
"loss": 4.0803, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.987337874411264e-05, |
|
"loss": 4.077, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.986499279660212e-05, |
|
"loss": 4.0787, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.98566068490916e-05, |
|
"loss": 4.0834, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.984822090158108e-05, |
|
"loss": 4.0847, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.983983495407056e-05, |
|
"loss": 4.0842, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.983144900656004e-05, |
|
"loss": 4.0981, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.982306305904952e-05, |
|
"loss": 4.0838, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9814677111539e-05, |
|
"loss": 4.0862, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.980629116402848e-05, |
|
"loss": 4.0837, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.979792159532169e-05, |
|
"loss": 4.0805, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.978953564781117e-05, |
|
"loss": 4.0899, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.978114970030065e-05, |
|
"loss": 4.0784, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9772780131593865e-05, |
|
"loss": 4.0813, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9764394184083345e-05, |
|
"loss": 4.0717, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9756008236572825e-05, |
|
"loss": 4.0784, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9747638667866034e-05, |
|
"loss": 4.0764, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9739252720355514e-05, |
|
"loss": 4.0824, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9730866772844994e-05, |
|
"loss": 4.089, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9722480825334474e-05, |
|
"loss": 4.0894, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9714094877823954e-05, |
|
"loss": 4.0787, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9705708930313434e-05, |
|
"loss": 4.0841, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9697322982802913e-05, |
|
"loss": 4.0879, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9688937035292393e-05, |
|
"loss": 4.0761, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9680551087781873e-05, |
|
"loss": 4.0762, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.967216514027135e-05, |
|
"loss": 4.0783, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.966377919276083e-05, |
|
"loss": 4.0809, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.965540962405405e-05, |
|
"loss": 4.0805, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.964702367654353e-05, |
|
"loss": 4.0839, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9638637729033e-05, |
|
"loss": 4.0817, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.963025178152248e-05, |
|
"loss": 4.086, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.962186583401196e-05, |
|
"loss": 4.0811, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.961347988650144e-05, |
|
"loss": 4.0802, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.960509393899092e-05, |
|
"loss": 4.0789, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.95967079914804e-05, |
|
"loss": 4.0436, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.958833842277361e-05, |
|
"loss": 4.0917, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.957995247526309e-05, |
|
"loss": 4.0725, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.957156652775257e-05, |
|
"loss": 4.0841, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.956318058024205e-05, |
|
"loss": 4.0682, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.955481101153527e-05, |
|
"loss": 4.0623, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.954642506402475e-05, |
|
"loss": 4.0634, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.953803911651423e-05, |
|
"loss": 4.072, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.952965316900371e-05, |
|
"loss": 4.0735, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9521283600296916e-05, |
|
"loss": 4.068, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9512897652786396e-05, |
|
"loss": 4.0838, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9504511705275876e-05, |
|
"loss": 4.0691, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9496125757765356e-05, |
|
"loss": 4.0645, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9487756189058565e-05, |
|
"loss": 4.0783, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9479370241548045e-05, |
|
"loss": 4.0568, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.947100067284126e-05, |
|
"loss": 4.0612, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.946261472533074e-05, |
|
"loss": 4.073, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.945422877782022e-05, |
|
"loss": 4.0804, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94458428303097e-05, |
|
"loss": 4.0603, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.943745688279918e-05, |
|
"loss": 4.0695, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.942907093528866e-05, |
|
"loss": 4.0544, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.942068498777814e-05, |
|
"loss": 4.0799, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.941229904026762e-05, |
|
"loss": 4.0795, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94039130927571e-05, |
|
"loss": 4.0727, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.939552714524658e-05, |
|
"loss": 4.0788, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.938714119773606e-05, |
|
"loss": 4.0838, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9378755250225533e-05, |
|
"loss": 4.0977, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.937040206032248e-05, |
|
"loss": 4.0718, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.936201611281196e-05, |
|
"loss": 4.0727, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.935363016530144e-05, |
|
"loss": 4.0735, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9345244217790925e-05, |
|
"loss": 4.0701, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9336858270280405e-05, |
|
"loss": 4.0704, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9328472322769885e-05, |
|
"loss": 4.0763, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9320086375259365e-05, |
|
"loss": 4.0789, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.931170042774884e-05, |
|
"loss": 4.083, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.930331448023832e-05, |
|
"loss": 4.0629, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9294944911531534e-05, |
|
"loss": 4.0637, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.928655896402101e-05, |
|
"loss": 4.066, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.927817301651049e-05, |
|
"loss": 4.0807, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.92698034478037e-05, |
|
"loss": 4.0744, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.926141750029318e-05, |
|
"loss": 4.0634, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.925303155278266e-05, |
|
"loss": 4.0684, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.924464560527214e-05, |
|
"loss": 4.0748, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.923627603656536e-05, |
|
"loss": 4.0689, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.922789008905484e-05, |
|
"loss": 4.064, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.921950414154431e-05, |
|
"loss": 4.0819, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.921111819403379e-05, |
|
"loss": 4.0734, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.920273224652327e-05, |
|
"loss": 4.0704, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.919434629901275e-05, |
|
"loss": 4.0646, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.918596035150223e-05, |
|
"loss": 4.0655, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.917757440399171e-05, |
|
"loss": 4.0657, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.916920483528492e-05, |
|
"loss": 4.0785, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.91608188877744e-05, |
|
"loss": 4.0746, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.915243294026388e-05, |
|
"loss": 4.0697, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.914404699275336e-05, |
|
"loss": 4.0756, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.913566104524285e-05, |
|
"loss": 4.0667, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9127291476536056e-05, |
|
"loss": 4.0658, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9118905529025536e-05, |
|
"loss": 4.0579, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9110535960318745e-05, |
|
"loss": 4.0526, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9102150012808225e-05, |
|
"loss": 4.0742, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9093764065297705e-05, |
|
"loss": 4.0612, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9085394496590914e-05, |
|
"loss": 4.0585, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9077008549080394e-05, |
|
"loss": 4.0562, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9068622601569874e-05, |
|
"loss": 4.0591, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9060236654059354e-05, |
|
"loss": 4.0683, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.905186708535257e-05, |
|
"loss": 4.059, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.904348113784205e-05, |
|
"loss": 4.071, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.903509519033153e-05, |
|
"loss": 4.069, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.902670924282101e-05, |
|
"loss": 4.0693, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.901832329531049e-05, |
|
"loss": 4.0696, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.900993734779997e-05, |
|
"loss": 4.0662, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.900155140028945e-05, |
|
"loss": 4.0684, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.899316545277893e-05, |
|
"loss": 4.0722, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.898479588407214e-05, |
|
"loss": 4.0707, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.897640993656162e-05, |
|
"loss": 4.0624, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.89680239890511e-05, |
|
"loss": 4.0697, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.895963804154058e-05, |
|
"loss": 4.0619, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.895125209403006e-05, |
|
"loss": 4.0627, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.894286614651954e-05, |
|
"loss": 4.0525, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.893448019900902e-05, |
|
"loss": 4.0615, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8926110630302235e-05, |
|
"loss": 4.0534, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8917724682791715e-05, |
|
"loss": 4.0698, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8909338735281195e-05, |
|
"loss": 4.0602, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8900952787770674e-05, |
|
"loss": 4.0745, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.889256684026015e-05, |
|
"loss": 4.0523, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.888418089274963e-05, |
|
"loss": 4.0746, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8875811324042843e-05, |
|
"loss": 4.0683, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.886742537653232e-05, |
|
"loss": 4.0696, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8859039429021797e-05, |
|
"loss": 4.0693, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8850653481511277e-05, |
|
"loss": 4.0648, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8842267534000757e-05, |
|
"loss": 4.0675, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.883389796529397e-05, |
|
"loss": 4.076, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.882551201778345e-05, |
|
"loss": 4.0566, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.881712607027293e-05, |
|
"loss": 4.0671, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.880874012276241e-05, |
|
"loss": 4.0646, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.880035417525189e-05, |
|
"loss": 4.0633, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.879196822774137e-05, |
|
"loss": 4.0623, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.878358228023085e-05, |
|
"loss": 4.0566, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.877519633272033e-05, |
|
"loss": 4.0695, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.876682676401354e-05, |
|
"loss": 4.0628, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.875845719530675e-05, |
|
"loss": 4.0699, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.083948612213135, |
|
"eval_runtime": 293.0728, |
|
"eval_samples_per_second": 1302.035, |
|
"eval_steps_per_second": 40.69, |
|
"step": 686880 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.875007124779623e-05, |
|
"loss": 4.0527, |
|
"step": 687104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.874168530028571e-05, |
|
"loss": 4.056, |
|
"step": 687616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.873329935277519e-05, |
|
"loss": 4.0587, |
|
"step": 688128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.872491340526467e-05, |
|
"loss": 4.0608, |
|
"step": 688640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.871652745775416e-05, |
|
"loss": 4.0732, |
|
"step": 689152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.870814151024364e-05, |
|
"loss": 4.0619, |
|
"step": 689664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.869975556273312e-05, |
|
"loss": 4.0551, |
|
"step": 690176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8691385994026326e-05, |
|
"loss": 4.0534, |
|
"step": 690688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8683000046515806e-05, |
|
"loss": 4.0526, |
|
"step": 691200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8674614099005286e-05, |
|
"loss": 4.0674, |
|
"step": 691712 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8666228151494766e-05, |
|
"loss": 4.057, |
|
"step": 692224 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8657842203984246e-05, |
|
"loss": 4.0559, |
|
"step": 692736 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8649472635277455e-05, |
|
"loss": 4.0672, |
|
"step": 693248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8641086687766935e-05, |
|
"loss": 4.049, |
|
"step": 693760 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8632700740256415e-05, |
|
"loss": 4.0524, |
|
"step": 694272 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8624314792745895e-05, |
|
"loss": 4.0492, |
|
"step": 694784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.861594522403911e-05, |
|
"loss": 4.0553, |
|
"step": 695296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.860755927652859e-05, |
|
"loss": 4.0514, |
|
"step": 695808 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.859917332901807e-05, |
|
"loss": 4.0609, |
|
"step": 696320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.859078738150755e-05, |
|
"loss": 4.06, |
|
"step": 696832 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.858240143399703e-05, |
|
"loss": 4.0719, |
|
"step": 697344 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8574015486486503e-05, |
|
"loss": 4.058, |
|
"step": 697856 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8565629538975983e-05, |
|
"loss": 4.0605, |
|
"step": 698368 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8557243591465463e-05, |
|
"loss": 4.056, |
|
"step": 698880 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.854887402275868e-05, |
|
"loss": 4.0573, |
|
"step": 699392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.854050445405189e-05, |
|
"loss": 4.062, |
|
"step": 699904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.853211850654137e-05, |
|
"loss": 4.0498, |
|
"step": 700416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.852373255903085e-05, |
|
"loss": 4.0592, |
|
"step": 700928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.851534661152033e-05, |
|
"loss": 4.0471, |
|
"step": 701440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.850696066400981e-05, |
|
"loss": 4.0527, |
|
"step": 701952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.849857471649929e-05, |
|
"loss": 4.0515, |
|
"step": 702464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.849018876898877e-05, |
|
"loss": 4.0557, |
|
"step": 702976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.848181920028198e-05, |
|
"loss": 4.0632, |
|
"step": 703488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.847343325277146e-05, |
|
"loss": 4.0636, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.846504730526094e-05, |
|
"loss": 4.0551, |
|
"step": 704512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.845666135775042e-05, |
|
"loss": 4.0545, |
|
"step": 705024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.84482754102399e-05, |
|
"loss": 4.0674, |
|
"step": 705536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8439905841533106e-05, |
|
"loss": 4.051, |
|
"step": 706048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.843153627282632e-05, |
|
"loss": 4.0506, |
|
"step": 706560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.84231503253158e-05, |
|
"loss": 4.0535, |
|
"step": 707072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.841476437780528e-05, |
|
"loss": 4.0568, |
|
"step": 707584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.840637843029476e-05, |
|
"loss": 4.0548, |
|
"step": 708096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.839799248278424e-05, |
|
"loss": 4.0547, |
|
"step": 708608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.838960653527372e-05, |
|
"loss": 4.0577, |
|
"step": 709120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.83812205877632e-05, |
|
"loss": 4.057, |
|
"step": 709632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.837283464025268e-05, |
|
"loss": 4.057, |
|
"step": 710144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.836444869274216e-05, |
|
"loss": 4.0568, |
|
"step": 710656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.835606274523164e-05, |
|
"loss": 4.0566, |
|
"step": 711168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.834767679772112e-05, |
|
"loss": 4.0218, |
|
"step": 711680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.833930722901433e-05, |
|
"loss": 4.0644, |
|
"step": 712192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.833092128150381e-05, |
|
"loss": 4.0472, |
|
"step": 712704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.832253533399329e-05, |
|
"loss": 4.0603, |
|
"step": 713216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.831414938648277e-05, |
|
"loss": 4.0417, |
|
"step": 713728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.830576343897225e-05, |
|
"loss": 4.0373, |
|
"step": 714240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8297393870265466e-05, |
|
"loss": 4.0372, |
|
"step": 714752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8289007922754946e-05, |
|
"loss": 4.0463, |
|
"step": 715264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8280621975244426e-05, |
|
"loss": 4.0556, |
|
"step": 715776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8272236027733906e-05, |
|
"loss": 4.0389, |
|
"step": 716288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8263850080223386e-05, |
|
"loss": 4.0596, |
|
"step": 716800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8255464132712866e-05, |
|
"loss": 4.0492, |
|
"step": 717312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.824707818520234e-05, |
|
"loss": 4.0338, |
|
"step": 717824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.823869223769182e-05, |
|
"loss": 4.0535, |
|
"step": 718336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8230322668985035e-05, |
|
"loss": 4.0368, |
|
"step": 718848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8221936721474515e-05, |
|
"loss": 4.0348, |
|
"step": 719360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.821355077396399e-05, |
|
"loss": 4.0481, |
|
"step": 719872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.820516482645347e-05, |
|
"loss": 4.0549, |
|
"step": 720384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.819677887894295e-05, |
|
"loss": 4.0387, |
|
"step": 720896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8188409310236164e-05, |
|
"loss": 4.044, |
|
"step": 721408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8180023362725644e-05, |
|
"loss": 4.0322, |
|
"step": 721920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8171637415215124e-05, |
|
"loss": 4.0528, |
|
"step": 722432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8163251467704604e-05, |
|
"loss": 4.0555, |
|
"step": 722944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8154865520194084e-05, |
|
"loss": 4.05, |
|
"step": 723456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.814649595148729e-05, |
|
"loss": 4.0534, |
|
"step": 723968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.813811000397677e-05, |
|
"loss": 4.0593, |
|
"step": 724480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.812974043526999e-05, |
|
"loss": 4.0707, |
|
"step": 724992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.812135448775946e-05, |
|
"loss": 4.0501, |
|
"step": 725504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.811296854024894e-05, |
|
"loss": 4.0514, |
|
"step": 726016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.810458259273842e-05, |
|
"loss": 4.0476, |
|
"step": 726528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.80961966452279e-05, |
|
"loss": 4.048, |
|
"step": 727040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.808781069771739e-05, |
|
"loss": 4.0442, |
|
"step": 727552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.807942475020687e-05, |
|
"loss": 4.0538, |
|
"step": 728064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.807103880269635e-05, |
|
"loss": 4.052, |
|
"step": 728576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.806266923398956e-05, |
|
"loss": 4.0633, |
|
"step": 729088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.805428328647904e-05, |
|
"loss": 4.0375, |
|
"step": 729600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.804591371777225e-05, |
|
"loss": 4.0392, |
|
"step": 730112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8037527770261727e-05, |
|
"loss": 4.0446, |
|
"step": 730624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8029158201554936e-05, |
|
"loss": 4.0513, |
|
"step": 731136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8020772254044416e-05, |
|
"loss": 4.0521, |
|
"step": 731648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8012386306533896e-05, |
|
"loss": 4.0423, |
|
"step": 732160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8004000359023376e-05, |
|
"loss": 4.043, |
|
"step": 732672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7995614411512856e-05, |
|
"loss": 4.0526, |
|
"step": 733184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.798722846400234e-05, |
|
"loss": 4.0454, |
|
"step": 733696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.797884251649182e-05, |
|
"loss": 4.0373, |
|
"step": 734208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.79704565689813e-05, |
|
"loss": 4.0589, |
|
"step": 734720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.796207062147078e-05, |
|
"loss": 4.0493, |
|
"step": 735232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.795368467396026e-05, |
|
"loss": 4.0508, |
|
"step": 735744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.794531510525347e-05, |
|
"loss": 4.0392, |
|
"step": 736256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.793692915774295e-05, |
|
"loss": 4.0422, |
|
"step": 736768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.792854321023243e-05, |
|
"loss": 4.0401, |
|
"step": 737280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.792015726272191e-05, |
|
"loss": 4.0515, |
|
"step": 737792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.791177131521139e-05, |
|
"loss": 4.0554, |
|
"step": 738304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.790338536770087e-05, |
|
"loss": 4.0427, |
|
"step": 738816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.789501579899408e-05, |
|
"loss": 4.0515, |
|
"step": 739328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.788662985148356e-05, |
|
"loss": 4.0439, |
|
"step": 739840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.787824390397304e-05, |
|
"loss": 4.0472, |
|
"step": 740352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.786985795646253e-05, |
|
"loss": 4.0341, |
|
"step": 740864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7861488387755736e-05, |
|
"loss": 4.0324, |
|
"step": 741376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7853102440245216e-05, |
|
"loss": 4.044, |
|
"step": 741888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7844716492734696e-05, |
|
"loss": 4.0433, |
|
"step": 742400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7836330545224176e-05, |
|
"loss": 4.03, |
|
"step": 742912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.782794459771365e-05, |
|
"loss": 4.0345, |
|
"step": 743424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.781955865020313e-05, |
|
"loss": 4.0376, |
|
"step": 743936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7811189081496345e-05, |
|
"loss": 4.0453, |
|
"step": 744448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7802803133985825e-05, |
|
"loss": 4.035, |
|
"step": 744960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.77944171864753e-05, |
|
"loss": 4.0484, |
|
"step": 745472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.778603123896478e-05, |
|
"loss": 4.0466, |
|
"step": 745984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7777645291454264e-05, |
|
"loss": 4.0458, |
|
"step": 746496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7769259343943744e-05, |
|
"loss": 4.0404, |
|
"step": 747008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7760873396433224e-05, |
|
"loss": 4.0483, |
|
"step": 747520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7752487448922704e-05, |
|
"loss": 4.0445, |
|
"step": 748032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7744117880215913e-05, |
|
"loss": 4.0502, |
|
"step": 748544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7735731932705393e-05, |
|
"loss": 4.0473, |
|
"step": 749056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.772734598519487e-05, |
|
"loss": 4.039, |
|
"step": 749568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.771897641648808e-05, |
|
"loss": 4.0452, |
|
"step": 750080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.771059046897756e-05, |
|
"loss": 4.0441, |
|
"step": 750592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.770220452146704e-05, |
|
"loss": 4.0359, |
|
"step": 751104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.769381857395652e-05, |
|
"loss": 4.0336, |
|
"step": 751616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.768544900524973e-05, |
|
"loss": 4.0403, |
|
"step": 752128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.767706305773921e-05, |
|
"loss": 4.029, |
|
"step": 752640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.76686771102287e-05, |
|
"loss": 4.049, |
|
"step": 753152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.766029116271818e-05, |
|
"loss": 4.0355, |
|
"step": 753664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.765190521520766e-05, |
|
"loss": 4.0507, |
|
"step": 754176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.764351926769714e-05, |
|
"loss": 4.0312, |
|
"step": 754688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.763513332018662e-05, |
|
"loss": 4.0497, |
|
"step": 755200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.76267473726761e-05, |
|
"loss": 4.0476, |
|
"step": 755712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.761837780396931e-05, |
|
"loss": 4.0484, |
|
"step": 756224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.760999185645879e-05, |
|
"loss": 4.0446, |
|
"step": 756736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.760160590894827e-05, |
|
"loss": 4.0397, |
|
"step": 757248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.759321996143775e-05, |
|
"loss": 4.0498, |
|
"step": 757760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7584850392730956e-05, |
|
"loss": 4.0507, |
|
"step": 758272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7576464445220436e-05, |
|
"loss": 4.0371, |
|
"step": 758784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7568078497709916e-05, |
|
"loss": 4.0425, |
|
"step": 759296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7559692550199396e-05, |
|
"loss": 4.0443, |
|
"step": 759808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.755130660268888e-05, |
|
"loss": 4.0404, |
|
"step": 760320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.754293703398209e-05, |
|
"loss": 4.0396, |
|
"step": 760832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.753455108647157e-05, |
|
"loss": 4.0343, |
|
"step": 761344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.752616513896105e-05, |
|
"loss": 4.0463, |
|
"step": 761856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.751777919145053e-05, |
|
"loss": 4.0419, |
|
"step": 762368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.750939324394001e-05, |
|
"loss": 4.0479, |
|
"step": 762880 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.068617343902588, |
|
"eval_runtime": 294.5098, |
|
"eval_samples_per_second": 1295.682, |
|
"eval_steps_per_second": 40.491, |
|
"step": 763200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.750102367523322e-05, |
|
"loss": 4.0267, |
|
"step": 763392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.74926377277227e-05, |
|
"loss": 4.0331, |
|
"step": 763904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.748425178021218e-05, |
|
"loss": 4.0344, |
|
"step": 764416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7475865832701654e-05, |
|
"loss": 4.0401, |
|
"step": 764928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7467479885191134e-05, |
|
"loss": 4.0489, |
|
"step": 765440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.745909393768062e-05, |
|
"loss": 4.0445, |
|
"step": 765952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.74507079901701e-05, |
|
"loss": 4.0313, |
|
"step": 766464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.744232204265958e-05, |
|
"loss": 4.0317, |
|
"step": 766976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.743393609514906e-05, |
|
"loss": 4.0313, |
|
"step": 767488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.742555014763854e-05, |
|
"loss": 4.0391, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.741716420012802e-05, |
|
"loss": 4.0369, |
|
"step": 768512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.74087782526175e-05, |
|
"loss": 4.0382, |
|
"step": 769024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.740040868391071e-05, |
|
"loss": 4.0434, |
|
"step": 769536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.739202273640019e-05, |
|
"loss": 4.0226, |
|
"step": 770048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.738363678888967e-05, |
|
"loss": 4.0323, |
|
"step": 770560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.737525084137915e-05, |
|
"loss": 4.0238, |
|
"step": 771072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.736686489386863e-05, |
|
"loss": 4.0412, |
|
"step": 771584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.735847894635811e-05, |
|
"loss": 4.0311, |
|
"step": 772096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.735009299884759e-05, |
|
"loss": 4.0375, |
|
"step": 772608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.734170705133707e-05, |
|
"loss": 4.0346, |
|
"step": 773120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.733332110382655e-05, |
|
"loss": 4.0493, |
|
"step": 773632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.732493515631603e-05, |
|
"loss": 4.0351, |
|
"step": 774144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.731654920880551e-05, |
|
"loss": 4.0427, |
|
"step": 774656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.730816326129499e-05, |
|
"loss": 4.0343, |
|
"step": 775168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.72997936925882e-05, |
|
"loss": 4.0365, |
|
"step": 775680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.729140774507768e-05, |
|
"loss": 4.0396, |
|
"step": 776192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.728302179756716e-05, |
|
"loss": 4.0261, |
|
"step": 776704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7274652228860374e-05, |
|
"loss": 4.0344, |
|
"step": 777216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.726626628134985e-05, |
|
"loss": 4.0269, |
|
"step": 777728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.725788033383933e-05, |
|
"loss": 4.03, |
|
"step": 778240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.724949438632881e-05, |
|
"loss": 4.0327, |
|
"step": 778752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7241108438818287e-05, |
|
"loss": 4.0337, |
|
"step": 779264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.72327388701115e-05, |
|
"loss": 4.0352, |
|
"step": 779776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.722435292260098e-05, |
|
"loss": 4.0467, |
|
"step": 780288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.721596697509046e-05, |
|
"loss": 4.0318, |
|
"step": 780800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.720758102757994e-05, |
|
"loss": 4.0327, |
|
"step": 781312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.719919508006942e-05, |
|
"loss": 4.0468, |
|
"step": 781824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.71908091325589e-05, |
|
"loss": 4.0297, |
|
"step": 782336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.718242318504838e-05, |
|
"loss": 4.0289, |
|
"step": 782848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.717405361634159e-05, |
|
"loss": 4.0329, |
|
"step": 783360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.716566766883107e-05, |
|
"loss": 4.0299, |
|
"step": 783872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.715728172132055e-05, |
|
"loss": 4.0335, |
|
"step": 784384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.714889577381003e-05, |
|
"loss": 4.031, |
|
"step": 784896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.714050982629951e-05, |
|
"loss": 4.0404, |
|
"step": 785408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.713212387878899e-05, |
|
"loss": 4.033, |
|
"step": 785920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.712375431008221e-05, |
|
"loss": 4.0378, |
|
"step": 786432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.711536836257169e-05, |
|
"loss": 4.0329, |
|
"step": 786944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.710698241506117e-05, |
|
"loss": 4.0382, |
|
"step": 787456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.709859646755065e-05, |
|
"loss": 4.0019, |
|
"step": 787968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.709021052004013e-05, |
|
"loss": 4.041, |
|
"step": 788480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7081840951333336e-05, |
|
"loss": 4.0224, |
|
"step": 788992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7073455003822816e-05, |
|
"loss": 4.0388, |
|
"step": 789504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7065069056312296e-05, |
|
"loss": 4.0213, |
|
"step": 790016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7056683108801776e-05, |
|
"loss": 4.0177, |
|
"step": 790528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7048297161291256e-05, |
|
"loss": 4.0159, |
|
"step": 791040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7039927592584465e-05, |
|
"loss": 4.025, |
|
"step": 791552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7031541645073945e-05, |
|
"loss": 4.0348, |
|
"step": 792064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7023155697563425e-05, |
|
"loss": 4.0156, |
|
"step": 792576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7014769750052905e-05, |
|
"loss": 4.0395, |
|
"step": 793088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7006383802542385e-05, |
|
"loss": 4.03, |
|
"step": 793600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6997997855031865e-05, |
|
"loss": 4.011, |
|
"step": 794112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.698962828632508e-05, |
|
"loss": 4.0329, |
|
"step": 794624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.698124233881456e-05, |
|
"loss": 4.0136, |
|
"step": 795136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6972856391304034e-05, |
|
"loss": 4.0141, |
|
"step": 795648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6964470443793514e-05, |
|
"loss": 4.0274, |
|
"step": 796160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.695610087508673e-05, |
|
"loss": 4.0327, |
|
"step": 796672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.69477149275762e-05, |
|
"loss": 4.0142, |
|
"step": 797184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.693932898006568e-05, |
|
"loss": 4.0288, |
|
"step": 797696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.693094303255516e-05, |
|
"loss": 4.0127, |
|
"step": 798208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.692255708504464e-05, |
|
"loss": 4.0261, |
|
"step": 798720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.691417113753413e-05, |
|
"loss": 4.0328, |
|
"step": 799232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.690580156882734e-05, |
|
"loss": 4.0289, |
|
"step": 799744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.689741562131682e-05, |
|
"loss": 4.035, |
|
"step": 800256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.68890296738063e-05, |
|
"loss": 4.0362, |
|
"step": 800768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.688064372629578e-05, |
|
"loss": 4.0506, |
|
"step": 801280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.687227415758899e-05, |
|
"loss": 4.0299, |
|
"step": 801792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.686388821007847e-05, |
|
"loss": 4.033, |
|
"step": 802304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.685550226256795e-05, |
|
"loss": 4.0211, |
|
"step": 802816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.684711631505743e-05, |
|
"loss": 4.0283, |
|
"step": 803328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6838746746350636e-05, |
|
"loss": 4.025, |
|
"step": 803840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6830360798840116e-05, |
|
"loss": 4.0297, |
|
"step": 804352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6821974851329596e-05, |
|
"loss": 4.0353, |
|
"step": 804864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.681358890381908e-05, |
|
"loss": 4.0404, |
|
"step": 805376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.680520295630856e-05, |
|
"loss": 4.0177, |
|
"step": 805888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.679681700879804e-05, |
|
"loss": 4.0209, |
|
"step": 806400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.678843106128752e-05, |
|
"loss": 4.0217, |
|
"step": 806912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6780045113777e-05, |
|
"loss": 4.0308, |
|
"step": 807424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.677165916626648e-05, |
|
"loss": 4.0286, |
|
"step": 807936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.676328959755969e-05, |
|
"loss": 4.0221, |
|
"step": 808448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.675490365004917e-05, |
|
"loss": 4.0253, |
|
"step": 808960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.674651770253865e-05, |
|
"loss": 4.0276, |
|
"step": 809472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.673813175502813e-05, |
|
"loss": 4.0226, |
|
"step": 809984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.672974580751761e-05, |
|
"loss": 4.0197, |
|
"step": 810496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.672135986000709e-05, |
|
"loss": 4.0363, |
|
"step": 811008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6712973912496565e-05, |
|
"loss": 4.0334, |
|
"step": 811520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6704587964986045e-05, |
|
"loss": 4.0286, |
|
"step": 812032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6696234775082996e-05, |
|
"loss": 4.0186, |
|
"step": 812544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6687848827572476e-05, |
|
"loss": 4.0211, |
|
"step": 813056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6679462880061956e-05, |
|
"loss": 4.0227, |
|
"step": 813568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6671076932551436e-05, |
|
"loss": 4.0299, |
|
"step": 814080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6662690985040916e-05, |
|
"loss": 4.0379, |
|
"step": 814592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6654305037530396e-05, |
|
"loss": 4.0203, |
|
"step": 815104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.664591909001987e-05, |
|
"loss": 4.0328, |
|
"step": 815616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.663753314250935e-05, |
|
"loss": 4.0254, |
|
"step": 816128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6629163573802565e-05, |
|
"loss": 4.0244, |
|
"step": 816640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.662077762629204e-05, |
|
"loss": 4.015, |
|
"step": 817152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6612408057585254e-05, |
|
"loss": 4.014, |
|
"step": 817664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6604022110074734e-05, |
|
"loss": 4.0223, |
|
"step": 818176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6595636162564214e-05, |
|
"loss": 4.0223, |
|
"step": 818688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6587250215053694e-05, |
|
"loss": 4.0144, |
|
"step": 819200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6578864267543174e-05, |
|
"loss": 4.0137, |
|
"step": 819712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6570478320032654e-05, |
|
"loss": 4.0115, |
|
"step": 820224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6562092372522134e-05, |
|
"loss": 4.0262, |
|
"step": 820736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6553706425011614e-05, |
|
"loss": 4.0178, |
|
"step": 821248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.654533685630482e-05, |
|
"loss": 4.0248, |
|
"step": 821760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.65369509087943e-05, |
|
"loss": 4.0255, |
|
"step": 822272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.652856496128378e-05, |
|
"loss": 4.0238, |
|
"step": 822784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.652021177138073e-05, |
|
"loss": 4.0206, |
|
"step": 823296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.651182582387021e-05, |
|
"loss": 4.0304, |
|
"step": 823808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.650343987635969e-05, |
|
"loss": 4.0237, |
|
"step": 824320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.649505392884917e-05, |
|
"loss": 4.0326, |
|
"step": 824832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6486684360142384e-05, |
|
"loss": 4.0237, |
|
"step": 825344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6478298412631864e-05, |
|
"loss": 4.0209, |
|
"step": 825856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6469912465121344e-05, |
|
"loss": 4.0235, |
|
"step": 826368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.646152651761082e-05, |
|
"loss": 4.0242, |
|
"step": 826880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.64531405701003e-05, |
|
"loss": 4.018, |
|
"step": 827392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.644475462258978e-05, |
|
"loss": 4.015, |
|
"step": 827904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.643636867507926e-05, |
|
"loss": 4.0149, |
|
"step": 828416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.642798272756874e-05, |
|
"loss": 4.012, |
|
"step": 828928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6419596780058217e-05, |
|
"loss": 4.0269, |
|
"step": 829440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6411210832547697e-05, |
|
"loss": 4.0214, |
|
"step": 829952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6402824885037176e-05, |
|
"loss": 4.0262, |
|
"step": 830464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6394438937526656e-05, |
|
"loss": 4.0154, |
|
"step": 830976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6386052990016136e-05, |
|
"loss": 4.0288, |
|
"step": 831488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.637768342130935e-05, |
|
"loss": 4.0275, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.636931385260256e-05, |
|
"loss": 4.0253, |
|
"step": 832512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.636092790509204e-05, |
|
"loss": 4.0243, |
|
"step": 833024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.635254195758152e-05, |
|
"loss": 4.0199, |
|
"step": 833536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6344156010071e-05, |
|
"loss": 4.0306, |
|
"step": 834048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.633577006256048e-05, |
|
"loss": 4.0322, |
|
"step": 834560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.632738411504996e-05, |
|
"loss": 4.0143, |
|
"step": 835072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.631899816753944e-05, |
|
"loss": 4.0209, |
|
"step": 835584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.631061222002892e-05, |
|
"loss": 4.0274, |
|
"step": 836096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.630224265132213e-05, |
|
"loss": 4.0195, |
|
"step": 836608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6293873082615346e-05, |
|
"loss": 4.0173, |
|
"step": 837120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6285487135104826e-05, |
|
"loss": 4.0177, |
|
"step": 837632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6277101187594306e-05, |
|
"loss": 4.0227, |
|
"step": 838144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6268715240083786e-05, |
|
"loss": 4.0224, |
|
"step": 838656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6260345671376995e-05, |
|
"loss": 4.0271, |
|
"step": 839168 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.055795669555664, |
|
"eval_runtime": 299.9809, |
|
"eval_samples_per_second": 1272.051, |
|
"eval_steps_per_second": 39.753, |
|
"step": 839520 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6251959723866475e-05, |
|
"loss": 4.0038, |
|
"step": 839680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6243573776355955e-05, |
|
"loss": 4.0131, |
|
"step": 840192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6235187828845435e-05, |
|
"loss": 4.0155, |
|
"step": 840704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6226801881334915e-05, |
|
"loss": 4.0202, |
|
"step": 841216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6218415933824395e-05, |
|
"loss": 4.0261, |
|
"step": 841728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6210029986313875e-05, |
|
"loss": 4.0262, |
|
"step": 842240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.620164403880335e-05, |
|
"loss": 4.0148, |
|
"step": 842752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.619325809129283e-05, |
|
"loss": 4.014, |
|
"step": 843264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6184872143782315e-05, |
|
"loss": 4.0094, |
|
"step": 843776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6176486196271795e-05, |
|
"loss": 4.0162, |
|
"step": 844288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6168100248761275e-05, |
|
"loss": 4.0247, |
|
"step": 844800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6159714301250754e-05, |
|
"loss": 4.0172, |
|
"step": 845312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6151344732543964e-05, |
|
"loss": 4.0226, |
|
"step": 845824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6142958785033444e-05, |
|
"loss": 4.0043, |
|
"step": 846336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6134572837522923e-05, |
|
"loss": 4.0155, |
|
"step": 846848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6126186890012403e-05, |
|
"loss": 3.9994, |
|
"step": 847360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.611780094250188e-05, |
|
"loss": 4.0226, |
|
"step": 847872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.610941499499136e-05, |
|
"loss": 4.0123, |
|
"step": 848384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.610102904748084e-05, |
|
"loss": 4.0155, |
|
"step": 848896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.609264309997032e-05, |
|
"loss": 4.0137, |
|
"step": 849408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.60842571524598e-05, |
|
"loss": 4.0334, |
|
"step": 849920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.607587120494928e-05, |
|
"loss": 4.0121, |
|
"step": 850432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.606748525743876e-05, |
|
"loss": 4.0243, |
|
"step": 850944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.605909930992824e-05, |
|
"loss": 4.0138, |
|
"step": 851456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.605071336241772e-05, |
|
"loss": 4.0172, |
|
"step": 851968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.604234379371094e-05, |
|
"loss": 4.0222, |
|
"step": 852480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.603395784620042e-05, |
|
"loss": 4.006, |
|
"step": 852992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.602558827749363e-05, |
|
"loss": 4.0154, |
|
"step": 853504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.601720232998311e-05, |
|
"loss": 4.0095, |
|
"step": 854016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.600881638247259e-05, |
|
"loss": 4.0112, |
|
"step": 854528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.600043043496206e-05, |
|
"loss": 4.0155, |
|
"step": 855040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.599204448745154e-05, |
|
"loss": 4.0178, |
|
"step": 855552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.598365853994102e-05, |
|
"loss": 4.0107, |
|
"step": 856064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.59752725924305e-05, |
|
"loss": 4.0309, |
|
"step": 856576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.596690302372372e-05, |
|
"loss": 4.0151, |
|
"step": 857088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.595853345501693e-05, |
|
"loss": 4.0138, |
|
"step": 857600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.595014750750641e-05, |
|
"loss": 4.0246, |
|
"step": 858112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.594176155999589e-05, |
|
"loss": 4.0094, |
|
"step": 858624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5933375612485366e-05, |
|
"loss": 4.0092, |
|
"step": 859136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5924989664974846e-05, |
|
"loss": 4.0196, |
|
"step": 859648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5916603717464326e-05, |
|
"loss": 4.0076, |
|
"step": 860160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5908217769953806e-05, |
|
"loss": 4.0164, |
|
"step": 860672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5899831822443286e-05, |
|
"loss": 4.0165, |
|
"step": 861184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5891445874932766e-05, |
|
"loss": 4.0126, |
|
"step": 861696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5883076306225975e-05, |
|
"loss": 4.0189, |
|
"step": 862208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5874690358715455e-05, |
|
"loss": 4.0169, |
|
"step": 862720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5866304411204935e-05, |
|
"loss": 4.0148, |
|
"step": 863232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5857918463694414e-05, |
|
"loss": 4.0183, |
|
"step": 863744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.58495325161839e-05, |
|
"loss": 3.9839, |
|
"step": 864256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.584114656867338e-05, |
|
"loss": 4.0171, |
|
"step": 864768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.583276062116286e-05, |
|
"loss": 4.0095, |
|
"step": 865280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.582437467365234e-05, |
|
"loss": 4.0187, |
|
"step": 865792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.581600510494555e-05, |
|
"loss": 4.005, |
|
"step": 866304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.580763553623876e-05, |
|
"loss": 3.9959, |
|
"step": 866816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.579924958872824e-05, |
|
"loss": 4.0002, |
|
"step": 867328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.579086364121772e-05, |
|
"loss": 4.0044, |
|
"step": 867840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.57824776937072e-05, |
|
"loss": 4.0151, |
|
"step": 868352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.577409174619668e-05, |
|
"loss": 3.9949, |
|
"step": 868864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.576572217748989e-05, |
|
"loss": 4.0222, |
|
"step": 869376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.575733622997937e-05, |
|
"loss": 4.014, |
|
"step": 869888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5748950282468855e-05, |
|
"loss": 3.9877, |
|
"step": 870400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5740564334958335e-05, |
|
"loss": 4.0146, |
|
"step": 870912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5732178387447815e-05, |
|
"loss": 3.9975, |
|
"step": 871424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5723792439937295e-05, |
|
"loss": 3.9945, |
|
"step": 871936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5715406492426775e-05, |
|
"loss": 4.0043, |
|
"step": 872448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.570702054491625e-05, |
|
"loss": 4.0162, |
|
"step": 872960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.569863459740573e-05, |
|
"loss": 3.9946, |
|
"step": 873472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5690265028698944e-05, |
|
"loss": 4.0111, |
|
"step": 873984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5681879081188424e-05, |
|
"loss": 3.9899, |
|
"step": 874496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.56734931336779e-05, |
|
"loss": 4.0078, |
|
"step": 875008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.566510718616738e-05, |
|
"loss": 4.0132, |
|
"step": 875520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.565673761746059e-05, |
|
"loss": 4.0149, |
|
"step": 876032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.564836804875381e-05, |
|
"loss": 4.0144, |
|
"step": 876544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.563998210124329e-05, |
|
"loss": 4.0162, |
|
"step": 877056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.563159615373277e-05, |
|
"loss": 4.0336, |
|
"step": 877568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.562321020622225e-05, |
|
"loss": 4.0127, |
|
"step": 878080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.561482425871172e-05, |
|
"loss": 4.0148, |
|
"step": 878592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.56064383112012e-05, |
|
"loss": 4.0006, |
|
"step": 879104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.559806874249442e-05, |
|
"loss": 4.0077, |
|
"step": 879616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.55896827949839e-05, |
|
"loss": 4.0053, |
|
"step": 880128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.558129684747337e-05, |
|
"loss": 4.0077, |
|
"step": 880640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.557291089996285e-05, |
|
"loss": 4.0165, |
|
"step": 881152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.556452495245233e-05, |
|
"loss": 4.0197, |
|
"step": 881664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.555613900494181e-05, |
|
"loss": 4.0017, |
|
"step": 882176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.554775305743129e-05, |
|
"loss": 4.001, |
|
"step": 882688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.553936710992077e-05, |
|
"loss": 3.9976, |
|
"step": 883200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5530997541213986e-05, |
|
"loss": 4.0176, |
|
"step": 883712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5522611593703466e-05, |
|
"loss": 4.0085, |
|
"step": 884224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.551427478260414e-05, |
|
"loss": 4.0015, |
|
"step": 884736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.550588883509362e-05, |
|
"loss": 4.0111, |
|
"step": 885248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.54975028875831e-05, |
|
"loss": 4.0062, |
|
"step": 885760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548911694007258e-05, |
|
"loss": 4.0058, |
|
"step": 886272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548073099256206e-05, |
|
"loss": 4.0014, |
|
"step": 886784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.547236142385527e-05, |
|
"loss": 4.018, |
|
"step": 887296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.546397547634475e-05, |
|
"loss": 4.0094, |
|
"step": 887808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.545558952883423e-05, |
|
"loss": 4.0117, |
|
"step": 888320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.544720358132371e-05, |
|
"loss": 3.9988, |
|
"step": 888832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5438817633813196e-05, |
|
"loss": 4.0028, |
|
"step": 889344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.543043168630267e-05, |
|
"loss": 4.0042, |
|
"step": 889856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.542204573879215e-05, |
|
"loss": 4.013, |
|
"step": 890368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.541365979128163e-05, |
|
"loss": 4.0148, |
|
"step": 890880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.540527384377111e-05, |
|
"loss": 4.006, |
|
"step": 891392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.539688789626059e-05, |
|
"loss": 4.0111, |
|
"step": 891904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.538850194875007e-05, |
|
"loss": 4.0092, |
|
"step": 892416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.538011600123955e-05, |
|
"loss": 4.0048, |
|
"step": 892928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.537173005372903e-05, |
|
"loss": 3.996, |
|
"step": 893440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.536336048502224e-05, |
|
"loss": 3.9982, |
|
"step": 893952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.535497453751172e-05, |
|
"loss": 3.9989, |
|
"step": 894464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.53465885900012e-05, |
|
"loss": 4.0052, |
|
"step": 894976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.533820264249068e-05, |
|
"loss": 3.9953, |
|
"step": 895488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5329816694980164e-05, |
|
"loss": 3.9963, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5321430747469644e-05, |
|
"loss": 3.9946, |
|
"step": 896512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5313061178762853e-05, |
|
"loss": 4.0027, |
|
"step": 897024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5304675231252333e-05, |
|
"loss": 4.0018, |
|
"step": 897536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.529628928374181e-05, |
|
"loss": 4.0036, |
|
"step": 898048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.528790333623129e-05, |
|
"loss": 4.0065, |
|
"step": 898560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.52795337675245e-05, |
|
"loss": 4.0078, |
|
"step": 899072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.527114782001398e-05, |
|
"loss": 4.0024, |
|
"step": 899584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.526276187250346e-05, |
|
"loss": 4.0116, |
|
"step": 900096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.525437592499294e-05, |
|
"loss": 4.0053, |
|
"step": 900608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.524600635628615e-05, |
|
"loss": 4.0138, |
|
"step": 901120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.523762040877563e-05, |
|
"loss": 4.0046, |
|
"step": 901632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.522923446126512e-05, |
|
"loss": 4.0046, |
|
"step": 902144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.52208485137546e-05, |
|
"loss": 4.003, |
|
"step": 902656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.521246256624408e-05, |
|
"loss": 4.0074, |
|
"step": 903168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.520407661873356e-05, |
|
"loss": 3.9998, |
|
"step": 903680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.519569067122303e-05, |
|
"loss": 3.9988, |
|
"step": 904192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.518730472371251e-05, |
|
"loss": 3.9906, |
|
"step": 904704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.517893515500573e-05, |
|
"loss": 3.9975, |
|
"step": 905216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.517054920749521e-05, |
|
"loss": 4.0078, |
|
"step": 905728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.516216325998468e-05, |
|
"loss": 4.0017, |
|
"step": 906240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.515377731247416e-05, |
|
"loss": 4.0091, |
|
"step": 906752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.514539136496364e-05, |
|
"loss": 3.9901, |
|
"step": 907264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.513700541745312e-05, |
|
"loss": 4.0153, |
|
"step": 907776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.51286194699426e-05, |
|
"loss": 4.0105, |
|
"step": 908288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.512023352243209e-05, |
|
"loss": 4.0105, |
|
"step": 908800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5111847574921567e-05, |
|
"loss": 3.9978, |
|
"step": 909312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5103461627411047e-05, |
|
"loss": 4.0051, |
|
"step": 909824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5095075679900526e-05, |
|
"loss": 4.0139, |
|
"step": 910336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5086689732390006e-05, |
|
"loss": 4.0133, |
|
"step": 910848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5078303784879486e-05, |
|
"loss": 3.9995, |
|
"step": 911360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5069934216172696e-05, |
|
"loss": 4.0005, |
|
"step": 911872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5061548268662175e-05, |
|
"loss": 4.0062, |
|
"step": 912384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5053162321151655e-05, |
|
"loss": 4.0055, |
|
"step": 912896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5044776373641135e-05, |
|
"loss": 3.9976, |
|
"step": 913408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5036406804934344e-05, |
|
"loss": 4.0022, |
|
"step": 913920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5028020857423824e-05, |
|
"loss": 4.001, |
|
"step": 914432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5019634909913304e-05, |
|
"loss": 4.0026, |
|
"step": 914944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5011248962402784e-05, |
|
"loss": 4.011, |
|
"step": 915456 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.044595241546631, |
|
"eval_runtime": 295.5841, |
|
"eval_samples_per_second": 1290.973, |
|
"eval_steps_per_second": 40.344, |
|
"step": 915840 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 3.780925982202824e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|