{
  "best_metric": 4.0698771476745605,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/2/checkpoints/checkpoint-763200",
  "epoch": 1.0250006060157382,
  "eval_steps": 10,
  "global_step": 763200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8193, "step": 1 },
    { "epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5492, "step": 512 },
    { "epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0645, "step": 1024 },
    { "epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 7.002, "step": 1536 },
    { "epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.9471, "step": 2048 },
    { "epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.9141, "step": 2560 },
    { "epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7538, "step": 3072 },
    { "epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 6.6419, "step": 3584 },
    { "epoch": 0.0, "learning_rate": 4.993292879871958e-05, "loss": 6.5403, "step": 4096 },
    { "epoch": 0.0, "learning_rate": 4.992454285120906e-05, "loss": 6.4622, "step": 4608 },
    { "epoch": 0.0, "learning_rate": 4.991615690369854e-05, "loss": 6.3981, "step": 5120 },
    { "epoch": 0.0, "learning_rate": 4.990777095618801e-05, "loss": 6.3393, "step": 5632 },
    { "epoch": 0.0, "learning_rate": 4.989938500867749e-05, "loss": 6.268, "step": 6144 },
    { "epoch": 0.0, "learning_rate": 4.989099906116697e-05, "loss": 6.2066, "step": 6656 },
    { "epoch": 0.0, "learning_rate": 4.988261311365645e-05, "loss": 6.1484, "step": 7168 },
    { "epoch": 0.0, "learning_rate": 4.987422716614593e-05, "loss": 6.0968, "step": 7680 },
    { "epoch": 0.0, "learning_rate": 4.986584121863541e-05, "loss": 6.0509, "step": 8192 },
    { "epoch": 0.0, "learning_rate": 4.985745527112489e-05, "loss": 6.0172, "step": 8704 },
    { "epoch": 0.0, "learning_rate": 4.984906932361437e-05, "loss": 5.967, "step": 9216 },
    { "epoch": 0.0, "learning_rate": 4.984068337610385e-05, "loss": 5.9336, "step": 9728 },
    { "epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.8997, "step": 10240 },
    { "epoch": 0.0, "learning_rate": 4.982391148108281e-05, "loss": 5.8501, "step": 10752 },
    { "epoch": 0.0, "learning_rate": 4.981552553357229e-05, "loss": 5.8196, "step": 11264 },
    { "epoch": 0.0, "learning_rate": 4.980713958606178e-05, "loss": 5.7869, "step": 11776 },
    { "epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7698, "step": 12288 },
    { "epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.7335, "step": 12800 },
    { "epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.7045, "step": 13312 },
    { "epoch": 0.0, "learning_rate": 4.9773612174823426e-05, "loss": 5.6859, "step": 13824 },
    { "epoch": 0.0, "learning_rate": 4.9765242606116635e-05, "loss": 5.6594, "step": 14336 },
    { "epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 5.6313, "step": 14848 },
    { "epoch": 0.01, "learning_rate": 4.9748487089899324e-05, "loss": 5.6066, "step": 15360 },
    { "epoch": 0.01, "learning_rate": 4.9740101142388804e-05, "loss": 5.5985, "step": 15872 },
    { "epoch": 0.01, "learning_rate": 4.9731715194878284e-05, "loss": 5.5763, "step": 16384 },
    { "epoch": 0.01, "learning_rate": 4.97233456261715e-05, "loss": 5.5659, "step": 16896 },
    { "epoch": 0.01, "learning_rate": 4.971495967866098e-05, "loss": 5.5443, "step": 17408 },
    { "epoch": 0.01, "learning_rate": 4.970657373115046e-05, "loss": 5.525, "step": 17920 },
    { "epoch": 0.01, "learning_rate": 4.969818778363994e-05, "loss": 5.5193, "step": 18432 },
    { "epoch": 0.01, "learning_rate": 4.968980183612942e-05, "loss": 5.4807, "step": 18944 },
    { "epoch": 0.01, "learning_rate": 4.968143226742263e-05, "loss": 5.4701, "step": 19456 },
    { "epoch": 0.01, "learning_rate": 4.967304631991211e-05, "loss": 5.4515, "step": 19968 },
    { "epoch": 0.01, "learning_rate": 4.966466037240159e-05, "loss": 5.4446, "step": 20480 },
    { "epoch": 0.01, "learning_rate": 4.965627442489107e-05, "loss": 5.4206, "step": 20992 },
    { "epoch": 0.01, "learning_rate": 4.964790485618428e-05, "loss": 5.4259, "step": 21504 },
    { "epoch": 0.01, "learning_rate": 4.963951890867376e-05, "loss": 5.3917, "step": 22016 },
    { "epoch": 0.01, "learning_rate": 4.963113296116324e-05, "loss": 5.391, "step": 22528 },
    { "epoch": 0.01, "learning_rate": 4.9622763392456454e-05, "loss": 5.3845, "step": 23040 },
    { "epoch": 0.01, "learning_rate": 4.9614377444945934e-05, "loss": 5.3756, "step": 23552 },
    { "epoch": 0.01, "learning_rate": 4.9605991497435414e-05, "loss": 5.3678, "step": 24064 },
    { "epoch": 0.01, "learning_rate": 4.959762192872862e-05, "loss": 5.3446, "step": 24576 },
    { "epoch": 0.01, "learning_rate": 4.95892359812181e-05, "loss": 5.3237, "step": 25088 },
    { "epoch": 0.01, "learning_rate": 4.958085003370758e-05, "loss": 5.3379, "step": 25600 },
    { "epoch": 0.01, "learning_rate": 4.957246408619706e-05, "loss": 5.3169, "step": 26112 },
    { "epoch": 0.01, "learning_rate": 4.956407813868654e-05, "loss": 5.3024, "step": 26624 },
    { "epoch": 0.01, "learning_rate": 4.9555692191176016e-05, "loss": 5.2854, "step": 27136 },
    { "epoch": 0.01, "learning_rate": 4.9547306243665496e-05, "loss": 5.2922, "step": 27648 },
    { "epoch": 0.01, "learning_rate": 4.9538920296154976e-05, "loss": 5.272, "step": 28160 },
    { "epoch": 0.01, "learning_rate": 4.9530534348644456e-05, "loss": 5.2853, "step": 28672 },
    { "epoch": 0.01, "learning_rate": 4.9522148401133936e-05, "loss": 5.2534, "step": 29184 },
    { "epoch": 0.01, "learning_rate": 4.951376245362342e-05, "loss": 5.2468, "step": 29696 },
    { "epoch": 0.01, "learning_rate": 4.95053765061129e-05, "loss": 5.2473, "step": 30208 },
    { "epoch": 0.01, "learning_rate": 4.949700693740611e-05, "loss": 5.2315, "step": 30720 },
    { "epoch": 0.01, "learning_rate": 4.948862098989559e-05, "loss": 5.2092, "step": 31232 },
    { "epoch": 0.01, "learning_rate": 4.948023504238507e-05, "loss": 5.2138, "step": 31744 },
    { "epoch": 0.01, "learning_rate": 4.947184909487455e-05, "loss": 5.192, "step": 32256 },
    { "epoch": 0.01, "learning_rate": 4.946346314736403e-05, "loss": 5.1927, "step": 32768 },
    { "epoch": 0.01, "learning_rate": 4.945507719985351e-05, "loss": 5.2033, "step": 33280 },
    { "epoch": 0.01, "learning_rate": 4.944670763114672e-05, "loss": 5.1956, "step": 33792 },
    { "epoch": 0.01, "learning_rate": 4.94383216836362e-05, "loss": 5.177, "step": 34304 },
    { "epoch": 0.01, "learning_rate": 4.942995211492941e-05, "loss": 5.1571, "step": 34816 },
    { "epoch": 0.01, "learning_rate": 4.942156616741889e-05, "loss": 5.1516, "step": 35328 },
    { "epoch": 0.01, "learning_rate": 4.941318021990837e-05, "loss": 5.1625, "step": 35840 },
    { "epoch": 0.01, "learning_rate": 4.9404794272397856e-05, "loss": 5.1509, "step": 36352 },
    { "epoch": 0.01, "learning_rate": 4.9396408324887336e-05, "loss": 5.1476, "step": 36864 },
    { "epoch": 0.01, "learning_rate": 4.9388022377376816e-05, "loss": 5.1455, "step": 37376 },
    { "epoch": 0.01, "learning_rate": 4.9379636429866296e-05, "loss": 5.1444, "step": 37888 },
    { "epoch": 0.01, "learning_rate": 4.9371266861159505e-05, "loss": 5.1363, "step": 38400 },
    { "epoch": 0.01, "learning_rate": 4.9362897292452714e-05, "loss": 5.1086, "step": 38912 },
    { "epoch": 0.01, "learning_rate": 4.9354511344942194e-05, "loss": 5.1124, "step": 39424 },
    { "epoch": 0.01, "learning_rate": 4.9346125397431674e-05, "loss": 5.1071, "step": 39936 },
    { "epoch": 0.01, "learning_rate": 4.9337739449921154e-05, "loss": 5.093, "step": 40448 },
    { "epoch": 0.01, "learning_rate": 4.9329353502410634e-05, "loss": 5.1114, "step": 40960 },
    { "epoch": 0.01, "learning_rate": 4.9320967554900114e-05, "loss": 5.0892, "step": 41472 },
    { "epoch": 0.01, "learning_rate": 4.9312581607389594e-05, "loss": 5.0963, "step": 41984 },
    { "epoch": 0.01, "learning_rate": 4.9304195659879074e-05, "loss": 5.0799, "step": 42496 },
    { "epoch": 0.01, "learning_rate": 4.9295809712368554e-05, "loss": 5.0572, "step": 43008 },
    { "epoch": 0.01, "learning_rate": 4.9287423764858034e-05, "loss": 5.0618, "step": 43520 },
    { "epoch": 0.01, "learning_rate": 4.9279037817347514e-05, "loss": 5.0654, "step": 44032 },
    { "epoch": 0.01, "learning_rate": 4.9270651869836994e-05, "loss": 5.0603, "step": 44544 },
    { "epoch": 0.01, "learning_rate": 4.9262265922326474e-05, "loss": 5.0547, "step": 45056 },
    { "epoch": 0.01, "learning_rate": 4.925389635361968e-05, "loss": 5.0417, "step": 45568 },
    { "epoch": 0.02, "learning_rate": 4.924551040610916e-05, "loss": 5.0396, "step": 46080 },
    { "epoch": 0.02, "learning_rate": 4.923712445859864e-05, "loss": 5.0283, "step": 46592 },
    { "epoch": 0.02, "learning_rate": 4.922875488989185e-05, "loss": 5.0219, "step": 47104 },
    { "epoch": 0.02, "learning_rate": 4.922036894238133e-05, "loss": 5.0194, "step": 47616 },
    { "epoch": 0.02, "learning_rate": 4.921198299487081e-05, "loss": 5.0117, "step": 48128 },
    { "epoch": 0.02, "learning_rate": 4.920359704736029e-05, "loss": 5.02, "step": 48640 },
    { "epoch": 0.02, "learning_rate": 4.919521109984978e-05, "loss": 5.0006, "step": 49152 },
    { "epoch": 0.02, "learning_rate": 4.918682515233926e-05, "loss": 4.9951, "step": 49664 },
    { "epoch": 0.02, "learning_rate": 4.917843920482874e-05, "loss": 4.9888, "step": 50176 },
    { "epoch": 0.02, "learning_rate": 4.917005325731822e-05, "loss": 4.9906, "step": 50688 },
    { "epoch": 0.02, "learning_rate": 4.91616673098077e-05, "loss": 4.9893, "step": 51200 },
    { "epoch": 0.02, "learning_rate": 4.915328136229718e-05, "loss": 4.9769, "step": 51712 },
    { "epoch": 0.02, "learning_rate": 4.914491179359039e-05, "loss": 4.9699, "step": 52224 },
    { "epoch": 0.02, "learning_rate": 4.913652584607987e-05, "loss": 4.9543, "step": 52736 },
    { "epoch": 0.02, "learning_rate": 4.912813989856935e-05, "loss": 4.963, "step": 53248 },
    { "epoch": 0.02, "learning_rate": 4.911975395105883e-05, "loss": 4.9493, "step": 53760 },
    { "epoch": 0.02, "learning_rate": 4.9111384382352036e-05, "loss": 4.9441, "step": 54272 },
    { "epoch": 0.02, "learning_rate": 4.9102998434841516e-05, "loss": 4.9462, "step": 54784 },
    { "epoch": 0.02, "learning_rate": 4.9094612487330996e-05, "loss": 4.944, "step": 55296 },
    { "epoch": 0.02, "learning_rate": 4.9086226539820476e-05, "loss": 4.9389, "step": 55808 },
    { "epoch": 0.02, "learning_rate": 4.907787334991742e-05, "loss": 4.9359, "step": 56320 },
    { "epoch": 0.02, "learning_rate": 4.90694874024069e-05, "loss": 4.9245, "step": 56832 },
    { "epoch": 0.02, "learning_rate": 4.906110145489638e-05, "loss": 4.9255, "step": 57344 },
    { "epoch": 0.02, "learning_rate": 4.905271550738586e-05, "loss": 4.9159, "step": 57856 },
    { "epoch": 0.02, "learning_rate": 4.904432955987534e-05, "loss": 4.9109, "step": 58368 },
    { "epoch": 0.02, "learning_rate": 4.903594361236482e-05, "loss": 4.9194, "step": 58880 },
    { "epoch": 0.02, "learning_rate": 4.90275576648543e-05, "loss": 4.9099, "step": 59392 },
    { "epoch": 0.02, "learning_rate": 4.901917171734378e-05, "loss": 4.909, "step": 59904 },
    { "epoch": 0.02, "learning_rate": 4.901080214863699e-05, "loss": 4.898, "step": 60416 },
    { "epoch": 0.02, "learning_rate": 4.900241620112647e-05, "loss": 4.9054, "step": 60928 },
    { "epoch": 0.02, "learning_rate": 4.899403025361595e-05, "loss": 4.9012, "step": 61440 },
    { "epoch": 0.02, "learning_rate": 4.8985660684909166e-05, "loss": 4.8819, "step": 61952 },
    { "epoch": 0.02, "learning_rate": 4.8977274737398646e-05, "loss": 4.8966, "step": 62464 },
    { "epoch": 0.02, "learning_rate": 4.8968888789888125e-05, "loss": 4.8841, "step": 62976 },
    { "epoch": 0.02, "learning_rate": 4.8960502842377605e-05, "loss": 4.8852, "step": 63488 },
    { "epoch": 0.02, "learning_rate": 4.8952116894867085e-05, "loss": 4.864, "step": 64000 },
    { "epoch": 0.02, "learning_rate": 4.8943730947356565e-05, "loss": 4.8568, "step": 64512 },
    { "epoch": 0.02, "learning_rate": 4.893534499984604e-05, "loss": 4.8546, "step": 65024 },
    { "epoch": 0.02, "learning_rate": 4.8926975431139254e-05, "loss": 4.8574, "step": 65536 },
    { "epoch": 0.02, "learning_rate": 4.8918589483628734e-05, "loss": 4.8465, "step": 66048 },
    { "epoch": 0.02, "learning_rate": 4.8910203536118214e-05, "loss": 4.865, "step": 66560 },
    { "epoch": 0.02, "learning_rate": 4.890181758860769e-05, "loss": 4.8606, "step": 67072 },
    { "epoch": 0.02, "learning_rate": 4.88934480199009e-05, "loss": 4.8422, "step": 67584 },
    { "epoch": 0.02, "learning_rate": 4.888506207239038e-05, "loss": 4.8469, "step": 68096 },
    { "epoch": 0.02, "learning_rate": 4.887667612487986e-05, "loss": 4.842, "step": 68608 },
    { "epoch": 0.02, "learning_rate": 4.886829017736934e-05, "loss": 4.8419, "step": 69120 },
    { "epoch": 0.02, "learning_rate": 4.885990422985882e-05, "loss": 4.8419, "step": 69632 },
    { "epoch": 0.02, "learning_rate": 4.88515182823483e-05, "loss": 4.8421, "step": 70144 },
    { "epoch": 0.02, "learning_rate": 4.884313233483778e-05, "loss": 4.8245, "step": 70656 },
    { "epoch": 0.02, "learning_rate": 4.883474638732726e-05, "loss": 4.8258, "step": 71168 },
    { "epoch": 0.02, "learning_rate": 4.882636043981674e-05, "loss": 4.8207, "step": 71680 },
    { "epoch": 0.02, "learning_rate": 4.881797449230622e-05, "loss": 4.8109, "step": 72192 },
    { "epoch": 0.02, "learning_rate": 4.88095885447957e-05, "loss": 4.825, "step": 72704 },
    { "epoch": 0.02, "learning_rate": 4.880121897608891e-05, "loss": 4.8092, "step": 73216 },
    { "epoch": 0.02, "learning_rate": 4.879283302857839e-05, "loss": 4.7991, "step": 73728 },
    { "epoch": 0.02, "learning_rate": 4.87844634598716e-05, "loss": 4.8154, "step": 74240 },
    { "epoch": 0.02, "learning_rate": 4.877607751236109e-05, "loss": 4.7976, "step": 74752 },
    { "epoch": 0.02, "learning_rate": 4.87677079436543e-05, "loss": 4.7951, "step": 75264 },
    { "epoch": 0.02, "learning_rate": 4.875932199614378e-05, "loss": 4.799, "step": 75776 },
    { "epoch": 0.02, "learning_rate": 4.875093604863326e-05, "loss": 4.7913, "step": 76288 },
    { "epoch": 0.03, "eval_loss": 4.7542266845703125, "eval_runtime": 305.3018, "eval_samples_per_second": 1249.881, "eval_steps_per_second": 39.06, "step": 76320 },
    { "epoch": 1.0, "learning_rate": 4.874255010112274e-05, "loss": 4.7754, "step": 76800 },
    { "epoch": 1.0, "learning_rate": 4.873416415361222e-05, "loss": 4.7866, "step": 77312 },
    { "epoch": 1.0, "learning_rate": 4.87257782061017e-05, "loss": 4.8014, "step": 77824 },
    { "epoch": 1.0, "learning_rate": 4.871739225859118e-05, "loss": 4.7759, "step": 78336 },
    { "epoch": 1.0, "learning_rate": 4.8709006311080657e-05, "loss": 4.7881, "step": 78848 },
    { "epoch": 1.0, "learning_rate": 4.8700620363570137e-05, "loss": 4.7614, "step": 79360 },
    { "epoch": 1.0, "learning_rate": 4.8692234416059616e-05, "loss": 4.7783, "step": 79872 },
    { "epoch": 1.0, "learning_rate": 4.8683848468549096e-05, "loss": 4.7561, "step": 80384 },
    { "epoch": 1.0, "learning_rate": 4.8675478899842306e-05, "loss": 4.7648, "step": 80896 },
    { "epoch": 1.0, "learning_rate": 4.8667092952331785e-05, "loss": 4.765, "step": 81408 },
    { "epoch": 1.0, "learning_rate": 4.865870700482127e-05, "loss": 4.769, "step": 81920 },
    { "epoch": 1.0, "learning_rate": 4.865032105731075e-05, "loss": 4.7632, "step": 82432 },
    { "epoch": 1.0, "learning_rate": 4.8641935109800225e-05, "loss": 4.7461, "step": 82944 },
    { "epoch": 1.0, "learning_rate": 4.8633549162289705e-05, "loss": 4.7442, "step": 83456 },
    { "epoch": 1.0, "learning_rate": 4.8625163214779185e-05, "loss": 4.7382, "step": 83968 },
    { "epoch": 1.0, "learning_rate": 4.8616777267268665e-05, "loss": 4.7423, "step": 84480 },
    { "epoch": 1.0, "learning_rate": 4.8608391319758145e-05, "loss": 4.7559, "step": 84992 },
    { "epoch": 1.0, "learning_rate": 4.8600021751051354e-05, "loss": 4.7366, "step": 85504 },
    { "epoch": 1.0, "learning_rate": 4.8591635803540834e-05, "loss": 4.7437, "step": 86016 },
    { "epoch": 1.0, "learning_rate": 4.8583249856030314e-05, "loss": 4.7549, "step": 86528 },
    { "epoch": 1.0, "learning_rate": 4.8574863908519794e-05, "loss": 4.7309, "step": 87040 },
    { "epoch": 1.0, "learning_rate": 4.856649433981301e-05, "loss": 4.726, "step": 87552 },
    { "epoch": 1.0, "learning_rate": 4.855810839230249e-05, "loss": 4.7266, "step": 88064 },
    { "epoch": 1.0, "learning_rate": 4.854972244479197e-05, "loss": 4.7373, "step": 88576 },
    { "epoch": 1.0, "learning_rate": 4.854133649728145e-05, "loss": 4.7181, "step": 89088 },
    { "epoch": 1.0, "learning_rate": 4.853295054977093e-05, "loss": 4.7208, "step": 89600 },
    { "epoch": 1.0, "learning_rate": 4.852458098106414e-05, "loss": 4.718, "step": 90112 },
    { "epoch": 1.0, "learning_rate": 4.851619503355362e-05, "loss": 4.717, "step": 90624 },
    { "epoch": 1.0, "learning_rate": 4.85078090860431e-05, "loss": 4.6982, "step": 91136 },
    { "epoch": 1.01, "learning_rate": 4.849942313853258e-05, "loss": 4.7036, "step": 91648 },
    { "epoch": 1.01, "learning_rate": 4.849103719102206e-05, "loss": 4.7006, "step": 92160 },
    { "epoch": 1.01, "learning_rate": 4.848265124351154e-05, "loss": 4.7079, "step": 92672 },
    { "epoch": 1.01, "learning_rate": 4.847426529600102e-05, "loss": 4.7124, "step": 93184 },
    { "epoch": 1.01, "learning_rate": 4.846589572729423e-05, "loss": 4.6995, "step": 93696 },
    { "epoch": 1.01, "learning_rate": 4.845750977978371e-05, "loss": 4.6965, "step": 94208 },
    { "epoch": 1.01, "learning_rate": 4.8449123832273194e-05, "loss": 4.7056, "step": 94720 },
    { "epoch": 1.01, "learning_rate": 4.8440737884762674e-05, "loss": 4.6788, "step": 95232 },
    { "epoch": 1.01, "learning_rate": 4.8432351937252154e-05, "loss": 4.6874, "step": 95744 },
    { "epoch": 1.01, "learning_rate": 4.8423965989741634e-05, "loss": 4.6753, "step": 96256 },
    { "epoch": 1.01, "learning_rate": 4.8415580042231114e-05, "loss": 4.6816, "step": 96768 },
    { "epoch": 1.01, "learning_rate": 4.840719409472059e-05, "loss": 4.674, "step": 97280 },
    { "epoch": 1.01, "learning_rate": 4.839880814721007e-05, "loss": 4.6878, "step": 97792 },
    { "epoch": 1.01, "learning_rate": 4.839043857850328e-05, "loss": 4.6702, "step": 98304 },
    { "epoch": 1.01, "learning_rate": 4.838205263099276e-05, "loss": 4.6765, "step": 98816 },
    { "epoch": 1.01, "learning_rate": 4.8373666683482236e-05, "loss": 4.6707, "step": 99328 },
    { "epoch": 1.01, "learning_rate": 4.8365280735971716e-05, "loss": 4.676, "step": 99840 },
    { "epoch": 1.01, "learning_rate": 4.8356894788461196e-05, "loss": 4.6713, "step": 100352 },
    { "epoch": 1.01, "learning_rate": 4.8348508840950676e-05, "loss": 4.6614, "step": 100864 },
    { "epoch": 1.01, "learning_rate": 4.8340122893440156e-05, "loss": 4.649, "step": 101376 },
    { "epoch": 1.01, "learning_rate": 4.833175332473337e-05, "loss": 4.673, "step": 101888 },
    { "epoch": 1.01, "learning_rate": 4.832336737722285e-05, "loss": 4.6592, "step": 102400 },
    { "epoch": 1.01, "learning_rate": 4.831498142971233e-05, "loss": 4.6515, "step": 102912 },
    { "epoch": 1.01, "learning_rate": 4.830659548220181e-05, "loss": 4.6462, "step": 103424 },
    { "epoch": 1.01, "learning_rate": 4.829820953469129e-05, "loss": 4.6539, "step": 103936 },
    { "epoch": 1.01, "learning_rate": 4.82898399659845e-05, "loss": 4.6432, "step": 104448 },
    { "epoch": 1.01, "learning_rate": 4.828145401847398e-05, "loss": 4.6612, "step": 104960 },
    { "epoch": 1.01, "learning_rate": 4.827306807096346e-05, "loss": 4.6428, "step": 105472 },
    { "epoch": 1.01, "learning_rate": 4.826468212345294e-05, "loss": 4.64, "step": 105984 },
    { "epoch": 1.01, "learning_rate": 4.825629617594242e-05, "loss": 4.6505, "step": 106496 },
    { "epoch": 1.01, "learning_rate": 4.82479102284319e-05, "loss": 4.6292, "step": 107008 },
    { "epoch": 1.01, "learning_rate": 4.823952428092138e-05, "loss": 4.6244, "step": 107520 },
    { "epoch": 1.01, "learning_rate": 4.823113833341086e-05, "loss": 4.635, "step": 108032 },
    { "epoch": 1.01, "learning_rate": 4.8222768764704077e-05, "loss": 4.6161, "step": 108544 },
    { "epoch": 1.01, "learning_rate": 4.8214399195997286e-05, "loss": 4.6236, "step": 109056 },
    { "epoch": 1.01, "learning_rate": 4.8206013248486766e-05, "loss": 4.6339, "step": 109568 },
    { "epoch": 1.01, "learning_rate": 4.8197627300976246e-05, "loss": 4.6395, "step": 110080 },
    { "epoch": 1.01, "learning_rate": 4.8189257732269455e-05, "loss": 4.6243, "step": 110592 },
    { "epoch": 1.01, "learning_rate": 4.8180871784758935e-05, "loss": 4.6161, "step": 111104 },
    { "epoch": 1.01, "learning_rate": 4.8172502216052144e-05, "loss": 4.6073, "step": 111616 },
    { "epoch": 1.01, "learning_rate": 4.8164116268541624e-05, "loss": 4.6283, "step": 112128 },
    { "epoch": 1.01, "learning_rate": 4.8155730321031104e-05, "loss": 4.6211, "step": 112640 },
    { "epoch": 1.01, "learning_rate": 4.8147344373520584e-05, "loss": 4.6226, "step": 113152 },
    { "epoch": 1.01, "learning_rate": 4.8138958426010064e-05, "loss": 4.6207, "step": 113664 },
    { "epoch": 1.01, "learning_rate": 4.813057247849955e-05, "loss": 4.629, "step": 114176 },
    { "epoch": 1.01, "learning_rate": 4.812218653098903e-05, "loss": 4.6238, "step": 114688 },
    { "epoch": 1.01, "learning_rate": 4.811380058347851e-05, "loss": 4.6028, "step": 115200 },
    { "epoch": 1.01, "learning_rate": 4.810541463596799e-05, "loss": 4.6123, "step": 115712 },
    { "epoch": 1.01, "learning_rate": 4.809702868845747e-05, "loss": 4.6107, "step": 116224 },
    { "epoch": 1.01, "learning_rate": 4.808864274094695e-05, "loss": 4.5939, "step": 116736 },
    { "epoch": 1.01, "learning_rate": 4.808025679343642e-05, "loss": 4.6228, "step": 117248 },
    { "epoch": 1.01, "learning_rate": 4.80718708459259e-05, "loss": 4.6071, "step": 117760 },
    { "epoch": 1.01, "learning_rate": 4.806348489841538e-05, "loss": 4.6119, "step": 118272 },
    { "epoch": 1.01, "learning_rate": 4.80551153297086e-05, "loss": 4.6095, "step": 118784 },
    { "epoch": 1.01, "learning_rate": 4.804672938219807e-05, "loss": 4.5837, "step": 119296 },
    { "epoch": 1.01, "learning_rate": 4.803835981349129e-05, "loss": 4.5998, "step": 119808 },
    { "epoch": 1.01, "learning_rate": 4.802997386598077e-05, "loss": 4.5992, "step": 120320 },
    { "epoch": 1.01, "learning_rate": 4.802158791847025e-05, "loss": 4.5989, "step": 120832 },
    { "epoch": 1.01, "learning_rate": 4.801320197095973e-05, "loss": 4.5975, "step": 121344 },
    { "epoch": 1.01, "learning_rate": 4.800481602344921e-05, "loss": 4.5933, "step": 121856 },
    { "epoch": 1.02, "learning_rate": 4.799643007593869e-05, "loss": 4.5922, "step": 122368 },
    { "epoch": 1.02, "learning_rate": 4.798804412842817e-05, "loss": 4.5905, "step": 122880 },
    { "epoch": 1.02, "learning_rate": 4.797965818091765e-05, "loss": 4.5755, "step": 123392 },
    { "epoch": 1.02, "learning_rate": 4.797127223340713e-05, "loss": 4.5825, "step": 123904 },
    { "epoch": 1.02, "learning_rate": 4.796288628589661e-05, "loss": 4.5849, "step": 124416 },
    { "epoch": 1.02, "learning_rate": 4.795450033838609e-05, "loss": 4.5893, "step": 124928 },
    { "epoch": 1.02, "learning_rate": 4.79461307696793e-05, "loss": 4.5769, "step": 125440 },
    { "epoch": 1.02, "learning_rate": 4.793774482216878e-05, "loss": 4.5694, "step": 125952 },
    { "epoch": 1.02, "learning_rate": 4.792935887465826e-05, "loss": 4.57, "step": 126464 },
    { "epoch": 1.02, "learning_rate": 4.792097292714774e-05, "loss": 4.5816, "step": 126976 },
    { "epoch": 1.02, "learning_rate": 4.7912586979637217e-05, "loss": 4.5785, "step": 127488 },
    { "epoch": 1.02, "learning_rate": 4.7904201032126697e-05, "loss": 4.5719, "step": 128000 },
    { "epoch": 1.02, "learning_rate": 4.789581508461618e-05, "loss": 4.5632, "step": 128512 },
    { "epoch": 1.02, "learning_rate": 4.788742913710566e-05, "loss": 4.557, "step": 129024 },
    { "epoch": 1.02, "learning_rate": 4.787905956839887e-05, "loss": 4.5668, "step": 129536 },
    { "epoch": 1.02, "learning_rate": 4.787067362088835e-05, "loss": 4.5523, "step": 130048 },
    { "epoch": 1.02, "learning_rate": 4.786228767337783e-05, "loss": 4.557, "step": 130560 },
    { "epoch": 1.02, "learning_rate": 4.785390172586731e-05, "loss": 4.5582, "step": 131072 },
    { "epoch": 1.02, "learning_rate": 4.7845515778356785e-05, "loss": 4.5585, "step": 131584 },
    { "epoch": 1.02, "learning_rate": 4.783714620965e-05, "loss": 4.5573, "step": 132096 },
    { "epoch": 1.02, "learning_rate": 4.782876026213948e-05, "loss": 4.5602, "step": 132608 },
    { "epoch": 1.02, "learning_rate": 4.782037431462896e-05, "loss": 4.549, "step": 133120 },
    { "epoch": 1.02, "learning_rate": 4.7811988367118434e-05, "loss": 4.5513, "step": 133632 },
    { "epoch": 1.02, "learning_rate": 4.780360241960792e-05, "loss": 4.5479, "step": 134144 },
    { "epoch": 1.02, "learning_rate": 4.779523285090114e-05, "loss": 4.5426, "step": 134656 },
    { "epoch": 1.02, "learning_rate": 4.778684690339061e-05, "loss": 4.5576, "step": 135168 },
    { "epoch": 1.02, "learning_rate": 4.777846095588009e-05, "loss": 4.5527, "step": 135680 },
    { "epoch": 1.02, "learning_rate": 4.777007500836957e-05, "loss": 4.5517, "step": 136192 },
    { "epoch": 1.02, "learning_rate": 4.776168906085905e-05, "loss": 4.5423, "step": 136704 },
    { "epoch": 1.02, "learning_rate": 4.775331949215226e-05, "loss": 4.549, "step": 137216 },
    { "epoch": 1.02, "learning_rate": 4.774493354464174e-05, "loss": 4.5478, "step": 137728 },
    { "epoch": 1.02, "learning_rate": 4.773654759713122e-05, "loss": 4.5354, "step": 138240 },
    { "epoch": 1.02, "learning_rate": 4.77281616496207e-05, "loss": 4.5538, "step": 138752 },
    { "epoch": 1.02, "learning_rate": 4.771979208091391e-05, "loss": 4.5402, "step": 139264 },
    { "epoch": 1.02, "learning_rate": 4.771140613340339e-05, "loss": 4.5434, "step": 139776 },
    { "epoch": 1.02, "learning_rate": 4.7703020185892875e-05, "loss": 4.5264, "step": 140288 },
    { "epoch": 1.02, "learning_rate": 4.7694634238382355e-05, "loss": 4.5276, "step": 140800 },
    { "epoch": 1.02, "learning_rate": 4.7686248290871835e-05, "loss": 4.5213, "step": 141312 },
    { "epoch": 1.02, "learning_rate": 4.7677862343361315e-05, "loss": 4.5242, "step": 141824 },
    { "epoch": 1.02, "learning_rate": 4.7669476395850795e-05, "loss": 4.5212, "step": 142336 },
    { "epoch": 1.02, "learning_rate": 4.7661090448340274e-05, "loss": 4.5369, "step": 142848 },
    { "epoch": 1.02, "learning_rate": 4.7652704500829754e-05, "loss": 4.5394, "step": 143360 },
    { "epoch": 1.02, "learning_rate": 4.764435131092669e-05, "loss": 4.5277, "step": 143872 },
    { "epoch": 1.02, "learning_rate": 4.763596536341617e-05, "loss": 4.5213, "step": 144384 },
    { "epoch": 1.02, "learning_rate": 4.762757941590565e-05, "loss": 4.5263, "step": 144896 },
    { "epoch": 1.02, "learning_rate": 4.761919346839513e-05, "loss": 4.5266, "step": 145408 },
    { "epoch": 1.02, "learning_rate": 4.761080752088461e-05, "loss": 4.5347, "step": 145920 },
    { "epoch": 1.02, "learning_rate": 4.760242157337409e-05, "loss": 4.5289, "step": 146432 },
    { "epoch": 1.02, "learning_rate": 4.759403562586357e-05, "loss": 4.5215, "step": 146944 },
    { "epoch": 1.02, "learning_rate": 4.758564967835306e-05, "loss": 4.5136, "step": 147456 },
    { "epoch": 1.02, "learning_rate": 4.757726373084254e-05, "loss": 4.5216, "step": 147968 },
    { "epoch": 1.02, "learning_rate": 4.756887778333202e-05, "loss": 4.5135, "step": 148480 },
    { "epoch": 1.02, "learning_rate": 4.756050821462523e-05, "loss": 4.5187, "step": 148992 },
    { "epoch": 1.02, "learning_rate": 4.755212226711471e-05, "loss": 4.5139, "step": 149504 },
    { "epoch": 1.02, "learning_rate": 4.754373631960419e-05, "loss": 4.5094, "step": 150016 },
    { "epoch": 1.02, "learning_rate": 4.753535037209367e-05, "loss": 4.5215, "step": 150528 },
    { "epoch": 1.02, "learning_rate": 4.752696442458315e-05, "loss": 4.509, "step": 151040 },
    { "epoch": 1.02, "learning_rate": 4.751857847707262e-05, "loss": 4.5043, "step": 151552 },
    { "epoch": 1.02, "learning_rate": 4.75101925295621e-05, "loss": 4.5076, "step": 152064 },
    { "epoch": 1.02, "learning_rate": 4.750180658205158e-05, "loss": 4.5039, "step": 152576 },
    { "epoch": 1.03, "eval_loss": 4.4751152992248535, "eval_runtime": 300.7316, "eval_samples_per_second": 1268.876, "eval_steps_per_second": 39.653, "step": 152640 },
    { "epoch": 0.0, "learning_rate": 4.749342063454106e-05, "loss": 4.4902, "step": 153088 },
    { "epoch": 0.0, "learning_rate": 4.748503468703054e-05, "loss": 4.5029, "step": 153600 },
    { "epoch": 0.0, "learning_rate": 4.747664873952003e-05, "loss": 4.5188, "step": 154112 },
    { "epoch": 0.0, "learning_rate": 4.746826279200951e-05, "loss": 4.498, "step": 154624 },
    { "epoch": 0.0, "learning_rate": 4.745987684449899e-05, "loss": 4.5092, "step": 155136 },
    { "epoch": 0.0, "learning_rate": 4.745149089698847e-05, "loss": 4.4903, "step": 155648 },
    { "epoch": 0.0, "learning_rate": 4.744310494947795e-05, "loss": 4.5046, "step": 156160 },
    { "epoch": 0.0, "learning_rate": 4.743471900196743e-05, "loss": 4.48, "step": 156672 },
    { "epoch": 0.0, "learning_rate": 4.742633305445691e-05, "loss": 4.4979, "step": 157184 },
    { "epoch": 0.0, "learning_rate": 4.741794710694639e-05, "loss": 4.4964, "step": 157696 },
    { "epoch": 0.0, "learning_rate": 4.740956115943586e-05, "loss": 4.4971, "step": 158208 },
    { "epoch": 0.0, "learning_rate": 4.740117521192534e-05, "loss": 4.505, "step": 158720 },
    { "epoch": 0.0, "learning_rate": 4.7392805643218556e-05, "loss": 4.4806, "step": 159232 },
    { "epoch": 0.0, "learning_rate": 4.7384419695708036e-05, "loss": 4.4802, "step": 159744 },
    { "epoch": 0.0, "learning_rate": 4.737603374819751e-05, "loss": 4.4779, "step": 160256 },
    { "epoch": 0.0, "learning_rate": 4.736764780068699e-05, "loss": 4.4864, "step": 160768 },
    { "epoch": 0.0, "learning_rate": 4.7359261853176476e-05, "loss": 4.4925, "step": 161280 },
    { "epoch": 0.0, "learning_rate": 4.7350875905665956e-05, "loss": 4.4838, "step": 161792 },
    { "epoch": 0.0, "learning_rate": 4.7342489958155436e-05, "loss": 4.488, "step": 162304 },
    { "epoch": 0.0, "learning_rate": 4.7334104010644916e-05, "loss": 4.5031, "step": 162816 },
    { "epoch": 0.0, "learning_rate": 4.7325718063134396e-05, "loss": 4.4811, "step": 163328 },
    { "epoch": 0.0, "learning_rate": 4.7317332115623876e-05, "loss": 4.4741, "step": 163840 },
    { "epoch": 0.0, "learning_rate": 4.7308946168113356e-05, "loss": 4.4801, "step": 164352 },
    { "epoch": 0.0, "learning_rate": 4.7300576599406565e-05, "loss": 4.4903, "step": 164864 },
    { "epoch": 0.0, "learning_rate": 4.7292190651896045e-05, "loss": 4.4735, "step": 165376 },
    { "epoch": 0.0, "learning_rate": 4.7283804704385525e-05, "loss": 4.4747, "step": 165888 },
    { "epoch": 0.0, "learning_rate": 4.7275418756875005e-05, "loss": 4.4739, "step": 166400 },
    { "epoch": 0.0, "learning_rate": 4.7267049188168214e-05, "loss": 4.4738, "step": 166912 },
    { "epoch": 0.0, "learning_rate": 4.7258663240657694e-05, "loss": 4.4586, "step": 167424 },
    { "epoch": 0.01, "learning_rate": 4.7250277293147174e-05, "loss": 4.4651, "step": 167936 },
    { "epoch": 0.01, "learning_rate": 4.724190772444039e-05, "loss": 4.4669, "step": 168448 },
    { "epoch": 0.01, "learning_rate": 4.72335381557336e-05, "loss": 4.4683, "step": 168960 },
    { "epoch": 0.01, "learning_rate": 4.722515220822308e-05, "loss": 4.4806, "step": 169472 },
    { "epoch": 0.01, "learning_rate": 4.721676626071256e-05, "loss": 4.4614, "step": 169984 },
    { "epoch": 0.01, "learning_rate": 4.720838031320204e-05, "loss": 4.4646, "step": 170496 },
    { "epoch": 0.01, "learning_rate": 4.719999436569152e-05, "loss": 4.472, "step": 171008 },
    { "epoch": 0.01, "learning_rate": 4.7191608418181e-05, "loss": 4.4545, "step": 171520 },
    { "epoch": 0.01, "learning_rate": 4.718322247067048e-05, "loss": 4.4537, "step": 172032 },
    { "epoch": 0.01, "learning_rate": 4.717483652315996e-05, "loss": 4.4503, "step": 172544 },
    { "epoch": 0.01, "learning_rate": 4.716645057564944e-05, "loss": 4.4558, "step": 173056 },
    { "epoch": 0.01, "learning_rate": 4.715806462813892e-05, "loss": 4.4467, "step": 173568 },
    { "epoch": 0.01, "learning_rate": 4.714969505943213e-05, "loss": 4.4659, "step": 174080 },
    { "epoch": 0.01, "learning_rate": 4.7141309111921614e-05, "loss": 4.4436, "step": 174592 },
    { "epoch": 0.01, "learning_rate": 4.7132923164411094e-05, "loss": 4.4603, "step": 175104 },
    { "epoch": 0.01, "learning_rate": 4.7124537216900574e-05, "loss": 4.451, "step": 175616 },
    { "epoch": 0.01, "learning_rate": 4.711616764819378e-05, "loss": 4.4524, "step": 176128 },
    { "epoch": 0.01, "learning_rate": 4.710778170068326e-05, "loss": 4.4563, "step": 176640 },
    { "epoch": 0.01, "learning_rate": 4.709939575317274e-05, "loss": 4.4415, "step": 177152 },
    { "epoch": 0.01, "learning_rate": 4.709100980566222e-05, "loss": 4.4333, "step": 177664 },
    { "epoch": 0.01, "learning_rate": 4.7082623858151696e-05, "loss": 4.4575, "step": 178176 },
    { "epoch": 0.01, "learning_rate": 4.7074237910641176e-05, "loss": 4.4479, "step": 178688 },
    { "epoch": 0.01, "learning_rate": 4.7065851963130656e-05, "loss": 4.4375, "step": 179200 },
    { "epoch": 0.01, "learning_rate": 4.7057466015620136e-05, "loss": 4.4397, "step": 179712 },
    { "epoch": 0.01, "learning_rate": 4.704909644691335e-05, "loss": 4.4458, "step": 180224 },
    { "epoch": 0.01, "learning_rate": 4.704071049940283e-05, "loss": 4.4294, "step": 180736 },
    { "epoch": 0.01, "learning_rate": 4.703232455189231e-05, "loss": 4.4499, "step": 181248 },
    { "epoch": 0.01, "learning_rate": 4.702393860438179e-05, "loss": 4.4353, "step": 181760 },
    { "epoch": 0.01, "learning_rate": 4.7015569035675e-05, "loss": 4.433, "step": 182272 },
    { "epoch": 0.01, "learning_rate": 4.700718308816448e-05, "loss": 4.446, "step": 182784 },
    { "epoch": 0.01, "learning_rate": 4.699879714065396e-05, "loss": 4.4254, "step": 183296 },
    { "epoch": 0.01, "learning_rate": 4.699041119314344e-05, "loss": 4.4227, "step": 183808 },
    { "epoch": 0.01, "learning_rate": 4.698204162443665e-05, "loss": 4.4353, "step": 184320 },
    { "epoch": 0.01, "learning_rate": 4.697365567692613e-05, "loss": 4.418, "step": 184832 },
    { "epoch": 0.01, "learning_rate": 4.696526972941561e-05, "loss": 4.4217, "step": 185344 },
    { "epoch": 0.01, "learning_rate": 4.695688378190509e-05, "loss": 4.4349, "step": 185856 },
    { "epoch": 0.01, "learning_rate": 4.694849783439457e-05, "loss": 4.4392, "step": 186368 },
    { "epoch": 0.01, "learning_rate": 4.694011188688405e-05, "loss": 4.4227, "step": 186880 },
    { "epoch": 0.01, "learning_rate": 4.693172593937353e-05, "loss": 4.4215, "step": 187392 },
    { "epoch": 0.01, "learning_rate": 4.6923356370666746e-05, "loss": 4.41, "step": 187904 },
    { "epoch": 0.01, "learning_rate": 4.6914970423156226e-05, "loss": 4.4345, "step": 188416 },
    { "epoch": 0.01, "learning_rate": 4.6906584475645706e-05, "loss": 4.4252, "step": 188928 },
    { "epoch": 0.01, "learning_rate": 4.6898198528135186e-05, "loss": 4.4321, "step": 189440 },
    { "epoch": 0.01, "learning_rate": 4.6889812580624665e-05, "loss": 4.4262, "step": 189952 },
    { "epoch": 0.01, "learning_rate": 4.6881426633114145e-05, "loss": 4.4357, "step": 190464 },
    { "epoch": 0.01, "learning_rate": 4.6873040685603625e-05, "loss": 4.436, "step": 190976 },
    { "epoch": 0.01, "learning_rate": 4.6864671116896834e-05, "loss": 4.41, "step": 191488 },
    { "epoch": 0.01, "learning_rate": 4.6856285169386314e-05, "loss": 4.4224, "step": 192000 },
    { "epoch": 0.01, "learning_rate": 4.6847899221875794e-05, "loss": 4.427, "step": 192512 },
    { "epoch": 0.01, "learning_rate": 4.6839513274365274e-05, "loss": 4.4073, "step": 193024 },
    { "epoch": 0.01, "learning_rate": 4.6831143705658483e-05, "loss": 4.4333, "step": 193536 },
    { "epoch": 0.01, "learning_rate": 4.682275775814797e-05, "loss": 4.4229, "step": 194048 },
    { "epoch": 0.01, "learning_rate": 4.681437181063745e-05, "loss": 4.4277, "step": 194560 },
    { "epoch": 0.01, "learning_rate": 4.680598586312693e-05, "loss": 4.4274, "step": 195072 },
    { "epoch": 0.01, "learning_rate": 4.679759991561641e-05, "loss": 4.4019, "step": 195584 },
    { "epoch": 0.01, "learning_rate": 4.678921396810588e-05, "loss": 4.4177, "step": 196096 },
    { "epoch": 0.01, "learning_rate": 4.678082802059536e-05, "loss": 4.4166, "step": 196608 },
    { "epoch": 0.01, "learning_rate": 4.677244207308484e-05, "loss": 4.4209, "step": 197120 },
    { "epoch": 0.01, "learning_rate": 4.676407250437806e-05, "loss": 4.4153, "step": 197632 },
    { "epoch": 0.01, "learning_rate": 4.675570293567127e-05, "loss": 4.4163, "step": 198144 },
    { "epoch": 0.02, "learning_rate": 4.674731698816075e-05, "loss": 4.413, "step": 198656 },
    { "epoch": 0.02, "learning_rate": 4.673893104065023e-05, "loss": 4.4119, "step": 199168 },
    { "epoch": 0.02, "learning_rate": 4.673054509313971e-05, "loss": 4.3993, "step": 199680 },
    { "epoch": 0.02, "learning_rate": 4.6722175524432924e-05, "loss": 4.4061, "step": 200192 },
    { "epoch": 0.02, "learning_rate": 4.6713789576922404e-05, "loss": 4.4115, "step": 200704 },
    { "epoch": 0.02, "learning_rate": 4.6705403629411884e-05, "loss": 4.414, "step": 201216 },
    { "epoch": 0.02, "learning_rate": 4.669701768190136e-05, "loss": 4.405, "step": 201728 },
    { "epoch": 0.02, "learning_rate": 4.668863173439084e-05, "loss": 4.3936, "step": 202240 },
    { "epoch": 0.02, "learning_rate": 4.668024578688032e-05, "loss": 4.3984, "step": 202752 },
    { "epoch": 0.02, "learning_rate": 4.66718598393698e-05, "loss": 4.4094, "step": 203264 },
    { "epoch": 0.02, "learning_rate": 4.6663490270663006e-05, "loss": 4.4033, "step": 203776 },
    { "epoch": 0.02, "learning_rate": 4.6655104323152486e-05, "loss": 4.4018, "step": 204288 },
    { "epoch": 0.02, "learning_rate": 4.6646718375641966e-05, "loss": 4.3973, "step": 204800 },
    { "epoch": 0.02, "learning_rate": 4.6638332428131446e-05, "loss": 4.3856, "step": 205312 },
    { "epoch": 0.02, "learning_rate": 4.6629946480620926e-05, "loss": 4.4029, "step": 205824 },
    { "epoch": 0.02, "learning_rate": 4.6621560533110406e-05, "loss": 4.3806, "step": 206336 },
    { "epoch": 0.02, "learning_rate": 4.661317458559989e-05, "loss": 4.3945, "step": 206848 },
    { "epoch": 0.02, "learning_rate": 4.66048050168931e-05, "loss": 4.3852, "step": 207360 },
    { "epoch": 0.02, "learning_rate": 4.659641906938258e-05, "loss": 4.4008, "step": 207872 },
    { "epoch": 0.02, "learning_rate": 4.658803312187206e-05, "loss": 4.3877, "step": 208384 },
    { "epoch": 0.02, "learning_rate": 4.657964717436154e-05, "loss": 4.3982, "step": 208896 },
    { "epoch": 0.02, "learning_rate": 4.657127760565475e-05, "loss": 4.3862, "step": 209408 },
    { "epoch": 0.02, "learning_rate": 4.656290803694796e-05, "loss": 4.3842, "step": 209920 },
    { "epoch": 0.02, "learning_rate": 4.655452208943744e-05, "loss": 4.392, "step": 210432 },
    { "epoch": 0.02, "learning_rate": 4.654613614192692e-05, "loss": 4.3783, "step": 210944 },
    { "epoch": 0.02, "learning_rate": 4.65377501944164e-05, "loss": 4.3983, "step": 211456 },
    { "epoch": 0.02, "learning_rate": 4.6529380625709615e-05, "loss": 4.3929, "step": 211968 },
    { "epoch": 0.02, "learning_rate": 4.6520994678199095e-05, "loss": 4.3932, "step": 212480 },
    { "epoch": 0.02, "learning_rate": 4.6512608730688575e-05, "loss": 4.3831, "step": 212992 },
    { "epoch": 0.02, "learning_rate": 4.6504222783178055e-05, "loss": 4.389, "step": 213504 },
    { "epoch": 0.02, "learning_rate": 4.6495836835667535e-05, "loss": 4.3933, "step": 214016 },
    { "epoch": 0.02, "learning_rate": 4.6487467266960744e-05, "loss": 4.3733, "step": 214528 },
    { "epoch": 0.02, "learning_rate": 4.6479081319450224e-05, "loss": 4.3967, "step": 215040 },
    { "epoch": 0.02, "learning_rate": 4.6470695371939704e-05, "loss": 4.3854, "step": 215552 },
    { "epoch": 0.02, "learning_rate": 4.6462309424429184e-05, "loss": 4.3857, "step": 216064 },
    { "epoch": 0.02, "learning_rate": 4.6453923476918664e-05, "loss": 4.3757, "step": 216576 },
    { "epoch": 0.02, "learning_rate": 4.6445537529408144e-05, "loss": 4.3776, "step": 217088 },
    { "epoch": 0.02, "learning_rate": 4.643716796070135e-05, "loss": 4.3632, "step": 217600 },
    { "epoch": 0.02, "learning_rate": 4.642878201319083e-05, "loss": 4.3743, "step": 218112 },
    { "epoch": 0.02, "learning_rate": 4.642039606568031e-05, "loss": 4.37, "step": 218624 },
    { "epoch": 0.02, "learning_rate": 4.641202649697353e-05, "loss": 4.3845, "step": 219136 },
    { "epoch": 0.02, "learning_rate": 4.640364054946301e-05, "loss": 4.387, "step": 219648 },
    { "epoch": 0.02, "learning_rate": 4.639525460195249e-05, "loss": 4.3773, "step": 220160 },
    { "epoch": 0.02, "learning_rate": 4.638686865444197e-05, "loss": 4.3721, "step": 220672 },
    { "epoch": 0.02, "learning_rate": 4.637848270693145e-05, "loss": 4.3741, "step": 221184 },
    { "epoch": 0.02, "learning_rate": 4.637009675942093e-05, "loss": 4.3784, "step": 221696 },
    { "epoch": 0.02, "learning_rate": 4.636171081191041e-05, "loss": 4.3838, "step": 222208 },
    { "epoch": 0.02, "learning_rate": 4.635332486439989e-05, "loss": 4.3785, "step": 222720 },
    { "epoch": 0.02, "learning_rate": 4.634493891688936e-05, "loss": 4.3695, "step": 223232 },
    { "epoch": 0.02, "learning_rate": 4.633655296937884e-05, "loss": 4.3736, "step": 223744 },
    { "epoch": 0.02, "learning_rate": 4.632816702186832e-05, "loss": 4.3762, "step": 224256 },
    { "epoch": 0.02, "learning_rate": 4.63197810743578e-05, "loss": 4.3651, "step": 224768 },
    { "epoch": 0.02, "learning_rate": 4.631141150565102e-05, "loss": 4.3739, "step": 225280 },
    { "epoch": 0.02, "learning_rate": 4.63030255581405e-05, "loss": 4.3682, "step": 225792 },
    { "epoch": 0.02, "learning_rate": 4.629463961062998e-05, "loss": 4.3651, "step": 226304 },
    { "epoch": 0.02, "learning_rate": 4.628625366311946e-05, "loss": 4.3786, "step": 226816 },
    { "epoch": 0.02, "learning_rate": 4.6277884094412666e-05, "loss": 4.366, "step": 227328 },
    { "epoch": 0.02, "learning_rate": 4.6269498146902146e-05, "loss": 4.3607, "step": 227840 },
    { "epoch": 0.02, "learning_rate": 4.626112857819536e-05, "loss": 4.3642, "step": 228352 },
    { "epoch": 0.02, "learning_rate": 4.6252742630684835e-05, "loss": 4.357, "step": 228864 },
    { "epoch": 0.03, "eval_loss": 4.340610504150391, "eval_runtime": 301.3171, "eval_samples_per_second": 1266.41, "eval_steps_per_second": 39.576, "step": 228960 },
    { "epoch": 1.0, "learning_rate": 4.6244356683174315e-05, "loss": 4.3551, "step": 229376 },
    { "epoch": 1.0, "learning_rate": 4.6235970735663795e-05, "loss": 4.3574, "step": 229888 },
    { "epoch": 1.0, "learning_rate": 4.622760116695701e-05, "loss": 4.3778, "step": 230400 },
    { "epoch": 1.0, "learning_rate": 4.6219215219446484e-05, "loss": 4.3581, "step": 230912 },
    { "epoch": 1.0, "learning_rate": 4.621082927193597e-05, "loss": 4.375, "step": 231424 },
    { "epoch": 1.0, "learning_rate": 4.620244332442545e-05, "loss": 4.3486, "step": 231936 },
    { "epoch": 1.0, "learning_rate": 4.619405737691493e-05, "loss": 4.3659, "step": 232448 },
    { "epoch": 1.0, "learning_rate": 4.618567142940441e-05, "loss": 4.3423, "step": 232960 },
    { "epoch": 1.0, "learning_rate": 4.617728548189389e-05, "loss": 4.3614, "step": 233472 },
    { "epoch": 1.0, "learning_rate": 4.616889953438337e-05, "loss": 4.3626, "step": 233984 },
    { "epoch": 1.0, "learning_rate": 4.616052996567658e-05, "loss": 4.3588, "step": 234496 },
    { "epoch": 1.0, "learning_rate": 4.615214401816606e-05, "loss": 4.3655, "step": 235008 },
    { "epoch": 1.0, "learning_rate": 4.614377444945927e-05, "loss": 4.3457, "step": 235520 },
    { "epoch": 1.0, "learning_rate": 4.613538850194875e-05, "loss": 4.3472, "step": 236032 },
    { "epoch": 1.0, "learning_rate": 4.612700255443823e-05, "loss": 4.3454, "step": 236544 },
    { "epoch": 1.0, "learning_rate": 4.611861660692771e-05, "loss": 4.3479, "step": 237056 },
    { "epoch": 1.0, "learning_rate": 4.611023065941719e-05, "loss": 4.3582, "step": 237568 },
    { "epoch": 1.0, "learning_rate": 4.610184471190667e-05, "loss": 4.3524, "step": 238080 },
    { "epoch": 1.0, "learning_rate": 4.6093458764396156e-05, "loss": 4.3532, "step": 238592 },
    { "epoch": 1.0, "learning_rate": 4.6085072816885636e-05, "loss": 4.3678, "step": 239104 },
    { "epoch": 1.0, "learning_rate": 4.6076703248178845e-05, "loss": 4.3511, "step": 239616 },
    { "epoch": 1.0, "learning_rate": 4.6068317300668325e-05, "loss": 4.3481, "step": 240128 },
    { "epoch": 1.0, "learning_rate": 4.6059947731961534e-05, "loss": 4.3507, "step": 240640 },
    { "epoch": 1.0, "learning_rate": 4.6051561784451014e-05, "loss": 4.357, "step": 241152 },
    { "epoch": 1.0, "learning_rate": 4.6043175836940494e-05, "loss": 4.3424, "step": 241664 },
    { "epoch": 1.0, "learning_rate": 4.6034789889429974e-05, "loss": 4.3458, "step": 242176 },
    { "epoch": 1.0, "learning_rate": 4.6026403941919454e-05, "loss": 4.3462, "step": 242688 },
    { "epoch": 1.0, "learning_rate": 4.6018017994408933e-05, "loss": 4.3396, "step": 243200 },
    { "epoch": 1.0, "learning_rate": 4.6009632046898413e-05, "loss": 4.3332, "step": 243712 },
    { "epoch": 1.01, "learning_rate": 4.6001246099387893e-05, "loss": 4.3361, "step": 244224 },
    { "epoch": 1.01, "learning_rate": 4.599286015187737e-05, "loss": 4.3426, "step": 244736 },
    { "epoch": 1.01, "learning_rate": 4.598449058317059e-05, "loss": 4.341, "step": 245248 },
    { "epoch": 1.01, "learning_rate": 4.597610463566007e-05, "loss": 4.3527, "step": 245760 },
    { "epoch": 1.01, "learning_rate": 4.596771868814955e-05, "loss": 4.3411, "step": 246272 },
    { "epoch": 1.01, "learning_rate": 4.595934911944276e-05, "loss": 4.3333, "step": 246784 },
    { "epoch": 1.01, "learning_rate": 4.595096317193224e-05, "loss": 4.3428, "step": 247296 },
    { "epoch": 1.01, "learning_rate": 4.594257722442172e-05, "loss": 4.336, "step": 247808 },
    { "epoch": 1.01, "learning_rate": 4.59341912769112e-05, "loss": 4.3253, "step": 248320 },
    { "epoch": 1.01, "learning_rate": 4.592580532940067e-05, "loss": 4.3296, "step": 248832 },
    { "epoch": 1.01, "learning_rate": 4.591741938189015e-05, "loss": 4.3286, "step": 249344 },
    { "epoch": 1.01, "learning_rate": 4.590903343437963e-05, "loss": 4.3255, "step": 249856 },
    { "epoch": 1.01, "learning_rate": 4.590064748686911e-05, "loss": 4.3427, "step": 250368 },
    { "epoch": 1.01, "learning_rate": 4.589227791816233e-05, "loss": 4.3245, "step": 250880 },
    { "epoch": 1.01, "learning_rate": 4.588389197065181e-05, "loss": 4.3376, "step": 251392 },
    { "epoch": 1.01, "learning_rate": 4.587550602314129e-05, "loss": 4.334, "step": 251904 },
    { "epoch": 1.01, "learning_rate": 4.586712007563077e-05, "loss": 4.3273, "step": 252416 },
    { "epoch": 1.01, "learning_rate": 4.585873412812025e-05, "loss": 4.3361, "step": 252928 },
    { "epoch": 1.01, "learning_rate": 4.585034818060973e-05, "loss": 4.3204, "step": 253440 },
    { "epoch": 1.01, "learning_rate": 4.584199499070667e-05, "loss": 4.3064, "step": 253952 },
    { "epoch": 1.01, "learning_rate": 4.5833609043196145e-05, "loss": 4.3404, "step": 254464 },
    { "epoch": 1.01, "learning_rate": 4.5825223095685625e-05, "loss": 4.3273, "step": 254976 },
    { "epoch": 1.01, "learning_rate": 4.5816837148175105e-05, "loss": 4.3223, "step": 255488 },
    { "epoch": 1.01, "learning_rate": 4.5808451200664585e-05, "loss": 4.3202, "step": 256000 },
    { "epoch": 1.01, "learning_rate": 4.5800065253154065e-05, "loss": 4.3243, "step": 256512 },
    { "epoch": 1.01, "learning_rate": 4.5791679305643545e-05, "loss": 4.3139, "step": 257024 },
    { "epoch": 1.01, "learning_rate": 4.5783293358133025e-05, "loss": 4.3272, "step": 257536 },
    { "epoch": 1.01, "learning_rate": 4.577492378942624e-05, "loss": 4.3231, "step": 258048 },
    { "epoch": 1.01, "learning_rate": 4.576653784191572e-05, "loss": 4.3182, "step": 258560 },
    { "epoch": 1.01, "learning_rate": 4.575816827320893e-05, "loss": 4.3281, "step": 259072 },
    { "epoch": 1.01, "learning_rate": 4.574978232569841e-05, "loss": 4.3088, "step": 259584 },
    { "epoch": 1.01, "learning_rate": 4.574139637818789e-05, "loss": 4.3056, "step": 260096 },
    { "epoch": 1.01, "learning_rate": 4.573301043067737e-05, "loss": 4.3244, "step": 260608 },
    { "epoch": 1.01, "learning_rate": 4.572462448316685e-05, "loss": 4.2981, "step": 261120 },
    { "epoch": 1.01, "learning_rate": 4.571623853565633e-05, "loss": 4.3035, "step": 261632 },
    { "epoch": 1.01, "learning_rate": 4.570785258814581e-05, "loss": 4.3228, "step": 262144 },
    { "epoch": 1.01, "learning_rate": 4.5699499398242754e-05, "loss": 4.3258, "step": 262656 },
    { "epoch": 1.01, "learning_rate": 4.5691113450732234e-05, "loss": 4.3051, "step": 263168 },
    { "epoch": 1.01, "learning_rate": 4.5682727503221714e-05, "loss": 4.3143, "step": 263680 },
    { "epoch": 1.01, "learning_rate": 4.5674341555711194e-05, "loss": 4.2959, "step": 264192 },
    { "epoch": 1.01, "learning_rate": 4.5665955608200674e-05, "loss": 4.3209, "step": 264704 },
    { "epoch": 1.01, "learning_rate": 4.5657569660690154e-05, "loss": 4.3127, "step": 265216 },
    { "epoch": 1.01, "learning_rate": 4.5649183713179634e-05,
|
"loss": 4.3211, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5640797765669114e-05, |
|
"loss": 4.3115, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5632411818158594e-05, |
|
"loss": 4.3221, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5624025870648074e-05, |
|
"loss": 4.3261, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561565630194128e-05, |
|
"loss": 4.2982, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560728673323449e-05, |
|
"loss": 4.3154, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559890078572397e-05, |
|
"loss": 4.3169, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559051483821345e-05, |
|
"loss": 4.2921, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558212889070293e-05, |
|
"loss": 4.3253, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557374294319242e-05, |
|
"loss": 4.3135, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55653569956819e-05, |
|
"loss": 4.3153, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.555697104817138e-05, |
|
"loss": 4.3218, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554858510066086e-05, |
|
"loss": 4.2975, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554019915315033e-05, |
|
"loss": 4.3029, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553181320563981e-05, |
|
"loss": 4.3086, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552342725812929e-05, |
|
"loss": 4.3138, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551504131061877e-05, |
|
"loss": 4.3053, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550665536310825e-05, |
|
"loss": 4.3105, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549828579440146e-05, |
|
"loss": 4.3027, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548991622569468e-05, |
|
"loss": 4.3118, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5481530278184157e-05, |
|
"loss": 4.2933, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5473144330673637e-05, |
|
"loss": 4.3005, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5464758383163116e-05, |
|
"loss": 4.3045, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.545638881445633e-05, |
|
"loss": 4.3098, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5448002866945806e-05, |
|
"loss": 4.3023, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5439616919435286e-05, |
|
"loss": 4.2882, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431230971924765e-05, |
|
"loss": 4.2939, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.542286140321798e-05, |
|
"loss": 4.3028, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414475455707455e-05, |
|
"loss": 4.3037, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406089508196934e-05, |
|
"loss": 4.3017, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5397703560686414e-05, |
|
"loss": 4.2916, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5389317613175894e-05, |
|
"loss": 4.2849, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538094804446911e-05, |
|
"loss": 4.2985, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.537256209695859e-05, |
|
"loss": 4.2809, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.536417614944807e-05, |
|
"loss": 4.2904, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535579020193755e-05, |
|
"loss": 4.2833, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534740425442703e-05, |
|
"loss": 4.2981, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533901830691651e-05, |
|
"loss": 4.2878, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533063235940599e-05, |
|
"loss": 4.3018, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532224641189547e-05, |
|
"loss": 4.2812, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.531389322199241e-05, |
|
"loss": 4.2864, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530550727448189e-05, |
|
"loss": 4.2882, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529712132697137e-05, |
|
"loss": 4.2829, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528873537946085e-05, |
|
"loss": 4.2938, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5280365810754064e-05, |
|
"loss": 4.2979, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5271979863243544e-05, |
|
"loss": 4.293, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5263593915733024e-05, |
|
"loss": 4.2843, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5255207968222504e-05, |
|
"loss": 4.2886, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5246822020711984e-05, |
|
"loss": 4.2958, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5238436073201464e-05, |
|
"loss": 4.2751, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5230050125690944e-05, |
|
"loss": 4.2993, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5221664178180424e-05, |
|
"loss": 4.2844, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5213278230669904e-05, |
|
"loss": 4.2895, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5204892283159384e-05, |
|
"loss": 4.2813, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5196506335648863e-05, |
|
"loss": 4.2822, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5188120388138343e-05, |
|
"loss": 4.2652, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517975081943155e-05, |
|
"loss": 4.2818, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517136487192103e-05, |
|
"loss": 4.2721, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516297892441051e-05, |
|
"loss": 4.2847, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.515460935570373e-05, |
|
"loss": 4.2875, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.514622340819321e-05, |
|
"loss": 4.2872, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.513783746068269e-05, |
|
"loss": 4.2718, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512945151317217e-05, |
|
"loss": 4.2849, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512106556566164e-05, |
|
"loss": 4.2852, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511269599695486e-05, |
|
"loss": 4.2885, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.510431004944434e-05, |
|
"loss": 4.2861, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509592410193382e-05, |
|
"loss": 4.2767, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.508753815442329e-05, |
|
"loss": 4.2769, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507915220691277e-05, |
|
"loss": 4.2848, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507076625940225e-05, |
|
"loss": 4.2701, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.506238031189173e-05, |
|
"loss": 4.2822, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5054010743184946e-05, |
|
"loss": 4.2717, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5045624795674426e-05, |
|
"loss": 4.278, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5037238848163906e-05, |
|
"loss": 4.2875, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5028852900653386e-05, |
|
"loss": 4.2751, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5020483331946595e-05, |
|
"loss": 4.2675, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5012097384436075e-05, |
|
"loss": 4.2699, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5003711436925555e-05, |
|
"loss": 4.2713, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.257204532623291, |
|
"eval_runtime": 298.9529, |
|
"eval_samples_per_second": 1276.425, |
|
"eval_steps_per_second": 39.889, |
|
"step": 305280 |
|
}, |
|
{ "epoch": 0.0, "learning_rate": 4.4995325489415035e-05, "loss": 4.2597, "step": 305664 },
{ "epoch": 0.0, "learning_rate": 4.4986939541904515e-05, "loss": 4.2682, "step": 306176 },
{ "epoch": 0.0, "learning_rate": 4.4978553594393995e-05, "loss": 4.2821, "step": 306688 },
{ "epoch": 0.0, "learning_rate": 4.4970167646883475e-05, "loss": 4.2688, "step": 307200 },
{ "epoch": 0.0, "learning_rate": 4.4961781699372955e-05, "loss": 4.2839, "step": 307712 },
{ "epoch": 0.0, "learning_rate": 4.4953395751862435e-05, "loss": 4.2661, "step": 308224 },
{ "epoch": 0.0, "learning_rate": 4.4945009804351915e-05, "loss": 4.2728, "step": 308736 },
{ "epoch": 0.0, "learning_rate": 4.4936623856841395e-05, "loss": 4.2591, "step": 309248 },
{ "epoch": 0.0, "learning_rate": 4.492823790933088e-05, "loss": 4.2665, "step": 309760 },
{ "epoch": 0.0, "learning_rate": 4.4919851961820354e-05, "loss": 4.275, "step": 310272 },
{ "epoch": 0.0, "learning_rate": 4.4911466014309834e-05, "loss": 4.2726, "step": 310784 },
{ "epoch": 0.0, "learning_rate": 4.4903080066799314e-05, "loss": 4.2765, "step": 311296 },
{ "epoch": 0.0, "learning_rate": 4.489471049809253e-05, "loss": 4.2573, "step": 311808 },
{ "epoch": 0.0, "learning_rate": 4.4886324550582003e-05, "loss": 4.2603, "step": 312320 },
{ "epoch": 0.0, "learning_rate": 4.4877938603071483e-05, "loss": 4.2603, "step": 312832 },
{ "epoch": 0.0, "learning_rate": 4.486955265556096e-05, "loss": 4.259, "step": 313344 },
{ "epoch": 0.0, "learning_rate": 4.486116670805044e-05, "loss": 4.2716, "step": 313856 },
{ "epoch": 0.0, "learning_rate": 4.485278076053992e-05, "loss": 4.2628, "step": 314368 },
{ "epoch": 0.0, "learning_rate": 4.48443948130294e-05, "loss": 4.2681, "step": 314880 },
{ "epoch": 0.0, "learning_rate": 4.483600886551888e-05, "loss": 4.2809, "step": 315392 },
{ "epoch": 0.0, "learning_rate": 4.482762291800836e-05, "loss": 4.2672, "step": 315904 },
{ "epoch": 0.0, "learning_rate": 4.481925334930158e-05, "loss": 4.2614, "step": 316416 },
{ "epoch": 0.0, "learning_rate": 4.481086740179106e-05, "loss": 4.2623, "step": 316928 },
{ "epoch": 0.0, "learning_rate": 4.480248145428054e-05, "loss": 4.2682, "step": 317440 },
{ "epoch": 0.0, "learning_rate": 4.479409550677002e-05, "loss": 4.2578, "step": 317952 },
{ "epoch": 0.0, "learning_rate": 4.47857095592595e-05, "loss": 4.2619, "step": 318464 },
{ "epoch": 0.0, "learning_rate": 4.477732361174898e-05, "loss": 4.2593, "step": 318976 },
{ "epoch": 0.0, "learning_rate": 4.476895404304219e-05, "loss": 4.2559, "step": 319488 },
{ "epoch": 0.0, "learning_rate": 4.476056809553167e-05, "loss": 4.2511, "step": 320000 },
{ "epoch": 0.0, "learning_rate": 4.475218214802115e-05, "loss": 4.2548, "step": 320512 },
{ "epoch": 0.01, "learning_rate": 4.474379620051063e-05, "loss": 4.2527, "step": 321024 },
{ "epoch": 0.01, "learning_rate": 4.473541025300011e-05, "loss": 4.2578, "step": 321536 },
{ "epoch": 0.01, "learning_rate": 4.472702430548959e-05, "loss": 4.2678, "step": 322048 },
{ "epoch": 0.01, "learning_rate": 4.471863835797907e-05, "loss": 4.2593, "step": 322560 },
{ "epoch": 0.01, "learning_rate": 4.4710268789272283e-05, "loss": 4.2481, "step": 323072 },
{ "epoch": 0.01, "learning_rate": 4.470189922056549e-05, "loss": 4.2605, "step": 323584 },
{ "epoch": 0.01, "learning_rate": 4.469351327305497e-05, "loss": 4.2589, "step": 324096 },
{ "epoch": 0.01, "learning_rate": 4.468512732554445e-05, "loss": 4.2381, "step": 324608 },
{ "epoch": 0.01, "learning_rate": 4.467674137803393e-05, "loss": 4.2448, "step": 325120 },
{ "epoch": 0.01, "learning_rate": 4.466835543052341e-05, "loss": 4.248, "step": 325632 },
{ "epoch": 0.01, "learning_rate": 4.465996948301289e-05, "loss": 4.2434, "step": 326144 },
{ "epoch": 0.01, "learning_rate": 4.4651583535502366e-05, "loss": 4.2625, "step": 326656 },
{ "epoch": 0.01, "learning_rate": 4.4643197587991846e-05, "loss": 4.2427, "step": 327168 },
{ "epoch": 0.01, "learning_rate": 4.4634811640481325e-05, "loss": 4.2525, "step": 327680 },
{ "epoch": 0.01, "learning_rate": 4.462644207177454e-05, "loss": 4.2534, "step": 328192 },
{ "epoch": 0.01, "learning_rate": 4.461805612426402e-05, "loss": 4.2495, "step": 328704 },
{ "epoch": 0.01, "learning_rate": 4.46096701767535e-05, "loss": 4.2537, "step": 329216 },
{ "epoch": 0.01, "learning_rate": 4.460128422924298e-05, "loss": 4.2397, "step": 329728 },
{ "epoch": 0.01, "learning_rate": 4.459291466053619e-05, "loss": 4.2227, "step": 330240 },
{ "epoch": 0.01, "learning_rate": 4.458452871302567e-05, "loss": 4.265, "step": 330752 },
{ "epoch": 0.01, "learning_rate": 4.457614276551515e-05, "loss": 4.2448, "step": 331264 },
{ "epoch": 0.01, "learning_rate": 4.456775681800463e-05, "loss": 4.2464, "step": 331776 },
{ "epoch": 0.01, "learning_rate": 4.455937087049411e-05, "loss": 4.2421, "step": 332288 },
{ "epoch": 0.01, "learning_rate": 4.455098492298359e-05, "loss": 4.239, "step": 332800 },
{ "epoch": 0.01, "learning_rate": 4.454259897547307e-05, "loss": 4.2375, "step": 333312 },
{ "epoch": 0.01, "learning_rate": 4.453421302796255e-05, "loss": 4.244, "step": 333824 },
{ "epoch": 0.01, "learning_rate": 4.452582708045203e-05, "loss": 4.2436, "step": 334336 },
{ "epoch": 0.01, "learning_rate": 4.451744113294151e-05, "loss": 4.238, "step": 334848 },
{ "epoch": 0.01, "learning_rate": 4.450905518543099e-05, "loss": 4.254, "step": 335360 },
{ "epoch": 0.01, "learning_rate": 4.450066923792047e-05, "loss": 4.2277, "step": 335872 },
{ "epoch": 0.01, "learning_rate": 4.449228329040995e-05, "loss": 4.2251, "step": 336384 },
{ "epoch": 0.01, "learning_rate": 4.4483913721703166e-05, "loss": 4.2513, "step": 336896 },
{ "epoch": 0.01, "learning_rate": 4.4475527774192646e-05, "loss": 4.217, "step": 337408 },
{ "epoch": 0.01, "learning_rate": 4.4467141826682126e-05, "loss": 4.2255, "step": 337920 },
{ "epoch": 0.01, "learning_rate": 4.4458755879171606e-05, "loss": 4.2471, "step": 338432 },
{ "epoch": 0.01, "learning_rate": 4.4450402689268544e-05, "loss": 4.2481, "step": 338944 },
{ "epoch": 0.01, "learning_rate": 4.4442016741758024e-05, "loss": 4.2309, "step": 339456 },
{ "epoch": 0.01, "learning_rate": 4.4433630794247504e-05, "loss": 4.2389, "step": 339968 },
{ "epoch": 0.01, "learning_rate": 4.4425244846736984e-05, "loss": 4.2191, "step": 340480 },
{ "epoch": 0.01, "learning_rate": 4.4416858899226464e-05, "loss": 4.2412, "step": 340992 },
{ "epoch": 0.01, "learning_rate": 4.4408472951715944e-05, "loss": 4.2355, "step": 341504 },
{ "epoch": 0.01, "learning_rate": 4.4400087004205423e-05, "loss": 4.2425, "step": 342016 },
{ "epoch": 0.01, "learning_rate": 4.4391701056694903e-05, "loss": 4.2352, "step": 342528 },
{ "epoch": 0.01, "learning_rate": 4.438333148798812e-05, "loss": 4.247, "step": 343040 },
{ "epoch": 0.01, "learning_rate": 4.437496191928133e-05, "loss": 4.2498, "step": 343552 },
{ "epoch": 0.01, "learning_rate": 4.436659235057454e-05, "loss": 4.2256, "step": 344064 },
{ "epoch": 0.01, "learning_rate": 4.435820640306402e-05, "loss": 4.234, "step": 344576 },
{ "epoch": 0.01, "learning_rate": 4.43498204555535e-05, "loss": 4.2435, "step": 345088 },
{ "epoch": 0.01, "learning_rate": 4.434143450804298e-05, "loss": 4.2199, "step": 345600 },
{ "epoch": 0.01, "learning_rate": 4.4333064939336186e-05, "loss": 4.2451, "step": 346112 },
{ "epoch": 0.01, "learning_rate": 4.4324678991825666e-05, "loss": 4.2418, "step": 346624 },
{ "epoch": 0.01, "learning_rate": 4.4316293044315146e-05, "loss": 4.2427, "step": 347136 },
{ "epoch": 0.01, "learning_rate": 4.4307907096804626e-05, "loss": 4.2468, "step": 347648 },
{ "epoch": 0.01, "learning_rate": 4.429952114929411e-05, "loss": 4.2244, "step": 348160 },
{ "epoch": 0.01, "learning_rate": 4.429113520178359e-05, "loss": 4.227, "step": 348672 },
{ "epoch": 0.01, "learning_rate": 4.428274925427307e-05, "loss": 4.2346, "step": 349184 },
{ "epoch": 0.01, "learning_rate": 4.427436330676255e-05, "loss": 4.242, "step": 349696 },
{ "epoch": 0.01, "learning_rate": 4.4265977359252026e-05, "loss": 4.2341, "step": 350208 },
{ "epoch": 0.01, "learning_rate": 4.4257591411741506e-05, "loss": 4.2343, "step": 350720 },
{ "epoch": 0.02, "learning_rate": 4.4249205464230986e-05, "loss": 4.2254, "step": 351232 },
{ "epoch": 0.02, "learning_rate": 4.4240819516720466e-05, "loss": 4.2385, "step": 351744 },
{ "epoch": 0.02, "learning_rate": 4.4232433569209946e-05, "loss": 4.2244, "step": 352256 },
{ "epoch": 0.02, "learning_rate": 4.422408037930689e-05, "loss": 4.226, "step": 352768 },
{ "epoch": 0.02, "learning_rate": 4.421569443179637e-05, "loss": 4.2301, "step": 353280 },
{ "epoch": 0.02, "learning_rate": 4.420730848428585e-05, "loss": 4.238, "step": 353792 },
{ "epoch": 0.02, "learning_rate": 4.419892253677533e-05, "loss": 4.2296, "step": 354304 },
{ "epoch": 0.02, "learning_rate": 4.419053658926481e-05, "loss": 4.218, "step": 354816 },
{ "epoch": 0.02, "learning_rate": 4.418215064175429e-05, "loss": 4.2214, "step": 355328 },
{ "epoch": 0.02, "learning_rate": 4.417376469424377e-05, "loss": 4.2287, "step": 355840 },
{ "epoch": 0.02, "learning_rate": 4.416537874673325e-05, "loss": 4.2335, "step": 356352 },
{ "epoch": 0.02, "learning_rate": 4.415700917802646e-05, "loss": 4.226, "step": 356864 },
{ "epoch": 0.02, "learning_rate": 4.414862323051594e-05, "loss": 4.2247, "step": 357376 },
{ "epoch": 0.02, "learning_rate": 4.414023728300542e-05, "loss": 4.2144, "step": 357888 },
{ "epoch": 0.02, "learning_rate": 4.41318513354949e-05, "loss": 4.2227, "step": 358400 },
{ "epoch": 0.02, "learning_rate": 4.412346538798438e-05, "loss": 4.2113, "step": 358912 },
{ "epoch": 0.02, "learning_rate": 4.411507944047386e-05, "loss": 4.2235, "step": 359424 },
{ "epoch": 0.02, "learning_rate": 4.410669349296334e-05, "loss": 4.206, "step": 359936 },
{ "epoch": 0.02, "learning_rate": 4.409830754545282e-05, "loss": 4.2339, "step": 360448 },
{ "epoch": 0.02, "learning_rate": 4.408993797674603e-05, "loss": 4.2179, "step": 360960 },
{ "epoch": 0.02, "learning_rate": 4.4081552029235515e-05, "loss": 4.2248, "step": 361472 },
{ "epoch": 0.02, "learning_rate": 4.4073166081724995e-05, "loss": 4.2137, "step": 361984 },
{ "epoch": 0.02, "learning_rate": 4.4064780134214475e-05, "loss": 4.2156, "step": 362496 },
{ "epoch": 0.02, "learning_rate": 4.4056394186703955e-05, "loss": 4.2195, "step": 363008 },
{ "epoch": 0.02, "learning_rate": 4.4048008239193435e-05, "loss": 4.2152, "step": 363520 },
{ "epoch": 0.02, "learning_rate": 4.4039622291682915e-05, "loss": 4.2256, "step": 364032 },
{ "epoch": 0.02, "learning_rate": 4.4031252722976124e-05, "loss": 4.2275, "step": 364544 },
{ "epoch": 0.02, "learning_rate": 4.402288315426933e-05, "loss": 4.2228, "step": 365056 },
{ "epoch": 0.02, "learning_rate": 4.401449720675881e-05, "loss": 4.218, "step": 365568 },
{ "epoch": 0.02, "learning_rate": 4.400611125924829e-05, "loss": 4.2191, "step": 366080 },
{ "epoch": 0.02, "learning_rate": 4.399772531173777e-05, "loss": 4.2297, "step": 366592 },
{ "epoch": 0.02, "learning_rate": 4.398933936422725e-05, "loss": 4.2022, "step": 367104 },
{ "epoch": 0.02, "learning_rate": 4.398095341671673e-05, "loss": 4.2319, "step": 367616 },
{ "epoch": 0.02, "learning_rate": 4.397256746920621e-05, "loss": 4.2132, "step": 368128 },
{ "epoch": 0.02, "learning_rate": 4.396418152169569e-05, "loss": 4.2243, "step": 368640 },
{ "epoch": 0.02, "learning_rate": 4.395581195298891e-05, "loss": 4.2134, "step": 369152 },
{ "epoch": 0.02, "learning_rate": 4.394742600547839e-05, "loss": 4.2159, "step": 369664 },
{ "epoch": 0.02, "learning_rate": 4.39390564367716e-05, "loss": 4.2017, "step": 370176 },
{ "epoch": 0.02, "learning_rate": 4.393067048926108e-05, "loss": 4.2118, "step": 370688 },
{ "epoch": 0.02, "learning_rate": 4.392228454175056e-05, "loss": 4.2082, "step": 371200 },
{ "epoch": 0.02, "learning_rate": 4.391389859424003e-05, "loss": 4.2166, "step": 371712 },
{ "epoch": 0.02, "learning_rate": 4.390551264672951e-05, "loss": 4.2167, "step": 372224 },
{ "epoch": 0.02, "learning_rate": 4.389712669921899e-05, "loss": 4.2234, "step": 372736 },
{ "epoch": 0.02, "learning_rate": 4.388875713051221e-05, "loss": 4.2026, "step": 373248 },
{ "epoch": 0.02, "learning_rate": 4.388037118300169e-05, "loss": 4.2175, "step": 373760 },
{ "epoch": 0.02, "learning_rate": 4.387198523549117e-05, "loss": 4.221, "step": 374272 },
{ "epoch": 0.02, "learning_rate": 4.386361566678438e-05, "loss": 4.2203, "step": 374784 },
{ "epoch": 0.02, "learning_rate": 4.385522971927386e-05, "loss": 4.2168, "step": 375296 },
{ "epoch": 0.02, "learning_rate": 4.3846843771763336e-05, "loss": 4.2176, "step": 375808 },
{ "epoch": 0.02, "learning_rate": 4.3838457824252816e-05, "loss": 4.2084, "step": 376320 },
{ "epoch": 0.02, "learning_rate": 4.3830071876742296e-05, "loss": 4.2197, "step": 376832 },
{ "epoch": 0.02, "learning_rate": 4.3821685929231775e-05, "loss": 4.2024, "step": 377344 },
{ "epoch": 0.02, "learning_rate": 4.3813299981721255e-05, "loss": 4.2172, "step": 377856 },
{ "epoch": 0.02, "learning_rate": 4.3804914034210735e-05, "loss": 4.2067, "step": 378368 },
{ "epoch": 0.02, "learning_rate": 4.3796528086700215e-05, "loss": 4.2097, "step": 378880 },
{ "epoch": 0.02, "learning_rate": 4.3788142139189695e-05, "loss": 4.2207, "step": 379392 },
{ "epoch": 0.02, "learning_rate": 4.3779756191679175e-05, "loss": 4.2109, "step": 379904 },
{ "epoch": 0.02, "learning_rate": 4.3771370244168655e-05, "loss": 4.205, "step": 380416 },
{ "epoch": 0.02, "learning_rate": 4.376300067546187e-05, "loss": 4.2012, "step": 380928 },
{ "epoch": 0.02, "learning_rate": 4.375461472795135e-05, "loss": 4.2085, "step": 381440 },
{ "epoch": 0.03, "eval_loss": 4.200985908508301, "eval_runtime": 289.7132, "eval_samples_per_second": 1317.134, "eval_steps_per_second": 41.161, "step": 381600 },
{ "epoch": 1.0, "learning_rate": 4.374622878044083e-05, "loss": 4.2017, "step": 381952 },
{ "epoch": 1.0, "learning_rate": 4.373784283293031e-05, "loss": 4.2012, "step": 382464 },
{ "epoch": 1.0, "learning_rate": 4.372947326422352e-05, "loss": 4.2195, "step": 382976 },
{ "epoch": 1.0, "learning_rate": 4.372110369551673e-05, "loss": 4.2068, "step": 383488 },
{ "epoch": 1.0, "learning_rate": 4.371271774800621e-05, "loss": 4.2193, "step": 384000 },
{ "epoch": 1.0, "learning_rate": 4.370433180049569e-05, "loss": 4.2044, "step": 384512 },
{ "epoch": 1.0, "learning_rate": 4.36959622317889e-05, "loss": 4.2025, "step": 385024 },
{ "epoch": 1.0, "learning_rate": 4.368757628427838e-05, "loss": 4.1983, "step": 385536 },
{ "epoch": 1.0, "learning_rate": 4.367919033676786e-05, "loss": 4.1988, "step": 386048 },
{ "epoch": 1.0, "learning_rate": 4.3670804389257345e-05, "loss": 4.2129, "step": 386560 },
{ "epoch": 1.0, "learning_rate": 4.3662418441746825e-05, "loss": 4.2122, "step": 387072 },
{ "epoch": 1.0, "learning_rate": 4.3654032494236305e-05, "loss": 4.2084, "step": 387584 },
{ "epoch": 1.0, "learning_rate": 4.3645662925529514e-05, "loss": 4.2004, "step": 388096 },
{ "epoch": 1.0, "learning_rate": 4.3637276978018994e-05, "loss": 4.1958, "step": 388608 },
{ "epoch": 1.0, "learning_rate": 4.3628891030508474e-05, "loss": 4.197, "step": 389120 },
{ "epoch": 1.0, "learning_rate": 4.3620505082997954e-05, "loss": 4.1944, "step": 389632 },
{ "epoch": 1.0, "learning_rate": 4.361213551429116e-05, "loss": 4.2074, "step": 390144 },
{ "epoch": 1.0, "learning_rate": 4.360374956678064e-05, "loss": 4.2021, "step": 390656 },
{ "epoch": 1.0, "learning_rate": 4.359536361927012e-05, "loss": 4.207, "step": 391168 },
{ "epoch": 1.0, "learning_rate": 4.35869776717596e-05, "loss": 4.2164, "step": 391680 },
{ "epoch": 1.0, "learning_rate": 4.357859172424908e-05, "loss": 4.2096, "step": 392192 },
{ "epoch": 1.0, "learning_rate": 4.357020577673856e-05, "loss": 4.1962, "step": 392704 },
{ "epoch": 1.0, "learning_rate": 4.356181982922804e-05, "loss": 4.2019, "step": 393216 },
{ "epoch": 1.0, "learning_rate": 4.355345026052126e-05, "loss": 4.2056, "step": 393728 },
{ "epoch": 1.0, "learning_rate": 4.354506431301074e-05, "loss": 4.1984, "step": 394240 },
{ "epoch": 1.0, "learning_rate": 4.353667836550022e-05, "loss": 4.2018, "step": 394752 },
{ "epoch": 1.0, "learning_rate": 4.352829241798969e-05, "loss": 4.1981, "step": 395264 },
{ "epoch": 1.0, "learning_rate": 4.351990647047917e-05, "loss": 4.1971, "step": 395776 },
{ "epoch": 1.0, "learning_rate": 4.351152052296865e-05, "loss": 4.1949, "step": 396288 },
{ "epoch": 1.0, "learning_rate": 4.350313457545813e-05, "loss": 4.1897, "step": 396800 },
{ "epoch": 1.01, "learning_rate": 4.349474862794761e-05, "loss": 4.1945, "step": 397312 },
{ "epoch": 1.01, "learning_rate": 4.348636268043709e-05, "loss": 4.1977, "step": 397824 },
{ "epoch": 1.01, "learning_rate": 4.347797673292657e-05, "loss": 4.2058, "step": 398336 },
{ "epoch": 1.01, "learning_rate": 4.346959078541605e-05, "loss": 4.1999, "step": 398848 },
{ "epoch": 1.01, "learning_rate": 4.346122121670926e-05, "loss": 4.1869, "step": 399360 },
{ "epoch": 1.01, "learning_rate": 4.345283526919875e-05, "loss": 4.2027, "step": 399872 },
{ "epoch": 1.01, "learning_rate": 4.344444932168823e-05, "loss": 4.1979, "step": 400384 },
{ "epoch": 1.01, "learning_rate": 4.343606337417771e-05, "loss": 4.1759, "step": 400896 },
{ "epoch": 1.01, "learning_rate": 4.342767742666719e-05, "loss": 4.1915, "step": 401408 },
{ "epoch": 1.01, "learning_rate": 4.3419307857960396e-05, "loss": 4.1887, "step": 401920 },
{ "epoch": 1.01, "learning_rate": 4.3410921910449876e-05, "loss": 4.1845, "step": 402432 },
{ "epoch": 1.01, "learning_rate": 4.3402535962939356e-05, "loss": 4.2017, "step": 402944 },
{ "epoch": 1.01, "learning_rate": 4.3394150015428836e-05, "loss": 4.1843, "step": 403456 },
{ "epoch": 1.01, "learning_rate": 4.3385764067918316e-05, "loss": 4.1905, "step": 403968 },
{ "epoch": 1.01, "learning_rate": 4.3377394499211525e-05, "loss": 4.1919, "step": 404480 },
{ "epoch": 1.01, "learning_rate": 4.3369008551701005e-05, "loss": 4.1888, "step": 404992 },
{ "epoch": 1.01, "learning_rate": 4.3360622604190485e-05, "loss": 4.1992, "step": 405504 },
{ "epoch": 1.01, "learning_rate": 4.3352236656679965e-05, "loss": 4.1825, "step": 406016 },
{ "epoch": 1.01, "learning_rate": 4.3343850709169445e-05, "loss": 4.1618, "step": 406528 },
{ "epoch": 1.01, "learning_rate": 4.333546476165893e-05, "loss": 4.2068, "step": 407040 },
{ "epoch": 1.01, "learning_rate": 4.332707881414841e-05, "loss": 4.1854, "step": 407552 },
{ "epoch": 1.01, "learning_rate": 4.3318692866637885e-05, "loss": 4.1944, "step": 408064 },
{ "epoch": 1.01, "learning_rate": 4.3310306919127365e-05, "loss": 4.1815, "step": 408576 },
{ "epoch": 1.01, "learning_rate": 4.330193735042058e-05, "loss": 4.1828, "step": 409088 },
{ "epoch": 1.01, "learning_rate": 4.3293551402910054e-05, "loss": 4.1836, "step": 409600 },
{ "epoch": 1.01, "learning_rate": 4.3285165455399534e-05, "loss": 4.1819, "step": 410112 },
{ "epoch": 1.01, "learning_rate": 4.327679588669275e-05, "loss": 4.1894, "step": 410624 },
{ "epoch": 1.01, "learning_rate": 4.326840993918223e-05, "loss": 4.1776, "step": 411136 },
{ "epoch": 1.01, "learning_rate": 4.32600239916717e-05, "loss": 4.1969, "step": 411648 },
{ "epoch": 1.01, "learning_rate": 4.325163804416118e-05, "loss": 4.1741, "step": 412160 },
{ "epoch": 1.01, "learning_rate": 4.324325209665067e-05, "loss": 4.1671, "step": 412672 },
{ "epoch": 1.01, "learning_rate": 4.323486614914015e-05, "loss": 4.1923, "step": 413184 },
{ "epoch": 1.01, "learning_rate": 4.322648020162963e-05, "loss": 4.1634, "step": 413696 },
{ "epoch": 1.01, "learning_rate": 4.321809425411911e-05, "loss": 4.1675, "step": 414208 },
{ "epoch": 1.01, "learning_rate": 4.320972468541232e-05, "loss": 4.1892, "step": 414720 },
{ "epoch": 1.01, "learning_rate": 4.32013387379018e-05, "loss": 4.1892, "step": 415232 },
{ "epoch": 1.01, "learning_rate": 4.319295279039128e-05, "loss": 4.1762, "step": 415744 },
{ "epoch": 1.01, "learning_rate": 4.318456684288076e-05, "loss": 4.1827, "step": 416256 },
{ "epoch": 1.01, "learning_rate": 4.317619727417397e-05, "loss": 4.1616, "step": 416768 },
{ "epoch": 1.01, "learning_rate": 4.316781132666345e-05, "loss": 4.1845, "step": 417280 },
{ "epoch": 1.01, "learning_rate": 4.315942537915293e-05, "loss": 4.1789, "step": 417792 },
{ "epoch": 1.01, "learning_rate": 4.315103943164241e-05, "loss": 4.1884, "step": 418304 },
{ "epoch": 1.01, "learning_rate": 4.314266986293562e-05, "loss": 4.1788, "step": 418816 },
{ "epoch": 1.01, "learning_rate": 4.31342839154251e-05, "loss": 4.1873, "step": 419328 },
{ "epoch": 1.01, "learning_rate": 4.312589796791458e-05, "loss": 4.1948, "step": 419840 },
{ "epoch": 1.01, "learning_rate": 4.311751202040406e-05, "loss": 4.1687, "step": 420352 },
{ "epoch": 1.01, "learning_rate": 4.310912607289354e-05, "loss": 4.1808, "step": 420864 },
{ "epoch": 1.01, "learning_rate": 4.310075650418675e-05, "loss": 4.1847, "step": 421376 },
{ "epoch": 1.01, "learning_rate": 4.309237055667623e-05, "loss": 4.1674, "step": 421888 },
{ "epoch": 1.01, "learning_rate": 4.308400098796944e-05, "loss": 4.1873, "step": 422400 },
{ "epoch": 1.01, "learning_rate": 4.307561504045892e-05, "loss": 4.1906, "step": 422912 },
{ "epoch": 1.01, "learning_rate": 4.30672290929484e-05, "loss": 4.1848, "step": 423424 },
{ "epoch": 1.01, "learning_rate": 4.305884314543788e-05, "loss": 4.1983, "step": 423936 },
{ "epoch": 1.01, "learning_rate": 4.305045719792736e-05, "loss": 4.1686, "step": 424448 },
{ "epoch": 1.01, "learning_rate": 4.304207125041684e-05, "loss": 4.1702, "step": 424960 },
{ "epoch": 1.01, "learning_rate": 4.303368530290632e-05, "loss": 4.1812, "step": 425472 },
{ "epoch": 1.01, "learning_rate": 4.30252993553958e-05, "loss": 4.1831, "step": 425984 },
{ "epoch": 1.01, "learning_rate": 4.301691340788529e-05, "loss": 4.184, "step": 426496 },
{ "epoch": 1.01, "learning_rate": 4.300852746037477e-05, "loss": 4.1787, "step": 427008 },
{ "epoch": 1.02, "learning_rate": 4.300014151286425e-05, "loss": 4.1672, "step": 427520 },
{ "epoch": 1.02, "learning_rate": 4.299175556535372e-05, "loss": 4.1836, "step": 428032 },
{ "epoch": 1.02, "learning_rate": 4.2983385996646936e-05, "loss": 4.1718, "step": 428544 },
{ "epoch": 1.02, "learning_rate": 4.2975016427940145e-05, "loss": 4.1723, "step": 429056 },
{ "epoch": 1.02, "learning_rate": 4.2966630480429625e-05, "loss": 4.1754, "step": 429568 },
{ "epoch": 1.02, "learning_rate": 4.2958244532919105e-05, "loss": 4.1862, "step": 430080 },
{ "epoch": 1.02, "learning_rate": 4.2949858585408585e-05, "loss": 4.1766, "step": 430592 },
{ "epoch": 1.02, "learning_rate": 4.2941472637898065e-05, "loss": 4.1621, "step": 431104 },
{ "epoch": 1.02, "learning_rate": 4.293308669038754e-05, "loss": 4.1687, "step": 431616 },
{ "epoch": 1.02, "learning_rate": 4.2924700742877025e-05, "loss": 4.1756, "step": 432128 },
{ "epoch": 1.02, "learning_rate": 4.291633117417024e-05, "loss": 4.1778, "step": 432640 },
{ "epoch": 1.02, "learning_rate": 4.2907945226659714e-05, "loss": 4.1768, "step": 433152 },
{ "epoch": 1.02, "learning_rate": 4.2899559279149194e-05, "loss": 4.1715, "step": 433664 },
{ "epoch": 1.02, "learning_rate": 4.2891173331638674e-05, "loss": 4.1606, "step": 434176 },
{ "epoch": 1.02, "learning_rate": 4.2882787384128154e-05, "loss": 4.1668, "step": 434688 },
{ "epoch": 1.02, "learning_rate": 4.2874401436617634e-05, "loss": 4.1634, "step": 435200 },
{ "epoch": 1.02, "learning_rate": 4.2866015489107114e-05, "loss": 4.1689, "step": 435712 },
{ "epoch": 1.02, "learning_rate": 4.285764592040032e-05, "loss": 4.153, "step": 436224 },
{ "epoch": 1.02, "learning_rate": 4.28492599728898e-05, "loss": 4.1821, "step": 436736 },
{ "epoch": 1.02, "learning_rate": 4.284087402537928e-05, "loss": 4.1653, "step": 437248 },
{ "epoch": 1.02, "learning_rate": 4.283248807786876e-05, "loss": 4.1711, "step": 437760 },
{ "epoch": 1.02, "learning_rate": 4.282410213035824e-05, "loss": 4.1574, "step": 438272 },
{ "epoch": 1.02, "learning_rate": 4.281573256165146e-05, "loss": 4.1718, "step": 438784 },
{ "epoch": 1.02, "learning_rate": 4.280734661414094e-05, "loss": 4.1667, "step": 439296 },
{ "epoch": 1.02, "learning_rate": 4.279896066663042e-05, "loss": 4.1612, "step": 439808 },
{ "epoch": 1.02, "learning_rate": 4.27905747191199e-05, "loss": 4.1746, "step": 440320 },
{ "epoch": 1.02, "learning_rate": 4.278218877160938e-05, "loss": 4.1796, "step": 440832 },
{ "epoch": 1.02, "learning_rate": 4.277380282409886e-05, "loss": 4.1637, "step": 441344 },
{ "epoch": 1.02, "learning_rate": 4.276541687658834e-05, "loss": 4.1763, "step": 441856 },
{ "epoch": 1.02, "learning_rate": 4.275703092907782e-05, "loss": 4.1596, "step": 442368 },
{ "epoch": 1.02, "learning_rate": 4.274866136037103e-05, "loss": 4.1768, "step": 442880 },
{ "epoch": 1.02, "learning_rate": 4.274027541286051e-05, "loss": 4.1573, "step": 443392 },
{ "epoch": 1.02, "learning_rate": 4.2731905844153717e-05, "loss": 4.1797, "step": 443904 },
{ "epoch": 1.02, "learning_rate": 4.2723519896643196e-05, "loss": 4.1596, "step": 444416 },
{ "epoch": 1.02, "learning_rate": 4.2715133949132676e-05, "loss": 4.1741, "step": 444928 },
{ "epoch": 1.02, "learning_rate": 4.270674800162216e-05, "loss": 4.1642, "step": 445440 },
{ "epoch": 1.02, "learning_rate": 4.269836205411164e-05, "loss": 4.1632, "step": 445952 },
{ "epoch": 1.02, "learning_rate": 4.268997610660112e-05, "loss": 4.1483, "step": 446464 },
{ "epoch": 1.02, "learning_rate": 4.26815901590906e-05, "loss": 4.1646, "step": 446976 },
{ "epoch": 1.02, "learning_rate": 4.267322059038381e-05, "loss": 4.1584, "step": 447488 },
{ "epoch": 1.02, "learning_rate": 4.266483464287329e-05, "loss": 4.1683, "step": 448000 },
{ "epoch": 1.02, "learning_rate": 4.265644869536277e-05, "loss": 4.1605, "step": 448512 },
{ "epoch": 1.02, "learning_rate": 4.264806274785225e-05, "loss": 4.1737, "step": 449024 },
{ "epoch": 1.02, "learning_rate": 4.2639676800341725e-05, "loss": 4.1551, "step": 449536 },
{ "epoch": 1.02, "learning_rate": 4.263130723163494e-05, "loss": 4.1658, "step": 450048 },
{ "epoch": 1.02, "learning_rate": 4.262292128412442e-05, "loss": 4.1701, "step": 450560 },
{ "epoch": 1.02, "learning_rate": 4.26145353366139e-05, "loss": 4.171, "step": 451072 },
{ "epoch": 1.02, "learning_rate": 4.260614938910338e-05, "loss": 4.1656, "step": 451584 },
{ "epoch": 1.02, "learning_rate": 4.259776344159286e-05, "loss": 4.1658, "step": 452096 },
{ "epoch": 1.02, "learning_rate": 4.258937749408234e-05, "loss": 4.1583, "step": 452608 },
{ "epoch": 1.02, "learning_rate": 4.258099154657182e-05, "loss": 4.1735, "step": 453120 },
{ "epoch": 1.02, "learning_rate": 4.25726055990613e-05, "loss": 4.1525, "step": 453632 },
{ "epoch": 1.02, "learning_rate": 4.256423603035451e-05, "loss": 4.1687, "step": 454144 },
{ "epoch": 1.02, "learning_rate": 4.255585008284399e-05, "loss": 4.1569, "step": 454656 },
{ "epoch": 1.02, "learning_rate": 4.254746413533347e-05, "loss": 4.1611, "step": 455168 },
{ "epoch": 1.02, "learning_rate": 4.253907818782295e-05, "loss": 4.1679, "step": 455680 },
{ "epoch": 1.02, "learning_rate": 4.2530724997919895e-05, "loss": 4.1595, "step": 456192 },
{ "epoch": 1.02, "learning_rate": 4.2522339050409375e-05, "loss": 4.1594, "step": 456704 },
{ "epoch": 1.02, "learning_rate": 4.2513953102898855e-05, "loss": 4.148, "step": 457216 },
{ "epoch": 1.02, "learning_rate": 4.2505567155388335e-05, "loss": 4.1613, "step": 457728 },
{ "epoch": 1.03, "eval_loss": 4.160315990447998, "eval_runtime": 290.8648, "eval_samples_per_second": 1311.919, "eval_steps_per_second": 40.998, "step": 457920 },
{ "epoch": 0.0, "learning_rate": 4.2497181207877815e-05, "loss": 4.1537, "step": 458240 },
{ "epoch": 0.0, "learning_rate": 4.2488795260367295e-05, "loss": 4.149, "step": 458752 },
{ "epoch": 0.0, "learning_rate": 4.2480409312856774e-05, "loss": 4.1705, "step": 459264 },
{ "epoch": 0.0, "learning_rate": 4.2472023365346254e-05, "loss": 4.1582, "step": 459776 },
{ "epoch": 0.0, "learning_rate": 4.2463637417835734e-05, "loss": 4.1677, "step": 460288 },
{ "epoch": 0.0, "learning_rate": 4.2455251470325214e-05, "loss": 4.1556, "step": 460800 },
{ "epoch": 0.0, "learning_rate": 4.2446865522814694e-05, "loss": 4.155, "step": 461312 },
{ "epoch": 0.0, "learning_rate": 4.2438479575304174e-05, "loss": 4.1513, "step": 461824 },
{ "epoch": 0.0, "learning_rate": 4.2430093627793654e-05, "loss": 4.1528, "step": 462336 },
{ "epoch": 0.0, "learning_rate": 4.2421707680283134e-05, "loss": 4.1654, "step": 462848 },
{ "epoch": 0.0, "learning_rate": 4.2413321732772614e-05, "loss": 4.1595, "step": 463360 },
{ "epoch": 0.0, "learning_rate": 4.240493578526209e-05, "loss": 4.1609, "step": 463872 },
{ "epoch": 0.0, "learning_rate": 4.239654983775157e-05, "loss": 4.1524, "step": 464384 },
{ "epoch": 0.0, "learning_rate": 4.238816389024105e-05, "loss": 4.149, "step": 464896 },
{ "epoch": 0.0, "learning_rate": 4.2379777942730534e-05, "loss": 4.1507, "step": 465408 },
{ "epoch": 0.0, "learning_rate": 4.2371391995220014e-05, "loss": 4.1438, "step": 465920 },
{ "epoch": 0.0, "learning_rate": 4.2363006047709494e-05, "loss": 4.161, "step": 466432 },
{ "epoch": 0.0, "learning_rate": 4.2354620100198974e-05, "loss": 4.1556, "step": 466944 },
{ "epoch": 0.0, "learning_rate": 4.2346234152688454e-05, "loss": 4.1556, "step": 467456 },
{ "epoch": 0.0, "learning_rate": 4.2337848205177934e-05, "loss": 4.1664, "step": 467968 },
{ "epoch": 0.0, "learning_rate": 4.2329462257667414e-05, "loss": 4.1675, "step": 468480 },
{ "epoch": 0.0, "learning_rate": 4.232109268896062e-05, "loss": 4.1515, "step": 468992 },
{ "epoch": 0.0, "learning_rate": 4.23127067414501e-05, "loss": 4.1507, "step": 469504 },
{ "epoch": 0.0, "learning_rate": 4.230432079393958e-05, "loss": 4.1593, "step": 470016 },
{ "epoch": 0.0, "learning_rate": 4.229593484642906e-05, "loss": 4.1549, "step": 470528 },
{ "epoch": 0.0, "learning_rate": 4.228756527772227e-05, "loss": 4.1536, "step": 471040 },
{ "epoch": 0.0, "learning_rate": 4.227917933021175e-05, "loss": 4.1525, "step": 471552 },
{ "epoch": 0.0, "learning_rate": 4.227079338270123e-05, "loss": 4.1502, "step": 472064 },
{ "epoch": 0.0, "learning_rate": 4.226240743519072e-05, "loss": 4.1471, "step": 472576 },
{ "epoch": 0.0, "learning_rate": 4.22540214876802e-05, "loss": 4.1457, "step": 473088 },
{ "epoch": 0.01, "learning_rate": 4.224565191897341e-05, "loss": 4.1403, "step": 473600 },
{ "epoch": 0.01, "learning_rate": 4.223726597146289e-05, "loss": 4.1486, "step": 474112 },
{ "epoch": 0.01, "learning_rate": 4.222888002395237e-05, "loss": 4.1591, "step": 474624 },
{ "epoch": 0.01, "learning_rate": 4.222049407644185e-05, "loss": 4.157, "step": 475136 },
{ "epoch": 0.01, "learning_rate": 4.221210812893133e-05, "loss": 4.1431, "step": 475648 },
{ "epoch": 0.01, "learning_rate": 4.2203738560224536e-05, "loss": 4.153, "step": 476160 },
{ "epoch": 0.01, "learning_rate": 4.2195352612714016e-05, "loss": 4.1543, "step": 476672 },
{ "epoch": 0.01, "learning_rate": 4.2186983044007225e-05, "loss": 4.1317, "step": 477184 },
{ "epoch": 0.01, "learning_rate": 4.2178597096496705e-05, "loss": 4.1456, "step": 477696 },
{ "epoch": 0.01, "learning_rate": 4.2170211148986185e-05, "loss": 4.1411, "step": 478208 },
{ "epoch": 0.01, "learning_rate": 4.216182520147567e-05, "loss": 4.1359, "step": 478720 },
{ "epoch": 0.01, "learning_rate": 4.215343925396515e-05, "loss": 4.1545, "step": 479232 },
{ "epoch": 0.01, "learning_rate": 4.2145053306454625e-05, "loss": 4.1409, "step": 479744 },
{ "epoch": 0.01, "learning_rate": 4.2136667358944105e-05, "loss": 4.1491, "step": 480256 },
{ "epoch": 0.01, "learning_rate": 4.212829779023732e-05, "loss": 4.1462, "step": 480768 },
{ "epoch": 0.01, "learning_rate": 4.21199118427268e-05, "loss": 4.1433, "step": 481280 },
{ "epoch": 0.01, "learning_rate": 4.2111525895216274e-05, "loss": 4.1544, "step": 481792 },
{ "epoch": 0.01, "learning_rate": 4.2103139947705754e-05, "loss": 4.1381, "step": 482304 },
{ "epoch": 0.01, "learning_rate": 4.2094754000195234e-05, "loss": 4.1124, "step": 482816 },
{ "epoch": 0.01, "learning_rate": 4.2086368052684714e-05, "loss": 4.1616, "step": 483328 },
{ "epoch": 0.01, "learning_rate": 4.2077982105174194e-05, "loss": 4.1414, "step": 483840 },
{ "epoch": 0.01, "learning_rate": 4.206961253646741e-05, "loss": 4.1495, "step": 484352 },
{ "epoch": 0.01, "learning_rate": 4.206122658895689e-05, "loss": 4.1373, "step": 484864 },
{ "epoch": 0.01, "learning_rate": 4.205284064144637e-05, "loss": 4.1379, "step": 485376 },
{ "epoch": 0.01, "learning_rate": 4.204445469393585e-05, "loss": 4.1339, "step": 485888 },
{ "epoch": 0.01, "learning_rate": 4.203608512522906e-05, "loss": 4.1403, "step": 486400 },
{ "epoch": 0.01, "learning_rate": 4.202769917771854e-05, "loss": 4.1476, "step": 486912 },
{ "epoch": 0.01, "learning_rate": 4.201931323020802e-05, "loss": 4.1299, "step": 487424 },
{ "epoch": 0.01, "learning_rate": 4.20109272826975e-05, "loss": 4.1525, "step": 487936 },
{ "epoch": 0.01, "learning_rate": 4.200254133518698e-05, "loss": 4.1345, "step": 488448 },
{ "epoch": 0.01, "learning_rate": 4.199417176648019e-05, "loss": 4.1227, "step": 488960 },
{ "epoch": 0.01, "learning_rate": 4.198578581896967e-05, "loss": 4.1432, "step": 489472 },
{ "epoch": 0.01, "learning_rate": 4.197739987145915e-05, "loss": 4.1247, "step": 489984 },
{ "epoch": 0.01, "learning_rate": 4.196901392394863e-05, "loss": 4.1215, "step": 490496 },
{ "epoch": 0.01, "learning_rate": 4.1960644355241843e-05, "loss": 4.1463, "step": 491008 },
{ "epoch": 0.01, "learning_rate": 4.1952258407731323e-05, "loss": 4.1456, "step": 491520 },
{ "epoch": 0.01, "learning_rate": 4.19438724602208e-05, "loss": 4.1344, "step": 492032 },
{ "epoch": 0.01, "learning_rate": 4.193548651271028e-05, "loss": 4.1354, "step": 492544 },
{ "epoch": 0.01, "learning_rate": 4.192710056519976e-05, "loss": 4.1227, "step": 493056 },
{ "epoch": 0.01, "learning_rate": 4.191871461768924e-05, "loss": 4.1393, "step": 493568 },
{ "epoch": 0.01, "learning_rate": 4.191034504898245e-05, "loss": 4.1386, "step": 494080 },
{ "epoch": 0.01, "learning_rate": 4.190195910147193e-05, "loss": 4.1409, "step": 494592 },
{ "epoch": 0.01, "learning_rate": 4.189358953276514e-05, "loss": 4.1365, "step": 495104 },
{ "epoch": 0.01, "learning_rate": 4.188520358525462e-05, "loss": 4.1426, "step": 495616 },
{ "epoch": 0.01, "learning_rate": 4.18768176377441e-05, "loss": 4.1526, "step": 496128 },
{ "epoch": 0.01, "learning_rate": 4.186843169023358e-05, "loss": 4.1272, "step": 496640 },
{ "epoch": 0.01, "learning_rate": 4.186004574272306e-05, "loss": 4.1399, "step": 497152 },
{ "epoch": 0.01, "learning_rate": 4.185167617401628e-05, "loss": 4.1366, "step": 497664 },
{ "epoch": 0.01, "learning_rate": 4.184329022650576e-05, "loss": 4.1277, "step": 498176 },
{ "epoch": 0.01, "learning_rate": 4.183490427899524e-05, "loss": 4.1434, "step": 498688 },
{ "epoch": 0.01, "learning_rate": 4.182651833148472e-05, "loss": 4.1476, "step": 499200 },
{ "epoch": 0.01, "learning_rate": 4.18181323839742e-05, "loss": 4.1398, "step": 499712 },
{ "epoch": 0.01, "learning_rate": 4.180974643646368e-05, "loss": 4.1562, "step": 500224 },
{ "epoch": 0.01, "learning_rate": 4.180136048895316e-05, "loss": 4.1252, "step": 500736 },
{ "epoch": 0.01, "learning_rate": 4.179297454144264e-05, "loss": 4.1267, "step": 501248 },
{ "epoch": 0.01, "learning_rate": 4.178458859393211e-05, "loss": 4.1371, "step": 501760 },
{ "epoch": 0.01, "learning_rate": 4.177620264642159e-05, "loss": 4.1436, "step": 502272 },
{ "epoch": 0.01, "learning_rate": 4.176781669891107e-05, "loss": 4.1346, "step": 502784 },
{ "epoch": 0.01, "learning_rate": 4.175944713020428e-05, "loss": 4.1384, "step": 503296 },
{ "epoch": 0.02, "learning_rate": 4.1751061182693766e-05, "loss": 4.1291, "step": 503808 },
{ "epoch": 0.02, "learning_rate": 4.1742675235183246e-05, "loss": 4.1405, "step": 504320 },
{ "epoch": 0.02, "learning_rate": 4.1734289287672726e-05, "loss": 4.1286, "step": 504832 },
{ "epoch": 0.02, "learning_rate": 4.1725903340162206e-05, "loss": 4.1288, "step": 505344 },
{ "epoch": 0.02, "learning_rate": 4.1717517392651685e-05, "loss": 4.1339, "step": 505856 },
{ "epoch": 0.02, "learning_rate": 4.1709131445141165e-05, "loss": 4.1456, "step": 506368 },
{ "epoch": 0.02, "learning_rate": 4.1700745497630645e-05, "loss": 4.134, "step": 506880 },
{ "epoch": 0.02, "learning_rate": 4.1692375928923855e-05, "loss": 4.118, "step": 507392 },
{ "epoch": 0.02, "learning_rate": 4.1683989981413334e-05, "loss": 4.1309, "step": 507904 },
{ "epoch": 0.02, "learning_rate": 4.1675604033902814e-05, "loss": 4.1304, "step": 508416 },
{ "epoch": 0.02, "learning_rate": 4.1667234465196024e-05, "loss": 4.1375, "step": 508928 },
{ "epoch": 0.02, "learning_rate": 4.1658848517685503e-05, "loss": 4.1328, "step": 509440 },
{ "epoch": 0.02, "learning_rate": 4.1650462570174983e-05, "loss": 4.1265, "step": 509952 },
{ "epoch": 0.02, "learning_rate": 4.164207662266446e-05, "loss": 4.1236, "step": 510464 },
{ "epoch": 0.02, "learning_rate": 4.163369067515395e-05, "loss": 4.1204, "step": 510976 },
{ "epoch": 0.02, "learning_rate": 4.162530472764343e-05, "loss": 4.1292, "step": 511488 },
{ "epoch": 0.02, "learning_rate": 4.161691878013291e-05, "loss": 4.1228, "step": 512000 },
{ "epoch": 0.02, "learning_rate": 4.160854921142612e-05, "loss": 4.114, "step": 512512 },
{ "epoch": 0.02, "learning_rate": 4.16001632639156e-05, "loss": 4.1408, "step": 513024 },
{ "epoch": 0.02, "learning_rate": 4.159177731640508e-05, "loss": 4.1239, "step": 513536 },
{ "epoch": 0.02, "learning_rate": 4.158339136889456e-05, "loss": 4.1259, "step": 514048 },
{ "epoch": 0.02, "learning_rate": 4.157502180018777e-05, "loss": 4.1166, "step": 514560 },
{ "epoch": 0.02, "learning_rate": 4.156663585267725e-05, "loss": 4.1293, "step": 515072 },
{ "epoch": 0.02, "learning_rate": 4.155824990516673e-05, "loss": 4.1289, "step": 515584 },
{
"epoch": 0.02,
"learning_rate": 4.154986395765621e-05,
"loss": 4.1187,
"step": 516096
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154147801014569e-05, |
|
"loss": 4.1331, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.153309206263517e-05, |
|
"loss": 4.1354, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.152470611512465e-05, |
|
"loss": 4.1284, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.151632016761413e-05, |
|
"loss": 4.1339, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1507950598907344e-05, |
|
"loss": 4.1201, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1499564651396824e-05, |
|
"loss": 4.1343, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14911787038863e-05, |
|
"loss": 4.1195, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.148282551398324e-05, |
|
"loss": 4.1369, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147443956647272e-05, |
|
"loss": 4.1201, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14660536189622e-05, |
|
"loss": 4.1312, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.145766767145168e-05, |
|
"loss": 4.1264, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144928172394116e-05, |
|
"loss": 4.1215, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144089577643064e-05, |
|
"loss": 4.1096, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.143250982892012e-05, |
|
"loss": 4.1217, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14241238814096e-05, |
|
"loss": 4.1148, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.141573793389908e-05, |
|
"loss": 4.1319, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.140735198638856e-05, |
|
"loss": 4.1198, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139896603887804e-05, |
|
"loss": 4.1342, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139058009136752e-05, |
|
"loss": 4.1124, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.138221052266073e-05, |
|
"loss": 4.1281, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.137382457515021e-05, |
|
"loss": 4.1293, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.136543862763969e-05, |
|
"loss": 4.1317, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.135705268012917e-05, |
|
"loss": 4.1288, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.134868311142238e-05, |
|
"loss": 4.127, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1340313542715595e-05, |
|
"loss": 4.1176, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1331927595205075e-05, |
|
"loss": 4.1346, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1323541647694555e-05, |
|
"loss": 4.1121, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1315155700184035e-05, |
|
"loss": 4.1266, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1306769752673515e-05, |
|
"loss": 4.1205, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1298383805162995e-05, |
|
"loss": 4.1297, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1290014236456204e-05, |
|
"loss": 4.122, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1281628288945684e-05, |
|
"loss": 4.1207, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1273242341435164e-05, |
|
"loss": 4.1216, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1264856393924644e-05, |
|
"loss": 4.1112, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.125648682521785e-05, |
|
"loss": 4.1228, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.128869533538818, |
|
"eval_runtime": 292.7243, |
|
"eval_samples_per_second": 1303.585, |
|
"eval_steps_per_second": 40.738, |
|
"step": 534240 |
|
}, |
|
{ "epoch": 1.0, "learning_rate": 4.124810087770733e-05, "loss": 4.1157, "step": 534528 },
{ "epoch": 1.0, "learning_rate": 4.123971493019681e-05, "loss": 4.1088, "step": 535040 },
{ "epoch": 1.0, "learning_rate": 4.123132898268629e-05, "loss": 4.128, "step": 535552 },
{ "epoch": 1.0, "learning_rate": 4.122294303517577e-05, "loss": 4.121, "step": 536064 },
{ "epoch": 1.0, "learning_rate": 4.121455708766526e-05, "loss": 4.1301, "step": 536576 },
{ "epoch": 1.0, "learning_rate": 4.120617114015474e-05, "loss": 4.1207, "step": 537088 },
{ "epoch": 1.0, "learning_rate": 4.119778519264422e-05, "loss": 4.1187, "step": 537600 },
{ "epoch": 1.0, "learning_rate": 4.118941562393743e-05, "loss": 4.1139, "step": 538112 },
{ "epoch": 1.0, "learning_rate": 4.118102967642691e-05, "loss": 4.113, "step": 538624 },
{ "epoch": 1.0, "learning_rate": 4.117264372891639e-05, "loss": 4.1262, "step": 539136 },
{ "epoch": 1.0, "learning_rate": 4.116425778140587e-05, "loss": 4.1198, "step": 539648 },
{ "epoch": 1.0, "learning_rate": 4.115587183389535e-05, "loss": 4.1206, "step": 540160 },
{ "epoch": 1.0, "learning_rate": 4.114750226518856e-05, "loss": 4.1195, "step": 540672 },
{ "epoch": 1.0, "learning_rate": 4.113911631767804e-05, "loss": 4.1114, "step": 541184 },
{ "epoch": 1.0, "learning_rate": 4.113073037016752e-05, "loss": 4.1069, "step": 541696 },
{ "epoch": 1.0, "learning_rate": 4.1122344422657e-05, "loss": 4.1082, "step": 542208 },
{ "epoch": 1.0, "learning_rate": 4.111395847514648e-05, "loss": 4.1254, "step": 542720 },
{ "epoch": 1.0, "learning_rate": 4.110557252763596e-05, "loss": 4.1186, "step": 543232 },
{ "epoch": 1.0, "learning_rate": 4.109720295892917e-05, "loss": 4.1113, "step": 543744 },
{ "epoch": 1.0, "learning_rate": 4.108881701141865e-05, "loss": 4.1304, "step": 544256 },
{ "epoch": 1.0, "learning_rate": 4.108043106390813e-05, "loss": 4.1279, "step": 544768 },
{ "epoch": 1.0, "learning_rate": 4.107206149520134e-05, "loss": 4.1165, "step": 545280 },
{ "epoch": 1.0, "learning_rate": 4.106367554769082e-05, "loss": 4.1158, "step": 545792 },
{ "epoch": 1.0, "learning_rate": 4.10552896001803e-05, "loss": 4.1132, "step": 546304 },
{ "epoch": 1.0, "learning_rate": 4.1046903652669775e-05, "loss": 4.1192, "step": 546816 },
{ "epoch": 1.0, "learning_rate": 4.1038517705159255e-05, "loss": 4.1159, "step": 547328 },
{ "epoch": 1.0, "learning_rate": 4.1030131757648735e-05, "loss": 4.1156, "step": 547840 },
{ "epoch": 1.0, "learning_rate": 4.102176218894195e-05, "loss": 4.1167, "step": 548352 },
{ "epoch": 1.0, "learning_rate": 4.101337624143143e-05, "loss": 4.107, "step": 548864 },
{ "epoch": 1.0, "learning_rate": 4.100499029392091e-05, "loss": 4.1084, "step": 549376 },
{ "epoch": 1.01, "learning_rate": 4.099660434641039e-05, "loss": 4.1, "step": 549888 },
{ "epoch": 1.01, "learning_rate": 4.098821839889987e-05, "loss": 4.117, "step": 550400 },
{ "epoch": 1.01, "learning_rate": 4.097983245138935e-05, "loss": 4.1199, "step": 550912 },
{ "epoch": 1.01, "learning_rate": 4.097144650387883e-05, "loss": 4.1192, "step": 551424 },
{ "epoch": 1.01, "learning_rate": 4.096306055636831e-05, "loss": 4.1111, "step": 551936 },
{ "epoch": 1.01, "learning_rate": 4.095467460885779e-05, "loss": 4.1076, "step": 552448 },
{ "epoch": 1.01, "learning_rate": 4.094628866134727e-05, "loss": 4.1186, "step": 552960 },
{ "epoch": 1.01, "learning_rate": 4.093790271383675e-05, "loss": 4.101, "step": 553472 },
{ "epoch": 1.01, "learning_rate": 4.092951676632623e-05, "loss": 4.1024, "step": 553984 },
{ "epoch": 1.01, "learning_rate": 4.092113081881571e-05, "loss": 4.1068, "step": 554496 },
{ "epoch": 1.01, "learning_rate": 4.091277762891265e-05, "loss": 4.1027, "step": 555008 },
{ "epoch": 1.01, "learning_rate": 4.0904391681402136e-05, "loss": 4.1116, "step": 555520 },
{ "epoch": 1.01, "learning_rate": 4.0896005733891615e-05, "loss": 4.1057, "step": 556032 },
{ "epoch": 1.01, "learning_rate": 4.0887619786381095e-05, "loss": 4.1138, "step": 556544 },
{ "epoch": 1.01, "learning_rate": 4.0879233838870575e-05, "loss": 4.1054, "step": 557056 },
{ "epoch": 1.01, "learning_rate": 4.0870847891360055e-05, "loss": 4.1101, "step": 557568 },
{ "epoch": 1.01, "learning_rate": 4.0862461943849535e-05, "loss": 4.1177, "step": 558080 },
{ "epoch": 1.01, "learning_rate": 4.0854075996339015e-05, "loss": 4.1025, "step": 558592 },
{ "epoch": 1.01, "learning_rate": 4.0845706427632224e-05, "loss": 4.0767, "step": 559104 },
{ "epoch": 1.01, "learning_rate": 4.0837320480121704e-05, "loss": 4.1236, "step": 559616 },
{ "epoch": 1.01, "learning_rate": 4.0828934532611184e-05, "loss": 4.1047, "step": 560128 },
{ "epoch": 1.01, "learning_rate": 4.0820548585100664e-05, "loss": 4.1141, "step": 560640 },
{ "epoch": 1.01, "learning_rate": 4.081216263759014e-05, "loss": 4.1019, "step": 561152 },
{ "epoch": 1.01, "learning_rate": 4.080377669007962e-05, "loss": 4.1044, "step": 561664 },
{ "epoch": 1.01, "learning_rate": 4.07953907425691e-05, "loss": 4.0949, "step": 562176 },
{ "epoch": 1.01, "learning_rate": 4.078702117386231e-05, "loss": 4.1039, "step": 562688 },
{ "epoch": 1.01, "learning_rate": 4.077863522635179e-05, "loss": 4.1151, "step": 563200 },
{ "epoch": 1.01, "learning_rate": 4.077024927884127e-05, "loss": 4.0911, "step": 563712 },
{ "epoch": 1.01, "learning_rate": 4.076186333133075e-05, "loss": 4.1206, "step": 564224 },
{ "epoch": 1.01, "learning_rate": 4.075347738382023e-05, "loss": 4.0976, "step": 564736 },
{ "epoch": 1.01, "learning_rate": 4.074510781511344e-05, "loss": 4.0871, "step": 565248 },
{ "epoch": 1.01, "learning_rate": 4.073672186760292e-05, "loss": 4.1032, "step": 565760 },
{ "epoch": 1.01, "learning_rate": 4.07283359200924e-05, "loss": 4.0909, "step": 566272 },
{ "epoch": 1.01, "learning_rate": 4.071994997258188e-05, "loss": 4.0861, "step": 566784 },
{ "epoch": 1.01, "learning_rate": 4.071156402507136e-05, "loss": 4.1115, "step": 567296 },
{ "epoch": 1.01, "learning_rate": 4.070317807756084e-05, "loss": 4.1093, "step": 567808 },
{ "epoch": 1.01, "learning_rate": 4.069479213005032e-05, "loss": 4.0986, "step": 568320 },
{ "epoch": 1.01, "learning_rate": 4.06864061825398e-05, "loss": 4.104, "step": 568832 },
{ "epoch": 1.01, "learning_rate": 4.067803661383302e-05, "loss": 4.0829, "step": 569344 },
{ "epoch": 1.01, "learning_rate": 4.06696506663225e-05, "loss": 4.1058, "step": 569856 },
{ "epoch": 1.01, "learning_rate": 4.066128109761571e-05, "loss": 4.0992, "step": 570368 },
{ "epoch": 1.01, "learning_rate": 4.065289515010519e-05, "loss": 4.1061, "step": 570880 },
{ "epoch": 1.01, "learning_rate": 4.064450920259467e-05, "loss": 4.1066, "step": 571392 },
{ "epoch": 1.01, "learning_rate": 4.0636123255084147e-05, "loss": 4.1033, "step": 571904 },
{ "epoch": 1.01, "learning_rate": 4.0627737307573627e-05, "loss": 4.1208, "step": 572416 },
{ "epoch": 1.01, "learning_rate": 4.0619367738866836e-05, "loss": 4.0914, "step": 572928 },
{ "epoch": 1.01, "learning_rate": 4.0610981791356316e-05, "loss": 4.104, "step": 573440 },
{ "epoch": 1.01, "learning_rate": 4.0602595843845796e-05, "loss": 4.101, "step": 573952 },
{ "epoch": 1.01, "learning_rate": 4.0594209896335276e-05, "loss": 4.0951, "step": 574464 },
{ "epoch": 1.01, "learning_rate": 4.0585823948824755e-05, "loss": 4.106, "step": 574976 },
{ "epoch": 1.01, "learning_rate": 4.057745438011797e-05, "loss": 4.1138, "step": 575488 },
{ "epoch": 1.01, "learning_rate": 4.056906843260745e-05, "loss": 4.1062, "step": 576000 },
{ "epoch": 1.01, "learning_rate": 4.056068248509693e-05, "loss": 4.1218, "step": 576512 },
{ "epoch": 1.01, "learning_rate": 4.055229653758641e-05, "loss": 4.0926, "step": 577024 },
{ "epoch": 1.01, "learning_rate": 4.054391059007589e-05, "loss": 4.0923, "step": 577536 },
{ "epoch": 1.01, "learning_rate": 4.053552464256537e-05, "loss": 4.1007, "step": 578048 },
{ "epoch": 1.01, "learning_rate": 4.052713869505485e-05, "loss": 4.1109, "step": 578560 },
{ "epoch": 1.01, "learning_rate": 4.0518752747544324e-05, "loss": 4.0982, "step": 579072 },
{ "epoch": 1.01, "learning_rate": 4.051039955764127e-05, "loss": 4.1056, "step": 579584 },
{ "epoch": 1.02, "learning_rate": 4.050201361013075e-05, "loss": 4.0958, "step": 580096 },
{ "epoch": 1.02, "learning_rate": 4.049362766262023e-05, "loss": 4.1041, "step": 580608 },
{ "epoch": 1.02, "learning_rate": 4.048524171510971e-05, "loss": 4.0977, "step": 581120 },
{ "epoch": 1.02, "learning_rate": 4.047685576759919e-05, "loss": 4.0915, "step": 581632 },
{ "epoch": 1.02, "learning_rate": 4.0468486198892405e-05, "loss": 4.1014, "step": 582144 },
{ "epoch": 1.02, "learning_rate": 4.0460100251381885e-05, "loss": 4.1104, "step": 582656 },
{ "epoch": 1.02, "learning_rate": 4.0451714303871365e-05, "loss": 4.1005, "step": 583168 },
{ "epoch": 1.02, "learning_rate": 4.0443328356360845e-05, "loss": 4.0817, "step": 583680 },
{ "epoch": 1.02, "learning_rate": 4.0434958787654054e-05, "loss": 4.0974, "step": 584192 },
{ "epoch": 1.02, "learning_rate": 4.0426572840143534e-05, "loss": 4.0954, "step": 584704 },
{ "epoch": 1.02, "learning_rate": 4.0418186892633014e-05, "loss": 4.1055, "step": 585216 },
{ "epoch": 1.02, "learning_rate": 4.0409800945122494e-05, "loss": 4.1027, "step": 585728 },
{ "epoch": 1.02, "learning_rate": 4.0401414997611974e-05, "loss": 4.0908, "step": 586240 },
{ "epoch": 1.02, "learning_rate": 4.039302905010145e-05, "loss": 4.09, "step": 586752 },
{ "epoch": 1.02, "learning_rate": 4.038464310259093e-05, "loss": 4.086, "step": 587264 },
{ "epoch": 1.02, "learning_rate": 4.037627353388414e-05, "loss": 4.0967, "step": 587776 },
{ "epoch": 1.02, "learning_rate": 4.036788758637363e-05, "loss": 4.0887, "step": 588288 },
{ "epoch": 1.02, "learning_rate": 4.03595016388631e-05, "loss": 4.0819, "step": 588800 },
{ "epoch": 1.02, "learning_rate": 4.035111569135258e-05, "loss": 4.1045, "step": 589312 },
{ "epoch": 1.02, "learning_rate": 4.034272974384206e-05, "loss": 4.0931, "step": 589824 },
{ "epoch": 1.02, "learning_rate": 4.033436017513527e-05, "loss": 4.0908, "step": 590336 },
{ "epoch": 1.02, "learning_rate": 4.032597422762475e-05, "loss": 4.0877, "step": 590848 },
{ "epoch": 1.02, "learning_rate": 4.031758828011423e-05, "loss": 4.0923, "step": 591360 },
{ "epoch": 1.02, "learning_rate": 4.030920233260371e-05, "loss": 4.0949, "step": 591872 },
{ "epoch": 1.02, "learning_rate": 4.030081638509319e-05, "loss": 4.0858, "step": 592384 },
{ "epoch": 1.02, "learning_rate": 4.029243043758267e-05, "loss": 4.1016, "step": 592896 },
{ "epoch": 1.02, "learning_rate": 4.028404449007215e-05, "loss": 4.1019, "step": 593408 },
{ "epoch": 1.02, "learning_rate": 4.027565854256163e-05, "loss": 4.0932, "step": 593920 },
{ "epoch": 1.02, "learning_rate": 4.026728897385485e-05, "loss": 4.1014, "step": 594432 },
{ "epoch": 1.02, "learning_rate": 4.025890302634433e-05, "loss": 4.0876, "step": 594944 },
{ "epoch": 1.02, "learning_rate": 4.025051707883381e-05, "loss": 4.1033, "step": 595456 },
{ "epoch": 1.02, "learning_rate": 4.024213113132329e-05, "loss": 4.0889, "step": 595968 },
{ "epoch": 1.02, "learning_rate": 4.0233761562616496e-05, "loss": 4.0988, "step": 596480 },
{ "epoch": 1.02, "learning_rate": 4.0225391993909705e-05, "loss": 4.0872, "step": 596992 },
{ "epoch": 1.02, "learning_rate": 4.021702242520292e-05, "loss": 4.0988, "step": 597504 },
{ "epoch": 1.02, "learning_rate": 4.0208636477692394e-05, "loss": 4.098, "step": 598016 },
{ "epoch": 1.02, "learning_rate": 4.0200250530181874e-05, "loss": 4.088, "step": 598528 },
{ "epoch": 1.02, "learning_rate": 4.0191864582671354e-05, "loss": 4.0723, "step": 599040 },
{ "epoch": 1.02, "learning_rate": 4.0183478635160834e-05, "loss": 4.0933, "step": 599552 },
{ "epoch": 1.02, "learning_rate": 4.0175092687650314e-05, "loss": 4.0826, "step": 600064 },
{ "epoch": 1.02, "learning_rate": 4.01667067401398e-05, "loss": 4.0971, "step": 600576 },
{ "epoch": 1.02, "learning_rate": 4.015832079262928e-05, "loss": 4.089, "step": 601088 },
{ "epoch": 1.02, "learning_rate": 4.014993484511876e-05, "loss": 4.0967, "step": 601600 },
{ "epoch": 1.02, "learning_rate": 4.014154889760824e-05, "loss": 4.0788, "step": 602112 },
{ "epoch": 1.02, "learning_rate": 4.013316295009772e-05, "loss": 4.1003, "step": 602624 },
{ "epoch": 1.02, "learning_rate": 4.012479338139093e-05, "loss": 4.0924, "step": 603136 },
{ "epoch": 1.02, "learning_rate": 4.011640743388041e-05, "loss": 4.1018, "step": 603648 },
{ "epoch": 1.02, "learning_rate": 4.010802148636989e-05, "loss": 4.0958, "step": 604160 },
{ "epoch": 1.02, "learning_rate": 4.009963553885937e-05, "loss": 4.0896, "step": 604672 },
{ "epoch": 1.02, "learning_rate": 4.009124959134885e-05, "loss": 4.0891, "step": 605184 },
{ "epoch": 1.02, "learning_rate": 4.008288002264206e-05, "loss": 4.1018, "step": 605696 },
{ "epoch": 1.02, "learning_rate": 4.007449407513154e-05, "loss": 4.0776, "step": 606208 },
{ "epoch": 1.02, "learning_rate": 4.006610812762102e-05, "loss": 4.096, "step": 606720 },
{ "epoch": 1.02, "learning_rate": 4.0057738558914235e-05, "loss": 4.0912, "step": 607232 },
{ "epoch": 1.02, "learning_rate": 4.0049352611403714e-05, "loss": 4.0923, "step": 607744 },
{ "epoch": 1.02, "learning_rate": 4.0040983042696924e-05, "loss": 4.089, "step": 608256 },
{ "epoch": 1.02, "learning_rate": 4.0032597095186404e-05, "loss": 4.0904, "step": 608768 },
{ "epoch": 1.02, "learning_rate": 4.0024211147675883e-05, "loss": 4.0884, "step": 609280 },
{ "epoch": 1.02, "learning_rate": 4.0015825200165363e-05, "loss": 4.0814, "step": 609792 },
{ "epoch": 1.02, "learning_rate": 4.0007439252654843e-05, "loss": 4.089, "step": 610304 },
{ "epoch": 1.03, "eval_loss": 4.105466365814209, "eval_runtime": 292.4543, "eval_samples_per_second": 1304.789, "eval_steps_per_second": 40.776, "step": 610560 },
{ "epoch": 0.0, "learning_rate": 3.999905330514432e-05, "loss": 4.0815, "step": 610816 },
{ "epoch": 0.0, "learning_rate": 3.99906673576338e-05, "loss": 4.0789, "step": 611328 },
{ "epoch": 0.0, "learning_rate": 3.998228141012328e-05, "loss": 4.0932, "step": 611840 },
{ "epoch": 0.0, "learning_rate": 3.9973895462612756e-05, "loss": 4.0891, "step": 612352 },
{ "epoch": 0.0, "learning_rate": 3.9965509515102236e-05, "loss": 4.1007, "step": 612864 },
{ "epoch": 0.0, "learning_rate": 3.995712356759172e-05, "loss": 4.0848, "step": 613376 },
{ "epoch": 0.0, "learning_rate": 3.99487376200812e-05, "loss": 4.0894, "step": 613888 },
{ "epoch": 0.0, "learning_rate": 3.994035167257068e-05, "loss": 4.0851, "step": 614400 },
{ "epoch": 0.0, "learning_rate": 3.993196572506016e-05, "loss": 4.0796, "step": 614912 },
{ "epoch": 0.0, "learning_rate": 3.992357977754964e-05, "loss": 4.0977, "step": 615424 },
{ "epoch": 0.0, "learning_rate": 3.991519383003912e-05, "loss": 4.0873, "step": 615936 },
{ "epoch": 0.0, "learning_rate": 3.99068078825286e-05, "loss": 4.0882, "step": 616448 },
{ "epoch": 0.0, "learning_rate": 3.989843831382181e-05, "loss": 4.0938, "step": 616960 },
{ "epoch": 0.0, "learning_rate": 3.989005236631129e-05, "loss": 4.0732, "step": 617472 },
{ "epoch": 0.0, "learning_rate": 3.988166641880077e-05, "loss": 4.0783, "step": 617984 },
{ "epoch": 0.0, "learning_rate": 3.987328047129025e-05, "loss": 4.0751, "step": 618496 },
{ "epoch": 0.0, "learning_rate": 3.986489452377973e-05, "loss": 4.0965, "step": 619008 },
{ "epoch": 0.0, "learning_rate": 3.985650857626921e-05, "loss": 4.085, "step": 619520 },
{ "epoch": 0.0, "learning_rate": 3.984812262875869e-05, "loss": 4.0827, "step": 620032 },
{ "epoch": 0.0, "learning_rate": 3.983973668124817e-05, "loss": 4.0983, "step": 620544 },
{ "epoch": 0.0, "learning_rate": 3.983135073373765e-05, "loss": 4.0956, "step": 621056 },
{ "epoch": 0.0, "learning_rate": 3.982296478622713e-05, "loss": 4.0848, "step": 621568 },
{ "epoch": 0.0, "learning_rate": 3.981457883871661e-05, "loss": 4.0876, "step": 622080 },
{ "epoch": 0.0, "learning_rate": 3.980619289120609e-05, "loss": 4.0843, "step": 622592 },
{ "epoch": 0.0, "learning_rate": 3.97978233224993e-05, "loss": 4.0886, "step": 623104 },
{ "epoch": 0.0, "learning_rate": 3.978943737498878e-05, "loss": 4.0836, "step": 623616 },
{ "epoch": 0.0, "learning_rate": 3.9781067806281996e-05, "loss": 4.0811, "step": 624128 },
{ "epoch": 0.0, "learning_rate": 3.977268185877147e-05, "loss": 4.0882, "step": 624640 },
{ "epoch": 0.0, "learning_rate": 3.976429591126095e-05, "loss": 4.0756, "step": 625152 },
{ "epoch": 0.0, "learning_rate": 3.975590996375043e-05, "loss": 4.0765, "step": 625664 },
{ "epoch": 0.01, "learning_rate": 3.974752401623991e-05, "loss": 4.0753, "step": 626176 },
{ "epoch": 0.01, "learning_rate": 3.973913806872939e-05, "loss": 4.0784, "step": 626688 },
{ "epoch": 0.01, "learning_rate": 3.973075212121887e-05, "loss": 4.0844, "step": 627200 },
{ "epoch": 0.01, "learning_rate": 3.9722382552512085e-05, "loss": 4.0948, "step": 627712 },
{ "epoch": 0.01, "learning_rate": 3.9713996605001565e-05, "loss": 4.0789, "step": 628224 },
{ "epoch": 0.01, "learning_rate": 3.9705627036294774e-05, "loss": 4.0795, "step": 628736 },
{ "epoch": 0.01, "learning_rate": 3.9697241088784254e-05, "loss": 4.0893, "step": 629248 },
{ "epoch": 0.01, "learning_rate": 3.9688855141273734e-05, "loss": 4.0663, "step": 629760 },
{ "epoch": 0.01, "learning_rate": 3.9680469193763214e-05, "loss": 4.0726, "step": 630272 },
{ "epoch": 0.01, "learning_rate": 3.9672083246252694e-05, "loss": 4.0792, "step": 630784 },
{ "epoch": 0.01, "learning_rate": 3.9663697298742174e-05, "loss": 4.068, "step": 631296 },
{ "epoch": 0.01, "learning_rate": 3.9655311351231654e-05, "loss": 4.0794, "step": 631808 },
{ "epoch": 0.01, "learning_rate": 3.9646925403721134e-05, "loss": 4.0773, "step": 632320 },
{ "epoch": 0.01, "learning_rate": 3.963855583501434e-05, "loss": 4.0852, "step": 632832 },
{ "epoch": 0.01, "learning_rate": 3.963016988750382e-05, "loss": 4.0724, "step": 633344 },
{ "epoch": 0.01, "learning_rate": 3.962178393999331e-05, "loss": 4.0827, "step": 633856 },
{ "epoch": 0.01, "learning_rate": 3.961339799248279e-05, "loss": 4.0845, "step": 634368 },
{ "epoch": 0.01, "learning_rate": 3.960504480257973e-05, "loss": 4.0753, "step": 634880 },
{ "epoch": 0.01, "learning_rate": 3.959665885506921e-05, "loss": 4.0486, "step": 635392 },
{ "epoch": 0.01, "learning_rate": 3.958827290755869e-05, "loss": 4.0889, "step": 635904 },
{ "epoch": 0.01, "learning_rate": 3.957988696004817e-05, "loss": 4.0743, "step": 636416 },
{ "epoch": 0.01, "learning_rate": 3.957150101253765e-05, "loss": 4.0826, "step": 636928 },
{ "epoch": 0.01, "learning_rate": 3.956311506502713e-05, "loss": 4.072, "step": 637440 },
{ "epoch": 0.01, "learning_rate": 3.955472911751661e-05, "loss": 4.0699, "step": 637952 },
{ "epoch": 0.01, "learning_rate": 3.954634317000609e-05, "loss": 4.0687, "step": 638464 },
{ "epoch": 0.01, "learning_rate": 3.953795722249557e-05, "loss": 4.073, "step": 638976 },
{ "epoch": 0.01, "learning_rate": 3.952957127498505e-05, "loss": 4.0869, "step": 639488 },
{ "epoch": 0.01, "learning_rate": 3.952118532747453e-05, "loss": 4.0581, "step": 640000 },
{ "epoch": 0.01, "learning_rate": 3.951279937996401e-05, "loss": 4.0934, "step": 640512 },
{ "epoch": 0.01, "learning_rate": 3.950442981125722e-05, "loss": 4.07, "step": 641024 },
{ "epoch": 0.01, "learning_rate": 3.949606024255043e-05, "loss": 4.0551, "step": 641536 },
{ "epoch": 0.01, "learning_rate": 3.948767429503991e-05, "loss": 4.0731, "step": 642048 },
{ "epoch": 0.01, "learning_rate": 3.947928834752939e-05, "loss": 4.0612, "step": 642560 },
{ "epoch": 0.01, "learning_rate": 3.947090240001887e-05, "loss": 4.0594, "step": 643072 },
{ "epoch": 0.01, "learning_rate": 3.946251645250835e-05, "loss": 4.0765, "step": 643584 },
{ "epoch": 0.01, "learning_rate": 3.945413050499783e-05, "loss": 4.0814, "step": 644096 },
{ "epoch": 0.01, "learning_rate": 3.9445744557487305e-05, "loss": 4.065, "step": 644608 },
{ "epoch": 0.01, "learning_rate": 3.9437358609976785e-05, "loss": 4.0791, "step": 645120 },
{ "epoch": 0.01, "learning_rate": 3.942898904127e-05, "loss": 4.0533, "step": 645632 },
{ "epoch": 0.01, "learning_rate": 3.942060309375948e-05, "loss": 4.0743, "step": 646144 },
{ "epoch": 0.01, "learning_rate": 3.941221714624896e-05, "loss": 4.0736, "step": 646656 },
{ "epoch": 0.01, "learning_rate": 3.940383119873844e-05, "loss": 4.0727, "step": 647168 },
{ "epoch": 0.01, "learning_rate": 3.939546163003166e-05, "loss": 4.0783, "step": 647680 },
{ "epoch": 0.01, "learning_rate": 3.9387092061324866e-05, "loss": 4.0724, "step": 648192 },
{ "epoch": 0.01, "learning_rate": 3.9378722492618075e-05, "loss": 4.0928, "step": 648704 },
{ "epoch": 0.01, "learning_rate": 3.9370336545107555e-05, "loss": 4.0639, "step": 649216 },
{ "epoch": 0.01, "learning_rate": 3.9361950597597035e-05, "loss": 4.0747, "step": 649728 },
{ "epoch": 0.01, "learning_rate": 3.9353564650086515e-05, "loss": 4.0696, "step": 650240 },
{ "epoch": 0.01, "learning_rate": 3.9345195081379724e-05, "loss": 4.0666, "step": 650752 },
{ "epoch": 0.01, "learning_rate": 3.9336809133869204e-05, "loss": 4.0791, "step": 651264 },
{ "epoch": 0.01, "learning_rate": 3.9328423186358684e-05, "loss": 4.0782, "step": 651776 },
{ "epoch": 0.01, "learning_rate": 3.932003723884817e-05, "loss": 4.0804, "step": 652288 },
{ "epoch": 0.01, "learning_rate": 3.931165129133765e-05, "loss": 4.0901, "step": 652800 },
{ "epoch": 0.01, "learning_rate": 3.930326534382713e-05, "loss": 4.0661, "step": 653312 },
{ "epoch": 0.01, "learning_rate": 3.9294879396316604e-05, "loss": 4.0631, "step": 653824 },
{ "epoch": 0.01, "learning_rate": 3.9286493448806084e-05, "loss": 4.0693, "step": 654336 },
{ "epoch": 0.01, "learning_rate": 3.9278107501295564e-05, "loss": 4.0839, "step": 654848 },
{ "epoch": 0.01, "learning_rate": 3.9269721553785044e-05, "loss": 4.0675, "step": 655360 },
{ "epoch": 0.01, "learning_rate": 3.926135198507825e-05, "loss": 4.0757, "step": 655872 },
{ "epoch": 0.02, "learning_rate": 3.925296603756773e-05, "loss": 4.0716, "step": 656384 },
{ "epoch": 0.02, "learning_rate": 3.924458009005721e-05, "loss": 4.0718, "step": 656896 },
{ "epoch": 0.02, "learning_rate": 3.923619414254669e-05, "loss": 4.0719, "step": 657408 },
{ "epoch": 0.02, "learning_rate": 3.922780819503617e-05, "loss": 4.0613, "step": 657920 },
{ "epoch": 0.02, "learning_rate": 3.921942224752565e-05, "loss": 4.0733, "step": 658432 },
{ "epoch": 0.02, "learning_rate": 3.921103630001514e-05, "loss": 4.0797, "step": 658944 },
{ "epoch": 0.02, "learning_rate": 3.920265035250462e-05, "loss": 4.0721, "step": 659456 },
{ "epoch": 0.02, "learning_rate": 3.919428078379783e-05, "loss": 4.0577, "step": 659968 },
{ "epoch": 0.02, "learning_rate": 3.918589483628731e-05, "loss": 4.0678, "step": 660480 },
{ "epoch": 0.02, "learning_rate": 3.917750888877679e-05, "loss": 4.0655, "step": 660992 },
{ "epoch": 0.02, "learning_rate": 3.916912294126627e-05, "loss": 4.075, "step": 661504 },
{ "epoch": 0.02, "learning_rate": 3.916075337255948e-05, "loss": 4.0752, "step": 662016 },
{ "epoch": 0.02, "learning_rate": 3.915236742504896e-05, "loss": 4.0607, "step": 662528 },
{ "epoch": 0.02, "learning_rate": 3.914398147753844e-05, "loss": 4.065, "step": 663040 },
{ "epoch": 0.02, "learning_rate": 3.913559553002792e-05, "loss": 4.0552, "step": 663552 },
{ "epoch": 0.02, "learning_rate": 3.91272095825174e-05, "loss": 4.0675, "step": 664064 },
{ "epoch": 0.02, "learning_rate": 3.9118840013810606e-05, "loss": 4.0629, "step": 664576 },
{ "epoch": 0.02, "learning_rate": 3.911047044510382e-05, "loss": 4.056, "step": 665088 },
{ "epoch": 0.02, "learning_rate": 3.91020844975933e-05, "loss": 4.0725, "step": 665600 },
{ "epoch": 0.02, "learning_rate": 3.909369855008278e-05, "loss": 4.0635, "step": 666112 },
{ "epoch": 0.02, "learning_rate": 3.908531260257226e-05, "loss": 4.0635, "step": 666624 },
{ "epoch": 0.02, "learning_rate": 3.907692665506174e-05, "loss": 4.0557, "step": 667136 },
{ "epoch": 0.02, "learning_rate": 3.906854070755122e-05, "loss": 4.0626, "step": 667648 },
{ "epoch": 0.02, "learning_rate": 3.90601547600407e-05, "loss": 4.0703, "step": 668160 },
{ "epoch": 0.02, "learning_rate": 3.905176881253018e-05, "loss": 4.0569, "step": 668672 },
{ "epoch": 0.02, "learning_rate": 3.904339924382339e-05, "loss": 4.0756, "step": 669184 },
{ "epoch": 0.02, "learning_rate": 3.903501329631287e-05, "loss": 4.0714, "step": 669696 },
{ "epoch": 0.02, "learning_rate": 3.902664372760608e-05, "loss": 4.063, "step": 670208 },
{ "epoch": 0.02, "learning_rate": 3.901825778009556e-05, "loss": 4.0733, "step": 670720 },
{ "epoch": 0.02, "learning_rate": 3.900987183258504e-05, "loss": 4.0577, "step": 671232 },
{ "epoch": 0.02, "learning_rate": 3.9001485885074527e-05, "loss": 4.0749, "step": 671744 },
{ "epoch": 0.02, "learning_rate": 3.8993099937564007e-05, "loss": 4.0651, "step": 672256 },
{ "epoch": 0.02, "learning_rate": 3.8984730368857216e-05, "loss": 4.0694, "step": 672768 },
{ "epoch": 0.02, "learning_rate": 3.8976344421346696e-05, "loss": 4.0632, "step": 673280 },
{ "epoch": 0.02, "learning_rate": 3.8967958473836176e-05, "loss": 4.0689, "step": 673792 },
{ "epoch": 0.02, "learning_rate": 3.8959572526325656e-05, "loss": 4.0726, "step": 674304 },
{ "epoch": 0.02, "learning_rate": 3.8951186578815135e-05, "loss": 4.0568, "step": 674816 },
{ "epoch": 0.02, "learning_rate": 3.8942817010108345e-05, "loss": 4.0486, "step": 675328 },
{ "epoch": 0.02, "learning_rate": 3.8934431062597825e-05, "loss": 4.0612, "step": 675840 },
{ "epoch": 0.02, "learning_rate": 3.8926045115087304e-05, "loss": 4.0585, "step": 676352 },
{ "epoch": 0.02, "learning_rate": 3.8917659167576784e-05, "loss": 4.0691, "step": 676864 },
{ "epoch": 0.02, "learning_rate": 3.8909273220066264e-05, "loss": 4.0605, "step": 677376 },
{ "epoch": 0.02, "learning_rate": 3.8900887272555744e-05, "loss": 4.0714, "step": 677888 },
{ "epoch": 0.02, "learning_rate": 3.889251770384896e-05, "loss": 4.0515, "step": 678400 },
{ "epoch": 0.02, "learning_rate": 3.888413175633844e-05, "loss": 4.0787, "step": 678912 },
{ "epoch": 0.02, "learning_rate": 3.887576218763165e-05, "loss": 4.0634, "step": 679424 },
{ "epoch": 0.02, "learning_rate": 3.886737624012113e-05, "loss": 4.0724, "step": 679936 },
{ "epoch": 0.02, "learning_rate": 3.885899029261061e-05, "loss": 4.0671, "step": 680448 },
{ "epoch": 0.02, "learning_rate": 3.885060434510009e-05, "loss": 4.0602, "step": 680960 },
{ "epoch": 0.02, "learning_rate": 3.884221839758956e-05, "loss": 4.0627, "step": 681472 },
{ "epoch": 0.02, "learning_rate": 3.883383245007904e-05, "loss": 4.073, "step": 681984 },
{ "epoch": 0.02, "learning_rate": 3.882544650256852e-05, "loss": 4.0527, "step": 682496 },
{ "epoch": 0.02, "learning_rate": 3.8817060555058e-05, "loss": 4.0673, "step": 683008 },
{ "epoch": 0.02, "learning_rate": 3.880869098635122e-05, "loss": 4.0619, "step": 683520 },
{ "epoch": 0.02, "learning_rate": 3.88003050388407e-05, "loss": 4.0653, "step": 684032 },
{ "epoch": 0.02, "learning_rate": 3.879191909133018e-05, "loss": 4.0623, "step": 684544 },
{ "epoch": 0.02, "learning_rate": 3.878353314381966e-05, "loss": 4.062, "step": 685056 },
{ "epoch": 0.02, "learning_rate": 3.877514719630914e-05, "loss": 4.0596, "step": 685568 },
{ "epoch": 0.02, "learning_rate": 3.876677762760235e-05, "loss": 4.0605, "step": 686080 },
{ "epoch": 0.02, "learning_rate": 3.875839168009183e-05, "loss": 4.0595, "step": 686592 },
{ "epoch": 0.03, "eval_loss": 4.085322380065918, "eval_runtime": 292.4032, "eval_samples_per_second": 1305.016, "eval_steps_per_second": 40.783, "step": 686880 },
{ "epoch": 1.0, "learning_rate": 3.875000573258131e-05, "loss": 4.0528, "step": 687104 },
{ "epoch": 1.0, "learning_rate": 3.874161978507079e-05, "loss": 4.0514, "step": 687616 },
{ "epoch": 1.0, "learning_rate": 3.873323383756027e-05, "loss": 4.0629, "step": 688128 },
{ "epoch": 1.0, "learning_rate": 3.872484789004975e-05, "loss": 4.0645, "step": 688640 },
{ "epoch": 1.0, "learning_rate": 3.871646194253923e-05, "loss": 4.074, "step": 689152 },
{ "epoch": 1.0, "learning_rate": 3.870807599502871e-05, "loss": 4.0541, "step": 689664 },
{ "epoch": 1.0, "learning_rate": 3.8699706426321916e-05, "loss": 4.0611, "step": 690176 },
{ "epoch": 1.0, "learning_rate": 3.86913204788114e-05, "loss": 4.0572, "step": 690688 },
{ "epoch": 1.0, "learning_rate": 3.868295091010461e-05, "loss": 4.0494, "step": 691200 },
{ "epoch": 1.0, "learning_rate": 3.867456496259409e-05, "loss": 4.0711, "step": 691712 },
{ "epoch": 1.0, "learning_rate": 3.866617901508357e-05, "loss": 4.0575, "step": 692224 },
{ "epoch": 1.0, "learning_rate": 3.865779306757305e-05, "loss": 4.0639, "step": 692736 },
{ "epoch": 1.0, "learning_rate": 3.864942349886626e-05, "loss": 4.0691, "step": 693248 },
{ "epoch": 1.0, "learning_rate": 3.864103755135574e-05, "loss": 4.0473, "step": 693760 },
{ "epoch": 1.0, "learning_rate": 3.863265160384522e-05, "loss": 4.0466, "step": 694272 },
{ "epoch": 1.0, "learning_rate": 3.86242656563347e-05, "loss": 4.0499, "step": 694784 },
{ "epoch": 1.0, "learning_rate": 3.861587970882418e-05, "loss": 4.0679, "step": 695296 },
{ "epoch": 1.0, "learning_rate": 3.860749376131366e-05, "loss": 4.065, "step": 695808 },
{ "epoch": 1.0, "learning_rate": 3.859912419260687e-05, "loss": 4.0495, "step": 696320 },
{ "epoch": 1.0, "learning_rate": 3.8590754623900085e-05, "loss": 4.0717, "step": 696832 },
{ "epoch": 1.0, "learning_rate": 3.8582368676389565e-05, "loss": 4.0695, "step": 697344 },
{ "epoch": 1.0, "learning_rate": 3.8573982728879045e-05, "loss": 4.0579, "step": 697856 },
{ "epoch": 1.0, "learning_rate": 3.8565596781368525e-05, "loss": 4.0613, "step": 698368 },
{ "epoch": 1.0, "learning_rate": 3.8557210833858005e-05, "loss": 4.0594, "step": 698880 },
{ "epoch": 1.0, "learning_rate": 3.8548824886347485e-05, "loss": 4.061, "step": 699392 },
{ "epoch": 1.0, "learning_rate": 3.8540438938836965e-05, "loss": 4.0591, "step": 699904 },
{ "epoch": 1.0, "learning_rate": 3.8532052991326445e-05, "loss": 4.0522, "step": 700416 },
{ "epoch": 1.0, "learning_rate": 3.852366704381592e-05, "loss": 4.0603, "step": 700928 },
{ "epoch": 1.0, "learning_rate": 3.85152810963054e-05, "loss": 4.052, "step": 701440 },
{ "epoch": 1.0, "learning_rate": 3.850689514879488e-05, "loss": 4.0489, "step": 701952 },
{ "epoch": 1.01, "learning_rate": 3.849850920128436e-05, "loss": 4.0481, "step": 702464 },
{ "epoch": 1.01, "learning_rate": 3.8490139632577574e-05, "loss": 4.0538, "step": 702976 },
{ "epoch": 1.01, "learning_rate": 3.8481753685067054e-05, "loss": 4.0556, "step": 703488 },
{ "epoch": 1.01, "learning_rate": 3.8473367737556534e-05, "loss": 4.0695, "step": 704000 },
{ "epoch": 1.01, "learning_rate": 3.8464981790046014e-05, "loss": 4.0526, "step": 704512 },
{ "epoch": 1.01, "learning_rate": 3.845661222133922e-05, "loss": 4.0516, "step": 705024 },
{ "epoch": 1.01, "learning_rate": 3.84482262738287e-05, "loss": 4.0664, "step": 705536 },
{ "epoch": 1.01, "learning_rate": 3.843984032631818e-05, "loss": 4.0376, "step": 706048 },
{ "epoch": 1.01, "learning_rate": 3.843145437880766e-05, "loss": 4.0494, "step": 706560 },
{ "epoch": 1.01, "learning_rate": 3.842306843129714e-05, "loss": 4.0533, "step": 707072 },
{ "epoch": 1.01, "learning_rate": 3.841468248378662e-05, "loss": 4.0427, "step": 707584 },
{ "epoch": 1.01, "learning_rate": 3.84062965362761e-05, "loss": 4.0509, "step": 708096 },
{ "epoch": 1.01, "learning_rate": 3.839791058876558e-05, "loss": 4.0525, "step": 708608 },
{ "epoch": 1.01, "learning_rate": 3.838952464125506e-05, "loss": 4.058, "step": 709120 },
{ "epoch": 1.01, "learning_rate": 3.838115507254827e-05, "loss": 4.0476, "step": 709632 },
{ "epoch": 1.01, "learning_rate": 3.837276912503776e-05, "loss": 4.057, "step": 710144 },
{ "epoch": 1.01, "learning_rate": 3.836438317752724e-05, "loss": 4.0554, "step": 710656 },
{ "epoch": 1.01, "learning_rate": 3.835601360882045e-05, "loss": 4.0507, "step": 711168 },
{ "epoch": 1.01, "learning_rate": 3.834762766130993e-05, "loss": 4.0269, "step": 711680 },
{ "epoch": 1.01, "learning_rate": 3.833924171379941e-05, "loss": 4.0601, "step": 712192 },
{ "epoch": 1.01, "learning_rate": 3.833085576628889e-05, "loss": 4.0512, "step": 712704 },
{ "epoch": 1.01, "learning_rate": 3.832246981877837e-05, "loss": 4.0601, "step": 713216 },
{ "epoch": 1.01, "learning_rate": 3.831408387126785e-05, "loss": 4.0398, "step": 713728 },
{ "epoch": 1.01, "learning_rate": 3.8305714302561056e-05, "loss": 4.0451, "step": 714240 },
{ "epoch": 1.01, "learning_rate": 3.8297328355050536e-05, "loss": 4.0419, "step": 714752 },
{ "epoch": 1.01, "learning_rate": 3.8288942407540016e-05, "loss": 4.046, "step": 715264 },
{ "epoch": 1.01, "learning_rate": 3.8280556460029496e-05, "loss": 4.0639, "step": 715776 },
{ "epoch": 1.01, "learning_rate": 3.8272170512518976e-05, "loss": 4.0348, "step": 716288 },
{ "epoch": 1.01, "learning_rate": 3.8263784565008456e-05, "loss": 4.0643, "step": 716800 },
{ "epoch": 1.01, "learning_rate": 3.8255398617497936e-05, "loss": 4.047, "step": 717312 },
{ "epoch": 1.01, "learning_rate": 3.8247012669987416e-05, "loss": 4.0299, "step": 717824 },
{ "epoch": 1.01, "learning_rate": 3.8238626722476896e-05, "loss": 4.0452, "step": 718336 },
{ "epoch": 1.01, "learning_rate": 3.8230240774966376e-05, "loss": 4.0346, "step": 718848 },
{ "epoch": 1.01, "learning_rate": 3.8221854827455856e-05, "loss": 4.0398, "step": 719360 },
{ "epoch": 1.01, "learning_rate": 3.8213468879945336e-05, "loss": 4.0485, "step": 719872 },
{ "epoch": 1.01, "learning_rate": 3.8205099311238545e-05, "loss": 4.0579, "step": 720384 },
{ "epoch": 1.01, "learning_rate": 3.8196713363728025e-05, "loss": 4.0416, "step": 720896 },
{ "epoch": 1.01, "learning_rate": 3.8188327416217505e-05, "loss": 4.0526, "step": 721408 },
{ "epoch": 1.01, "learning_rate": 3.8179941468706985e-05, "loss": 4.0295, "step": 721920 },
{ "epoch": 1.01, "learning_rate": 3.8171571900000194e-05, "loss": 4.0483, "step": 722432 },
{ "epoch": 1.01, "learning_rate": 3.816318595248968e-05, "loss": 4.0485, "step": 722944 },
{ "epoch": 1.01, "learning_rate": 3.8154832762586626e-05, "loss": 4.0516, "step": 723456 },
{ "epoch": 1.01, "learning_rate": 3.8146446815076106e-05, "loss": 4.0477, "step": 723968 },
{ "epoch": 1.01, "learning_rate": 3.813806086756558e-05, "loss": 4.0482, "step": 724480 },
{ "epoch": 1.01, "learning_rate": 3.812967492005506e-05, "loss": 4.0673, "step": 724992 },
{ "epoch": 1.01, "learning_rate": 3.812128897254454e-05, "loss": 4.0365, "step": 725504 },
{ "epoch": 1.01, "learning_rate": 3.811290302503402e-05, "loss": 4.0531, "step": 726016 },
{ "epoch": 1.01, "learning_rate": 3.810453345632723e-05, "loss": 4.043, "step": 726528 },
{ "epoch": 1.01, "learning_rate": 3.809614750881671e-05, "loss": 4.0395, "step": 727040 },
{ "epoch": 1.01, "learning_rate": 3.8087777940109924e-05, "loss": 4.0518, "step": 727552 },
{ "epoch": 1.01, "learning_rate": 3.8079391992599403e-05, "loss": 4.0562, "step": 728064 },
{ "epoch": 1.01, "learning_rate": 3.8071006045088883e-05, "loss": 4.0535, "step": 728576 },
{ "epoch": 1.01, "learning_rate": 3.806262009757836e-05, "loss": 4.0694, "step": 729088 },
{ "epoch": 1.01, "learning_rate": 3.805423415006784e-05, "loss": 4.043, "step": 729600 },
{ "epoch": 1.01, "learning_rate": 3.804584820255732e-05, "loss": 4.0361, "step": 730112 },
{ "epoch": 1.01, "learning_rate": 3.80374622550468e-05, "loss": 4.0442, "step": 730624 },
{ "epoch": 1.01, "learning_rate": 3.802907630753628e-05, "loss": 4.056, "step": 731136 },
{ "epoch": 1.01, "learning_rate": 3.802069036002576e-05, "loss": 4.0437, "step": 731648 },
{ "epoch": 1.01, "learning_rate": 3.801230441251524e-05, "loss": 4.051, "step": 732160 },
{ "epoch": 1.02, "learning_rate": 3.800391846500472e-05, "loss": 4.0453, "step": 732672 },
{ "epoch": 1.02, "learning_rate": 3.79955325174942e-05, "loss": 4.0504, "step": 733184 },
{ "epoch": 1.02, "learning_rate": 3.798714656998368e-05, "loss": 4.0461, "step": 733696 },
{ "epoch": 1.02, "learning_rate": 3.797876062247316e-05, "loss": 4.0369, "step": 734208 },
{ "epoch": 1.02, "learning_rate": 3.797037467496264e-05, "loss": 4.0486, "step": 734720 },
{ "epoch": 1.02, "learning_rate": 3.7961988727452116e-05, "loss": 4.0555, "step": 735232 },
{ "epoch": 1.02, "learning_rate": 3.795361915874533e-05, "loss": 4.0497, "step": 735744 },
{ "epoch": 1.02, "learning_rate": 3.794523321123481e-05, "loss": 4.0294, "step": 736256 },
{ "epoch": 1.02, "learning_rate": 3.79368472637243e-05, "loss": 4.0435, "step": 736768 },
{ "epoch": 1.02, "learning_rate": 3.792847769501751e-05, "loss": 4.043, "step": 737280 },
{ "epoch": 1.02, "learning_rate": 3.792009174750699e-05, "loss": 4.0491, "step": 737792 },
{ "epoch": 1.02, "learning_rate": 3.791170579999647e-05, "loss": 4.05, "step": 738304 },
{ "epoch": 1.02, "learning_rate": 3.790331985248594e-05, "loss": 4.0354, "step": 738816 },
{ "epoch": 1.02, "learning_rate": 3.789493390497542e-05, "loss": 4.0402, "step": 739328 },
{ "epoch": 1.02, "learning_rate": 3.788656433626864e-05, "loss": 4.0327, "step": 739840 },
{ "epoch": 1.02, "learning_rate": 3.7878178388758117e-05, "loss": 4.0427, "step": 740352 },
{ "epoch": 1.02, "learning_rate": 3.786979244124759e-05, "loss": 4.0372, "step": 740864 },
{ "epoch": 1.02, "learning_rate": 3.786140649373707e-05, "loss": 4.0342, "step": 741376 },
{ "epoch": 1.02, "learning_rate": 3.785302054622655e-05, "loss": 4.0443, "step": 741888 },
{ "epoch": 1.02, "learning_rate": 3.7844634598716036e-05, "loss": 4.0441, "step": 742400 },
{ "epoch": 1.02, "learning_rate": 3.7836248651205516e-05, "loss": 4.0393, "step": 742912 },
{ "epoch": 1.02, "learning_rate": 3.7827862703694996e-05, "loss": 4.0321, "step": 743424 },
{ "epoch": 1.02, "learning_rate": 3.7819493134988205e-05, "loss": 4.0368, "step": 743936 },
{ "epoch": 1.02, "learning_rate": 3.7811107187477685e-05, "loss": 4.0492, "step": 744448 },
{ "epoch": 1.02, "learning_rate": 3.7802721239967165e-05, "loss": 4.0288, "step": 744960 },
{ "epoch": 1.02, "learning_rate": 3.7794335292456645e-05, "loss": 4.0509, "step": 745472 },
{ "epoch": 1.02, "learning_rate": 3.7785949344946125e-05, "loss": 4.0491, "step": 745984 },
{ "epoch": 1.02, "learning_rate": 3.7777579776239334e-05, "loss": 4.0383, "step": 746496 },
{ "epoch": 1.02, "learning_rate": 3.7769193828728814e-05, "loss": 4.0494, "step": 747008 },
{ "epoch": 1.02, "learning_rate": 3.7760807881218294e-05, "loss": 4.0372, "step": 747520 },
{ "epoch": 1.02, "learning_rate": 3.7752421933707774e-05, "loss": 4.0503, "step": 748032 },
{ "epoch": 1.02, "learning_rate": 3.774405236500099e-05, "loss": 4.0418, "step": 748544 },
{ "epoch": 1.02, "learning_rate": 3.77356827962942e-05, "loss": 4.0456, "step": 749056 },
{ "epoch": 1.02, "learning_rate": 3.772729684878368e-05, "loss": 4.0401, "step": 749568 },
{ "epoch": 1.02, "learning_rate": 3.771891090127316e-05, "loss": 4.0412, "step": 750080 },
{ "epoch": 1.02, "learning_rate": 3.771052495376264e-05, "loss": 4.0457, "step": 750592 },
{ "epoch": 1.02, "learning_rate": 3.770213900625212e-05, "loss": 4.0375, "step": 751104 },
{ "epoch": 1.02, "learning_rate": 3.769376943754533e-05, "loss": 4.0235, "step": 751616 },
{ "epoch": 1.02, "learning_rate": 3.768539986883854e-05, "loss": 4.0413, "step": 752128 },
{ "epoch": 1.02, "learning_rate": 3.767701392132802e-05, "loss": 4.0288, "step": 752640 },
{ "epoch": 1.02, "learning_rate": 3.76686279738175e-05, "loss": 4.0486, "step": 753152 },
{ "epoch": 1.02, "learning_rate": 3.766024202630698e-05, "loss": 4.035, "step": 753664 },
{ "epoch": 1.02, "learning_rate": 3.765185607879646e-05, "loss": 4.0462, "step": 754176 },
{ "epoch": 1.02, "learning_rate": 3.7643470131285944e-05, "loss": 4.0267, "step": 754688 },
{ "epoch": 1.02, "learning_rate": 3.7635084183775424e-05, "loss": 4.0512, "step": 755200 },
{ "epoch": 1.02, "learning_rate": 3.7626698236264904e-05, "loss": 4.0405, "step": 755712 },
{ "epoch": 1.02, "learning_rate": 3.7618312288754384e-05, "loss": 4.0523, "step": 756224 },
{ "epoch": 1.02, "learning_rate": 3.760994272004759e-05, "loss": 4.0439, "step": 756736 },
{ "epoch": 1.02, "learning_rate": 3.760155677253707e-05, "loss": 4.0408, "step": 757248 },
{ "epoch": 1.02, "learning_rate": 3.759317082502655e-05, "loss": 4.0345, "step": 757760 },
{ "epoch": 1.02, "learning_rate": 3.758478487751603e-05, "loss": 4.0551, "step": 758272 },
{ "epoch": 1.02, "learning_rate": 3.757639893000551e-05, "loss": 4.0254, "step": 758784 },
{ "epoch": 1.02, "learning_rate": 3.756801298249499e-05, "loss": 4.0476, "step": 759296 },
{ "epoch": 1.02, "learning_rate": 3.75596434137882e-05, "loss": 4.0375, "step": 759808 },
{ "epoch": 1.02, "learning_rate": 3.755125746627768e-05, "loss": 4.0401, "step": 760320 },
{ "epoch": 1.02, "learning_rate": 3.754287151876716e-05, "loss": 4.0396, "step": 760832 },
{ "epoch": 1.02, "learning_rate": 3.753448557125664e-05, "loss": 4.0357, "step": 761344 },
{ "epoch": 1.02, "learning_rate": 3.752609962374613e-05, "loss": 4.0361, "step": 761856 },
{ "epoch": 1.02, "learning_rate": 3.751773005503934e-05, "loss": 4.0396, "step": 762368 },
{ "epoch": 1.02, "learning_rate": 3.750934410752882e-05, "loss": 4.0332, "step": 762880 },
{ "epoch": 1.03, "eval_loss": 4.0698771476745605, "eval_runtime": 291.4953, "eval_samples_per_second": 1309.081, "eval_steps_per_second": 40.91, "step": 763200 }
],
"logging_steps": 512,
"max_steps": 3052726,
"num_train_epochs": 9223372036854775807,
"save_steps": 10,
"total_flos": 3.1400741488253894e+17,
"trial_name": null,
"trial_params": null
}