|
{ |
|
"best_metric": 3.8603413105010986, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full2/transformer/0/checkpoints/checkpoint-534240", |
|
"epoch": 1.0250006060157382, |
|
"eval_steps": 10, |
|
"global_step": 534240, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 11.0453, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 6.8203, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 6.1803, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 5.976, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 5.8136, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 5.7055, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 5.6058, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 5.5376, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 5.4512, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 5.403, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99161405248948e-05, |
|
"loss": 5.3536, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990775457738428e-05, |
|
"loss": 5.33, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989938500867749e-05, |
|
"loss": 5.2746, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989099906116697e-05, |
|
"loss": 5.211, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 5.1872, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 5.1491, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 5.1193, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.0928, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.0671, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.0268, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.0218, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9823927859886547e-05, |
|
"loss": 4.986, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9815541912376026e-05, |
|
"loss": 4.9659, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9807155964865506e-05, |
|
"loss": 4.9377, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9798770017354986e-05, |
|
"loss": 4.9353, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9790400448648195e-05, |
|
"loss": 4.9084, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9782014501137675e-05, |
|
"loss": 4.8814, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773628553627155e-05, |
|
"loss": 4.8641, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9765242606116635e-05, |
|
"loss": 4.8465, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9756873037409844e-05, |
|
"loss": 4.8346, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748487089899324e-05, |
|
"loss": 4.8112, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9740117521192533e-05, |
|
"loss": 4.8046, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9731731573682013e-05, |
|
"loss": 4.7911, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.97233456261715e-05, |
|
"loss": 4.7771, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.971495967866098e-05, |
|
"loss": 4.7744, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.970657373115046e-05, |
|
"loss": 4.7607, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.969818778363994e-05, |
|
"loss": 4.7448, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.968980183612942e-05, |
|
"loss": 4.7186, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96814158886189e-05, |
|
"loss": 4.714, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967304631991211e-05, |
|
"loss": 4.6948, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966466037240159e-05, |
|
"loss": 4.6941, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965627442489107e-05, |
|
"loss": 4.6788, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964788847738054e-05, |
|
"loss": 4.6731, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963950252987002e-05, |
|
"loss": 4.656, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963113296116324e-05, |
|
"loss": 4.6628, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962274701365272e-05, |
|
"loss": 4.6451, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96143610661422e-05, |
|
"loss": 4.6473, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960597511863168e-05, |
|
"loss": 4.6369, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959758917112116e-05, |
|
"loss": 4.609, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95892359812181e-05, |
|
"loss": 4.602, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958085003370758e-05, |
|
"loss": 4.616, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.957246408619706e-05, |
|
"loss": 4.6008, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956407813868654e-05, |
|
"loss": 4.5844, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9555692191176016e-05, |
|
"loss": 4.5682, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9547306243665496e-05, |
|
"loss": 4.5748, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9538920296154976e-05, |
|
"loss": 4.5575, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530534348644456e-05, |
|
"loss": 4.5697, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.952216477993767e-05, |
|
"loss": 4.5357, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.951377883242715e-05, |
|
"loss": 4.5472, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.950539288491663e-05, |
|
"loss": 4.5447, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.949700693740611e-05, |
|
"loss": 4.5208, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948863736869932e-05, |
|
"loss": 4.5379, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94802514211888e-05, |
|
"loss": 4.5087, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947186547367828e-05, |
|
"loss": 4.5105, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.946347952616776e-05, |
|
"loss": 4.4967, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945510995746097e-05, |
|
"loss": 4.513, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.944672400995045e-05, |
|
"loss": 4.486, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.943833806243993e-05, |
|
"loss": 4.4883, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942995211492941e-05, |
|
"loss": 4.4747, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942156616741889e-05, |
|
"loss": 4.4701, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9413196598712105e-05, |
|
"loss": 4.4835, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940482703000532e-05, |
|
"loss": 4.4759, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9396441082494794e-05, |
|
"loss": 4.4659, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9388055134984274e-05, |
|
"loss": 4.4653, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9379669187473754e-05, |
|
"loss": 4.4719, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9371283239963234e-05, |
|
"loss": 4.446, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.936291367125644e-05, |
|
"loss": 4.4601, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935452772374592e-05, |
|
"loss": 4.4346, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.93461417762354e-05, |
|
"loss": 4.4338, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.933775582872488e-05, |
|
"loss": 4.4317, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932936988121436e-05, |
|
"loss": 4.4345, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932098393370384e-05, |
|
"loss": 4.4255, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.931259798619332e-05, |
|
"loss": 4.4334, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930421203868281e-05, |
|
"loss": 4.418, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.929584246997602e-05, |
|
"loss": 4.3969, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.92874565224655e-05, |
|
"loss": 4.3995, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.927907057495498e-05, |
|
"loss": 4.411, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.927070100624819e-05, |
|
"loss": 4.3954, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.92623314375414e-05, |
|
"loss": 4.3944, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.925394549003088e-05, |
|
"loss": 4.3981, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.924555954252036e-05, |
|
"loss": 4.3813, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.923717359500984e-05, |
|
"loss": 4.3868, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9228804026303046e-05, |
|
"loss": 4.3807, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922041807879253e-05, |
|
"loss": 4.3858, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921203213128201e-05, |
|
"loss": 4.3832, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920364618377149e-05, |
|
"loss": 4.3773, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919526023626097e-05, |
|
"loss": 4.3577, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918687428875045e-05, |
|
"loss": 4.3646, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917848834123993e-05, |
|
"loss": 4.3568, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917010239372941e-05, |
|
"loss": 4.3604, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.916171644621889e-05, |
|
"loss": 4.3648, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915333049870837e-05, |
|
"loss": 4.3437, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914494455119785e-05, |
|
"loss": 4.3483, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.913657498249106e-05, |
|
"loss": 4.3495, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912818903498054e-05, |
|
"loss": 4.3432, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911980308747002e-05, |
|
"loss": 4.3316, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91114171399595e-05, |
|
"loss": 4.335, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.910303119244898e-05, |
|
"loss": 4.322, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909464524493846e-05, |
|
"loss": 4.3218, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908627567623168e-05, |
|
"loss": 4.3195, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907788972872115e-05, |
|
"loss": 4.3196, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906950378121063e-05, |
|
"loss": 4.314, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906111783370011e-05, |
|
"loss": 4.3183, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905273188618959e-05, |
|
"loss": 4.3097, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904434593867907e-05, |
|
"loss": 4.3127, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903595999116855e-05, |
|
"loss": 4.3213, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.902759042246176e-05, |
|
"loss": 4.3152, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901920447495124e-05, |
|
"loss": 4.3094, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901081852744072e-05, |
|
"loss": 4.2991, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90024325799302e-05, |
|
"loss": 4.2987, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8994046632419686e-05, |
|
"loss": 4.2997, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8985660684909166e-05, |
|
"loss": 4.2858, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8977291116202375e-05, |
|
"loss": 4.2887, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968905168691855e-05, |
|
"loss": 4.2851, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8960519221181335e-05, |
|
"loss": 4.2893, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952133273670815e-05, |
|
"loss": 4.2801, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943747326160294e-05, |
|
"loss": 4.2895, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8935377757453504e-05, |
|
"loss": 4.2816, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8926991809942984e-05, |
|
"loss": 4.2735, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918605862432463e-05, |
|
"loss": 4.2684, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8910219914921943e-05, |
|
"loss": 4.2846, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8901833967411423e-05, |
|
"loss": 4.2742, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88934480199009e-05, |
|
"loss": 4.2679, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888506207239038e-05, |
|
"loss": 4.2691, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88766925036836e-05, |
|
"loss": 4.2702, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.886830655617308e-05, |
|
"loss": 4.2672, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885992060866256e-05, |
|
"loss": 4.2696, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885155103995577e-05, |
|
"loss": 4.2588, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884316509244525e-05, |
|
"loss": 4.2571, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883477914493473e-05, |
|
"loss": 4.2581, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882639319742421e-05, |
|
"loss": 4.25, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881800724991369e-05, |
|
"loss": 4.2452, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880962130240316e-05, |
|
"loss": 4.245, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880123535489264e-05, |
|
"loss": 4.2489, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879286578618586e-05, |
|
"loss": 4.2394, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878447983867534e-05, |
|
"loss": 4.2489, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877609389116482e-05, |
|
"loss": 4.2412, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87677079436543e-05, |
|
"loss": 4.2424, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875933837494751e-05, |
|
"loss": 4.2462, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8750952427436986e-05, |
|
"loss": 4.2278, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.198204517364502, |
|
"eval_runtime": 302.7571, |
|
"eval_samples_per_second": 1260.387, |
|
"eval_steps_per_second": 39.388, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8742566479926466e-05, |
|
"loss": 4.2153, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8734180532415946e-05, |
|
"loss": 4.2138, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872581096370916e-05, |
|
"loss": 4.238, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8717425016198635e-05, |
|
"loss": 4.2213, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709039068688115e-05, |
|
"loss": 4.2241, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700653121177595e-05, |
|
"loss": 4.2182, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692267173667075e-05, |
|
"loss": 4.2109, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683881226156555e-05, |
|
"loss": 4.202, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.867551165744977e-05, |
|
"loss": 4.2111, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866712570993925e-05, |
|
"loss": 4.2031, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865873976242873e-05, |
|
"loss": 4.2263, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865035381491821e-05, |
|
"loss": 4.2114, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.864198424621142e-05, |
|
"loss": 4.1925, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.86335982987009e-05, |
|
"loss": 4.1953, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.862521235119038e-05, |
|
"loss": 4.1971, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.861682640367986e-05, |
|
"loss": 4.1843, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860844045616934e-05, |
|
"loss": 4.1969, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860005450865882e-05, |
|
"loss": 4.1913, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85916685611483e-05, |
|
"loss": 4.1805, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858328261363778e-05, |
|
"loss": 4.2049, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.857489666612726e-05, |
|
"loss": 4.1852, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.856651071861674e-05, |
|
"loss": 4.186, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8558141149909955e-05, |
|
"loss": 4.1768, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8549755202399435e-05, |
|
"loss": 4.1956, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8541369254888915e-05, |
|
"loss": 4.1737, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8532983307378395e-05, |
|
"loss": 4.1744, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8524597359867875e-05, |
|
"loss": 4.1703, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8516227791161084e-05, |
|
"loss": 4.1688, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8507841843650564e-05, |
|
"loss": 4.1673, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8499455896140044e-05, |
|
"loss": 4.166, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8491069948629524e-05, |
|
"loss": 4.1693, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8482684001119e-05, |
|
"loss": 4.1671, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847429805360848e-05, |
|
"loss": 4.1652, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8465912106097964e-05, |
|
"loss": 4.1701, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845754253739117e-05, |
|
"loss": 4.1644, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844915658988065e-05, |
|
"loss": 4.1653, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844077064237013e-05, |
|
"loss": 4.1492, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.843238469485961e-05, |
|
"loss": 4.1523, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.842399874734909e-05, |
|
"loss": 4.1433, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841561279983857e-05, |
|
"loss": 4.1509, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840724323113178e-05, |
|
"loss": 4.1423, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839885728362126e-05, |
|
"loss": 4.1517, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839047133611074e-05, |
|
"loss": 4.1339, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838208538860022e-05, |
|
"loss": 4.1537, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83736994410897e-05, |
|
"loss": 4.1457, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836531349357918e-05, |
|
"loss": 4.1453, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83569439248724e-05, |
|
"loss": 4.1428, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834855797736188e-05, |
|
"loss": 4.1279, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834017202985136e-05, |
|
"loss": 4.13, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.1439, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832340013483032e-05, |
|
"loss": 4.1345, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83150141873198e-05, |
|
"loss": 4.1282, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830662823980928e-05, |
|
"loss": 4.1195, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8298258671102486e-05, |
|
"loss": 4.1293, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8289872723591966e-05, |
|
"loss": 4.1114, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8281486776081446e-05, |
|
"loss": 4.137, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8273100828570926e-05, |
|
"loss": 4.1076, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8264714881060406e-05, |
|
"loss": 4.127, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8256328933549886e-05, |
|
"loss": 4.1235, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8247942986039366e-05, |
|
"loss": 4.1103, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823957341733258e-05, |
|
"loss": 4.1243, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823118746982206e-05, |
|
"loss": 4.1121, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8222801522311535e-05, |
|
"loss": 4.1111, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214415574801015e-05, |
|
"loss": 4.0977, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8206029627290495e-05, |
|
"loss": 4.1234, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8197643679779975e-05, |
|
"loss": 4.0986, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189257732269455e-05, |
|
"loss": 4.107, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180888163562664e-05, |
|
"loss": 4.0888, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172502216052144e-05, |
|
"loss": 4.1007, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164116268541624e-05, |
|
"loss": 4.1112, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155730321031104e-05, |
|
"loss": 4.1088, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.814736075232432e-05, |
|
"loss": 4.1031, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.81389748048138e-05, |
|
"loss": 4.1086, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813058885730328e-05, |
|
"loss": 4.1114, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812220290979276e-05, |
|
"loss": 4.0973, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811381696228224e-05, |
|
"loss": 4.1069, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810544739357545e-05, |
|
"loss": 4.0937, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.809706144606493e-05, |
|
"loss": 4.0938, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808867549855441e-05, |
|
"loss": 4.0892, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808028955104389e-05, |
|
"loss": 4.0989, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80719199823371e-05, |
|
"loss": 4.0985, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806353403482658e-05, |
|
"loss": 4.1027, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.805514808731606e-05, |
|
"loss": 4.0917, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804676213980554e-05, |
|
"loss": 4.072, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803839257109875e-05, |
|
"loss": 4.0766, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803000662358823e-05, |
|
"loss": 4.0888, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802162067607771e-05, |
|
"loss": 4.0822, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801323472856719e-05, |
|
"loss": 4.0807, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80048651598604e-05, |
|
"loss": 4.0884, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799647921234988e-05, |
|
"loss": 4.0742, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798809326483936e-05, |
|
"loss": 4.0768, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797970731732884e-05, |
|
"loss": 4.0781, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797133774862205e-05, |
|
"loss": 4.0878, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796295180111153e-05, |
|
"loss": 4.0841, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795456585360101e-05, |
|
"loss": 4.0889, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.794617990609049e-05, |
|
"loss": 4.0642, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793781033738371e-05, |
|
"loss": 4.073, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792942438987319e-05, |
|
"loss": 4.0714, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792103844236267e-05, |
|
"loss": 4.0711, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791265249485215e-05, |
|
"loss": 4.0835, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790426654734163e-05, |
|
"loss": 4.0634, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7895896978634836e-05, |
|
"loss": 4.0669, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7887511031124316e-05, |
|
"loss": 4.074, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7879125083613796e-05, |
|
"loss": 4.0708, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7870739136103276e-05, |
|
"loss": 4.0543, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7862369567396485e-05, |
|
"loss": 4.0656, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7853983619885965e-05, |
|
"loss": 4.0573, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7845597672375445e-05, |
|
"loss": 4.0569, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7837211724864925e-05, |
|
"loss": 4.0536, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782882577735441e-05, |
|
"loss": 4.0531, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782045620864762e-05, |
|
"loss": 4.0534, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.78120702611371e-05, |
|
"loss": 4.0642, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.780368431362658e-05, |
|
"loss": 4.0494, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.779529836611606e-05, |
|
"loss": 4.0583, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.778692879740927e-05, |
|
"loss": 4.0687, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777854284989875e-05, |
|
"loss": 4.0629, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777017328119196e-05, |
|
"loss": 4.0586, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.776178733368144e-05, |
|
"loss": 4.0525, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775340138617092e-05, |
|
"loss": 4.0492, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.77450154386604e-05, |
|
"loss": 4.0526, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.773662949114988e-05, |
|
"loss": 4.0443, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7728259922443094e-05, |
|
"loss": 4.0453, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7719873974932574e-05, |
|
"loss": 4.046, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7711488027422054e-05, |
|
"loss": 4.0504, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7703102079911534e-05, |
|
"loss": 4.047, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7694716132401014e-05, |
|
"loss": 4.0485, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7686330184890494e-05, |
|
"loss": 4.0481, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.767794423737997e-05, |
|
"loss": 4.0367, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766957466867318e-05, |
|
"loss": 4.0365, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766118872116266e-05, |
|
"loss": 4.0551, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7652802773652136e-05, |
|
"loss": 4.0489, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7644416826141616e-05, |
|
"loss": 4.044, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7636030878631096e-05, |
|
"loss": 4.0391, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762766130992431e-05, |
|
"loss": 4.0439, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761927536241379e-05, |
|
"loss": 4.0417, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761088941490327e-05, |
|
"loss": 4.0507, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760250346739275e-05, |
|
"loss": 4.0414, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759413389868596e-05, |
|
"loss": 4.0368, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758574795117544e-05, |
|
"loss": 4.0389, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757736200366492e-05, |
|
"loss": 4.0347, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75689760561544e-05, |
|
"loss": 4.0279, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756060648744761e-05, |
|
"loss": 4.0322, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755222053993709e-05, |
|
"loss": 4.0382, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754383459242657e-05, |
|
"loss": 4.0225, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753544864491605e-05, |
|
"loss": 4.0408, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7527079076209266e-05, |
|
"loss": 4.0322, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7518693128698745e-05, |
|
"loss": 4.0297, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7510307181188225e-05, |
|
"loss": 4.0355, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7501921233677705e-05, |
|
"loss": 4.022, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.027729034423828, |
|
"eval_runtime": 302.5198, |
|
"eval_samples_per_second": 1261.375, |
|
"eval_steps_per_second": 39.419, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7493535286167185e-05, |
|
"loss": 4.0091, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7485149338656665e-05, |
|
"loss": 4.0107, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7476763391146145e-05, |
|
"loss": 4.0363, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7468377443635625e-05, |
|
"loss": 4.0207, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7459991496125105e-05, |
|
"loss": 4.0299, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7451605548614585e-05, |
|
"loss": 4.0155, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7443219601104065e-05, |
|
"loss": 4.0168, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7434833653593545e-05, |
|
"loss": 4.0049, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7426447706083025e-05, |
|
"loss": 4.0167, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7418061758572505e-05, |
|
"loss": 4.0108, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7409675811061985e-05, |
|
"loss": 4.0314, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401289863551465e-05, |
|
"loss": 4.0211, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.739293667364841e-05, |
|
"loss": 3.9997, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.738455072613789e-05, |
|
"loss": 4.0054, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.737616477862737e-05, |
|
"loss": 4.0103, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.736777883111685e-05, |
|
"loss": 3.9944, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.735939288360632e-05, |
|
"loss": 4.0083, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73510069360958e-05, |
|
"loss": 4.0086, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.734262098858528e-05, |
|
"loss": 3.9988, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733423504107476e-05, |
|
"loss": 4.0262, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732586547236797e-05, |
|
"loss": 4.0019, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731747952485746e-05, |
|
"loss": 4.0083, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730909357734694e-05, |
|
"loss": 3.999, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730070762983642e-05, |
|
"loss": 4.0161, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.729233806112963e-05, |
|
"loss": 3.9915, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.728395211361911e-05, |
|
"loss": 4.0005, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727556616610859e-05, |
|
"loss": 3.9929, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726718021859807e-05, |
|
"loss": 3.998, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7258810649891277e-05, |
|
"loss": 3.9925, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7250424702380757e-05, |
|
"loss": 3.9929, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7242038754870237e-05, |
|
"loss": 3.9991, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7233652807359716e-05, |
|
"loss": 3.997, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7225283238652926e-05, |
|
"loss": 3.9921, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721689729114241e-05, |
|
"loss": 4.0027, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720851134363189e-05, |
|
"loss": 3.9928, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720012539612137e-05, |
|
"loss": 3.9981, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719175582741458e-05, |
|
"loss": 3.9835, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.718336987990406e-05, |
|
"loss": 3.9878, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717498393239354e-05, |
|
"loss": 3.9749, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.716659798488302e-05, |
|
"loss": 3.9898, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715822841617623e-05, |
|
"loss": 3.9765, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714984246866571e-05, |
|
"loss": 3.9905, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714145652115519e-05, |
|
"loss": 3.9759, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.713307057364467e-05, |
|
"loss": 3.9871, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.712470100493788e-05, |
|
"loss": 3.9899, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7116315057427366e-05, |
|
"loss": 3.9842, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7107929109916846e-05, |
|
"loss": 3.9856, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7099543162406326e-05, |
|
"loss": 3.9708, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7091173593699535e-05, |
|
"loss": 3.9721, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7082787646189015e-05, |
|
"loss": 3.9872, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7074401698678495e-05, |
|
"loss": 3.9778, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7066015751167975e-05, |
|
"loss": 3.9775, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7057646182461184e-05, |
|
"loss": 3.9644, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704927661375439e-05, |
|
"loss": 3.9781, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704089066624387e-05, |
|
"loss": 3.9544, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.703250471873335e-05, |
|
"loss": 3.9878, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702411877122283e-05, |
|
"loss": 3.9558, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.701573282371232e-05, |
|
"loss": 3.9763, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70073468762018e-05, |
|
"loss": 3.9773, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699896092869128e-05, |
|
"loss": 3.9599, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699057498118076e-05, |
|
"loss": 3.9784, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.698220541247397e-05, |
|
"loss": 3.9628, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.697381946496345e-05, |
|
"loss": 3.964, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.696543351745293e-05, |
|
"loss": 3.952, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.695704756994241e-05, |
|
"loss": 3.9766, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694867800123562e-05, |
|
"loss": 3.9557, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.69402920537251e-05, |
|
"loss": 3.9609, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693190610621458e-05, |
|
"loss": 3.9473, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.692352015870406e-05, |
|
"loss": 3.9563, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6915150589997267e-05, |
|
"loss": 3.9664, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.690676464248675e-05, |
|
"loss": 3.9695, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.689837869497623e-05, |
|
"loss": 3.9581, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688999274746571e-05, |
|
"loss": 3.9721, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688162317875892e-05, |
|
"loss": 3.9701, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.68732372312484e-05, |
|
"loss": 3.9617, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.686485128373788e-05, |
|
"loss": 3.9624, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.685646533622736e-05, |
|
"loss": 3.9597, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.684809576752057e-05, |
|
"loss": 3.9542, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683970982001005e-05, |
|
"loss": 3.954, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683132387249953e-05, |
|
"loss": 3.96, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.682293792498901e-05, |
|
"loss": 3.9627, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.681456835628222e-05, |
|
"loss": 3.9699, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.680618240877171e-05, |
|
"loss": 3.9544, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.679779646126119e-05, |
|
"loss": 3.9353, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678941051375067e-05, |
|
"loss": 3.9439, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6781040945043876e-05, |
|
"loss": 3.9539, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6772654997533356e-05, |
|
"loss": 3.9505, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6764269050022836e-05, |
|
"loss": 3.9479, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6755883102512316e-05, |
|
"loss": 3.9543, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6747513533805525e-05, |
|
"loss": 3.9489, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6739127586295005e-05, |
|
"loss": 3.9393, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6730741638784485e-05, |
|
"loss": 3.9497, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6722355691273965e-05, |
|
"loss": 3.9589, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6713986122567174e-05, |
|
"loss": 3.9531, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670560017505666e-05, |
|
"loss": 3.9583, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669721422754614e-05, |
|
"loss": 3.9415, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668882828003562e-05, |
|
"loss": 3.9446, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668045871132883e-05, |
|
"loss": 3.9413, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.667207276381831e-05, |
|
"loss": 3.9457, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.666368681630779e-05, |
|
"loss": 3.9545, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665530086879727e-05, |
|
"loss": 3.9385, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664693130009048e-05, |
|
"loss": 3.9397, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663854535257996e-05, |
|
"loss": 3.9476, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663015940506944e-05, |
|
"loss": 3.9488, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.662177345755892e-05, |
|
"loss": 3.9306, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6613420267655863e-05, |
|
"loss": 3.9381, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.660503432014534e-05, |
|
"loss": 3.9392, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.659664837263482e-05, |
|
"loss": 3.9339, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65882624251243e-05, |
|
"loss": 3.9298, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657987647761378e-05, |
|
"loss": 3.9321, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657149053010326e-05, |
|
"loss": 3.9358, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.656310458259274e-05, |
|
"loss": 3.938, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.655471863508222e-05, |
|
"loss": 3.929, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.654634906637543e-05, |
|
"loss": 3.9371, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.653796311886491e-05, |
|
"loss": 3.9474, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652957717135439e-05, |
|
"loss": 3.949, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65212076026476e-05, |
|
"loss": 3.9373, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651282165513708e-05, |
|
"loss": 3.9367, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.650443570762657e-05, |
|
"loss": 3.9314, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.649604976011605e-05, |
|
"loss": 3.9355, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.648766381260553e-05, |
|
"loss": 3.9252, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647929424389874e-05, |
|
"loss": 3.9278, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647090829638822e-05, |
|
"loss": 3.9285, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64625223488777e-05, |
|
"loss": 3.938, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.645413640136718e-05, |
|
"loss": 3.9302, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.644575045385665e-05, |
|
"loss": 3.9327, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.643736450634613e-05, |
|
"loss": 3.9322, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642897855883561e-05, |
|
"loss": 3.9229, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642059261132509e-05, |
|
"loss": 3.9205, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6412223042618306e-05, |
|
"loss": 3.9408, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6403837095107786e-05, |
|
"loss": 3.9383, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6395451147597266e-05, |
|
"loss": 3.9303, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6387081578890475e-05, |
|
"loss": 3.9268, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637871201018369e-05, |
|
"loss": 3.9271, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637032606267317e-05, |
|
"loss": 3.9306, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.636194011516265e-05, |
|
"loss": 3.9418, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6353554167652124e-05, |
|
"loss": 3.929, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6345168220141604e-05, |
|
"loss": 3.9228, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6336782272631084e-05, |
|
"loss": 3.9264, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6328396325120564e-05, |
|
"loss": 3.9232, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6320010377610043e-05, |
|
"loss": 3.9204, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631164080890326e-05, |
|
"loss": 3.9203, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.630325486139274e-05, |
|
"loss": 3.9275, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.629486891388222e-05, |
|
"loss": 3.9163, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.62864829663717e-05, |
|
"loss": 3.9289, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.627811339766491e-05, |
|
"loss": 3.9262, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626972745015439e-05, |
|
"loss": 3.9209, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626134150264387e-05, |
|
"loss": 3.9269, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.625295555513335e-05, |
|
"loss": 3.9114, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.95308780670166, |
|
"eval_runtime": 304.6541, |
|
"eval_samples_per_second": 1252.539, |
|
"eval_steps_per_second": 39.143, |
|
"step": 228960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624458598642656e-05, |
|
"loss": 3.915, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.623620003891604e-05, |
|
"loss": 3.9042, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.622781409140552e-05, |
|
"loss": 3.9287, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6219428143895e-05, |
|
"loss": 3.9135, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621105857518821e-05, |
|
"loss": 3.9283, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.620267262767769e-05, |
|
"loss": 3.9081, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.619428668016717e-05, |
|
"loss": 3.9171, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.618591711146038e-05, |
|
"loss": 3.8989, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.617753116394986e-05, |
|
"loss": 3.9165, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616914521643934e-05, |
|
"loss": 3.9041, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616077564773255e-05, |
|
"loss": 3.9276, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.615238970022203e-05, |
|
"loss": 3.9171, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.614402013151524e-05, |
|
"loss": 3.8957, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.613563418400472e-05, |
|
"loss": 3.9043, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.61272482364942e-05, |
|
"loss": 3.9104, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611886228898368e-05, |
|
"loss": 3.8943, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611047634147316e-05, |
|
"loss": 3.9046, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.610209039396265e-05, |
|
"loss": 3.9067, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.609370444645213e-05, |
|
"loss": 3.8982, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6085318498941607e-05, |
|
"loss": 3.9294, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6076932551431087e-05, |
|
"loss": 3.902, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6068546603920566e-05, |
|
"loss": 3.9048, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6060177035213776e-05, |
|
"loss": 3.9026, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6051791087703256e-05, |
|
"loss": 3.9106, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6043405140192735e-05, |
|
"loss": 3.8958, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6035019192682215e-05, |
|
"loss": 3.9016, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6026633245171695e-05, |
|
"loss": 3.8971, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6018263676464904e-05, |
|
"loss": 3.9002, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6009877728954384e-05, |
|
"loss": 3.8941, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.6001491781443864e-05, |
|
"loss": 3.8956, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5993105833933344e-05, |
|
"loss": 3.8977, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.598471988642283e-05, |
|
"loss": 3.9027, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.597633393891231e-05, |
|
"loss": 3.9027, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5967947991401784e-05, |
|
"loss": 3.9028, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5959578422695e-05, |
|
"loss": 3.8961, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595119247518448e-05, |
|
"loss": 3.9025, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.594280652767395e-05, |
|
"loss": 3.8846, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.593442058016343e-05, |
|
"loss": 3.899, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.592603463265291e-05, |
|
"loss": 3.8816, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.591764868514239e-05, |
|
"loss": 3.8918, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.590926273763187e-05, |
|
"loss": 3.8831, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.590087679012135e-05, |
|
"loss": 3.9, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.58925236002183e-05, |
|
"loss": 3.8821, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5884137652707785e-05, |
|
"loss": 3.8901, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.587575170519726e-05, |
|
"loss": 3.9007, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.586736575768674e-05, |
|
"loss": 3.893, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585897981017622e-05, |
|
"loss": 3.8952, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585061024146943e-05, |
|
"loss": 3.8799, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.584222429395891e-05, |
|
"loss": 3.8795, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.583383834644839e-05, |
|
"loss": 3.8934, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.582545239893787e-05, |
|
"loss": 3.8872, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.581706645142735e-05, |
|
"loss": 3.8842, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580868050391683e-05, |
|
"loss": 3.8778, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5800310935210036e-05, |
|
"loss": 3.8886, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.579192498769952e-05, |
|
"loss": 3.8676, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5783539040189e-05, |
|
"loss": 3.8898, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.577515309267848e-05, |
|
"loss": 3.8708, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.576676714516796e-05, |
|
"loss": 3.882, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.575838119765744e-05, |
|
"loss": 3.892, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.574999525014692e-05, |
|
"loss": 3.8722, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.57416093026364e-05, |
|
"loss": 3.8855, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.573323973392961e-05, |
|
"loss": 3.8773, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.572485378641909e-05, |
|
"loss": 3.8731, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.57164842177123e-05, |
|
"loss": 3.8693, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.570809827020178e-05, |
|
"loss": 3.8849, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569971232269126e-05, |
|
"loss": 3.8712, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569132637518074e-05, |
|
"loss": 3.8743, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.568294042767022e-05, |
|
"loss": 3.8621, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.567455448015971e-05, |
|
"loss": 3.867, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.566616853264919e-05, |
|
"loss": 3.8787, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5657798963942396e-05, |
|
"loss": 3.8837, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5649413016431876e-05, |
|
"loss": 3.8725, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5641027068921356e-05, |
|
"loss": 3.8872, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5632641121410836e-05, |
|
"loss": 3.8814, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5624255173900316e-05, |
|
"loss": 3.8741, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561586922638979e-05, |
|
"loss": 3.8793, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560748327887927e-05, |
|
"loss": 3.8739, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5599113710172485e-05, |
|
"loss": 3.8727, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5590727762661965e-05, |
|
"loss": 3.8701, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5582341815151445e-05, |
|
"loss": 3.8744, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5573955867640925e-05, |
|
"loss": 3.8806, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5565569920130405e-05, |
|
"loss": 3.8784, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5557200351423614e-05, |
|
"loss": 3.873, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5548814403913094e-05, |
|
"loss": 3.8545, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5540428456402574e-05, |
|
"loss": 3.8604, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5532042508892054e-05, |
|
"loss": 3.8699, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5523656561381534e-05, |
|
"loss": 3.8693, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5515270613871014e-05, |
|
"loss": 3.8658, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5506884666360494e-05, |
|
"loss": 3.8705, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54985150976537e-05, |
|
"loss": 3.867, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549012915014318e-05, |
|
"loss": 3.8598, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548174320263266e-05, |
|
"loss": 3.8654, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.547335725512214e-05, |
|
"loss": 3.8724, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.546497130761162e-05, |
|
"loss": 3.8707, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.545660173890484e-05, |
|
"loss": 3.8805, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.544821579139432e-05, |
|
"loss": 3.8625, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54398298438838e-05, |
|
"loss": 3.8603, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543144389637328e-05, |
|
"loss": 3.8639, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.542305794886276e-05, |
|
"loss": 3.8637, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.541467200135224e-05, |
|
"loss": 3.8751, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.540630243264545e-05, |
|
"loss": 3.858, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539791648513493e-05, |
|
"loss": 3.8618, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538953053762441e-05, |
|
"loss": 3.8658, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538114459011389e-05, |
|
"loss": 3.8699, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.537275864260337e-05, |
|
"loss": 3.8519, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5364389073896576e-05, |
|
"loss": 3.8596, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535600312638606e-05, |
|
"loss": 3.861, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534761717887554e-05, |
|
"loss": 3.8543, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533923123136502e-05, |
|
"loss": 3.8479, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53308452838545e-05, |
|
"loss": 3.8571, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5322459336343976e-05, |
|
"loss": 3.8591, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.531408976763719e-05, |
|
"loss": 3.8586, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530570382012667e-05, |
|
"loss": 3.8529, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529731787261615e-05, |
|
"loss": 3.8563, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5288931925105625e-05, |
|
"loss": 3.8716, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5280545977595105e-05, |
|
"loss": 3.8697, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.527217640888832e-05, |
|
"loss": 3.8632, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52637904613778e-05, |
|
"loss": 3.8587, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.525540451386728e-05, |
|
"loss": 3.8559, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.524701856635676e-05, |
|
"loss": 3.8548, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523863261884624e-05, |
|
"loss": 3.8483, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523026305013945e-05, |
|
"loss": 3.8519, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522187710262893e-05, |
|
"loss": 3.852, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521349115511841e-05, |
|
"loss": 3.8612, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520510520760789e-05, |
|
"loss": 3.8535, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.51967356389011e-05, |
|
"loss": 3.8604, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518834969139058e-05, |
|
"loss": 3.8522, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517996374388006e-05, |
|
"loss": 3.8507, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517157779636954e-05, |
|
"loss": 3.8452, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516319184885902e-05, |
|
"loss": 3.8666, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5154822280152234e-05, |
|
"loss": 3.8648, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5146436332641714e-05, |
|
"loss": 3.8551, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5138050385131194e-05, |
|
"loss": 3.8512, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5129664437620674e-05, |
|
"loss": 3.856, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5121278490110154e-05, |
|
"loss": 3.8536, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5112892542599634e-05, |
|
"loss": 3.8672, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5104506595089114e-05, |
|
"loss": 3.8496, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509613702638232e-05, |
|
"loss": 3.8537, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50877510788718e-05, |
|
"loss": 3.8513, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507936513136128e-05, |
|
"loss": 3.8557, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507097918385076e-05, |
|
"loss": 3.8441, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.506259323634024e-05, |
|
"loss": 3.8465, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.505422366763345e-05, |
|
"loss": 3.8545, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.504583772012293e-05, |
|
"loss": 3.8464, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.503745177261242e-05, |
|
"loss": 3.8544, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50290658251019e-05, |
|
"loss": 3.8558, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502067987759138e-05, |
|
"loss": 3.8488, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501231030888459e-05, |
|
"loss": 3.8515, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500392436137407e-05, |
|
"loss": 3.8457, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 3.9113528728485107, |
|
"eval_runtime": 304.0304, |
|
"eval_samples_per_second": 1255.108, |
|
"eval_steps_per_second": 39.223, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.499553841386355e-05, |
|
"loss": 3.835, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.498715246635303e-05, |
|
"loss": 3.8334, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497876651884251e-05, |
|
"loss": 3.855, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497038057133199e-05, |
|
"loss": 3.8469, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.496199462382146e-05, |
|
"loss": 3.8579, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.495360867631094e-05, |
|
"loss": 3.8369, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.494522272880042e-05, |
|
"loss": 3.8458, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49368367812899e-05, |
|
"loss": 3.8284, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4928467212583116e-05, |
|
"loss": 3.8483, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4920081265072596e-05, |
|
"loss": 3.8335, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4911695317562076e-05, |
|
"loss": 3.8516, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4903309370051556e-05, |
|
"loss": 3.849, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4894939801344765e-05, |
|
"loss": 3.8236, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4886553853834245e-05, |
|
"loss": 3.8379, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4878167906323725e-05, |
|
"loss": 3.8383, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4869781958813205e-05, |
|
"loss": 3.8259, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4861412390106414e-05, |
|
"loss": 3.8361, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4853026442595894e-05, |
|
"loss": 3.8371, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4844640495085374e-05, |
|
"loss": 3.8334, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4836254547574854e-05, |
|
"loss": 3.8543, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.482788497886807e-05, |
|
"loss": 3.8338, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481949903135755e-05, |
|
"loss": 3.8351, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481111308384703e-05, |
|
"loss": 3.8378, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480272713633651e-05, |
|
"loss": 3.8412, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479435756762972e-05, |
|
"loss": 3.8288, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.47859716201192e-05, |
|
"loss": 3.8402, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.477758567260868e-05, |
|
"loss": 3.8259, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476919972509816e-05, |
|
"loss": 3.8298, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476083015639137e-05, |
|
"loss": 3.8273, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.475244420888085e-05, |
|
"loss": 3.8292, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474405826137033e-05, |
|
"loss": 3.8315, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473567231385981e-05, |
|
"loss": 3.8365, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4727302745153024e-05, |
|
"loss": 3.8369, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4718916797642504e-05, |
|
"loss": 3.8362, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4710530850131984e-05, |
|
"loss": 3.8281, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4702144902621464e-05, |
|
"loss": 3.8381, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.469377533391467e-05, |
|
"loss": 3.8242, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.468538938640415e-05, |
|
"loss": 3.8267, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.467700343889363e-05, |
|
"loss": 3.8179, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466861749138311e-05, |
|
"loss": 3.8251, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466024792267632e-05, |
|
"loss": 3.8208, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.46518619751658e-05, |
|
"loss": 3.8341, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.464347602765528e-05, |
|
"loss": 3.8167, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.463509008014476e-05, |
|
"loss": 3.8231, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462670413263425e-05, |
|
"loss": 3.8397, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461833456392746e-05, |
|
"loss": 3.8293, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460994861641694e-05, |
|
"loss": 3.8285, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460156266890642e-05, |
|
"loss": 3.8197, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.45931767213959e-05, |
|
"loss": 3.8095, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4584807152689106e-05, |
|
"loss": 3.8317, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4576421205178586e-05, |
|
"loss": 3.8243, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4568035257668066e-05, |
|
"loss": 3.819, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559649310157546e-05, |
|
"loss": 3.8164, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551279741450755e-05, |
|
"loss": 3.8259, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4542893793940235e-05, |
|
"loss": 3.8029, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4534507846429715e-05, |
|
"loss": 3.8282, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.45261218989192e-05, |
|
"loss": 3.8056, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451775233021241e-05, |
|
"loss": 3.8206, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450936638270189e-05, |
|
"loss": 3.8253, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450098043519137e-05, |
|
"loss": 3.8134, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.449259448768085e-05, |
|
"loss": 3.8176, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448420854017033e-05, |
|
"loss": 3.8168, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.447583897146354e-05, |
|
"loss": 3.8084, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.446745302395302e-05, |
|
"loss": 3.815, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44590670764425e-05, |
|
"loss": 3.8175, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445068112893198e-05, |
|
"loss": 3.812, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444231156022519e-05, |
|
"loss": 3.8119, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.443392561271467e-05, |
|
"loss": 3.8028, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4425539665204156e-05, |
|
"loss": 3.8024, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4417153717693636e-05, |
|
"loss": 3.816, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4408784148986845e-05, |
|
"loss": 3.8252, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4400398201476325e-05, |
|
"loss": 3.808, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4392012253965805e-05, |
|
"loss": 3.8275, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4383626306455285e-05, |
|
"loss": 3.8235, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4375240358944764e-05, |
|
"loss": 3.8112, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4366870790237974e-05, |
|
"loss": 3.8146, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4358484842727454e-05, |
|
"loss": 3.8205, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4350098895216933e-05, |
|
"loss": 3.8117, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4341712947706413e-05, |
|
"loss": 3.8096, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.433334337899962e-05, |
|
"loss": 3.8166, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.43249574314891e-05, |
|
"loss": 3.8161, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.431657148397859e-05, |
|
"loss": 3.8193, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.430818553646807e-05, |
|
"loss": 3.8107, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429981596776128e-05, |
|
"loss": 3.7951, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429143002025076e-05, |
|
"loss": 3.7979, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428304407274024e-05, |
|
"loss": 3.8096, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.427465812522971e-05, |
|
"loss": 3.8106, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426628855652293e-05, |
|
"loss": 3.8077, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.425790260901241e-05, |
|
"loss": 3.8102, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424951666150189e-05, |
|
"loss": 3.8051, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424113071399136e-05, |
|
"loss": 3.802, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4232761145284576e-05, |
|
"loss": 3.8071, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4224375197774056e-05, |
|
"loss": 3.8079, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.421598925026354e-05, |
|
"loss": 3.8154, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4207603302753016e-05, |
|
"loss": 3.8166, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419923373404623e-05, |
|
"loss": 3.8104, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419084778653571e-05, |
|
"loss": 3.7991, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4182461839025185e-05, |
|
"loss": 3.8107, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4174075891514665e-05, |
|
"loss": 3.8024, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.416570632280788e-05, |
|
"loss": 3.8161, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.415732037529736e-05, |
|
"loss": 3.803, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4148934427786834e-05, |
|
"loss": 3.7983, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4140548480276314e-05, |
|
"loss": 3.8141, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.413217891156953e-05, |
|
"loss": 3.8097, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.412379296405901e-05, |
|
"loss": 3.8001, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.411540701654849e-05, |
|
"loss": 3.8043, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410702106903797e-05, |
|
"loss": 3.8048, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4098651500331186e-05, |
|
"loss": 3.7885, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409026555282066e-05, |
|
"loss": 3.7919, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408187960531014e-05, |
|
"loss": 3.8042, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407349365779962e-05, |
|
"loss": 3.7999, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4065124089092835e-05, |
|
"loss": 3.8018, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.405673814158231e-05, |
|
"loss": 3.7961, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404835219407179e-05, |
|
"loss": 3.7967, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403996624656127e-05, |
|
"loss": 3.8159, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4031596677854484e-05, |
|
"loss": 3.8152, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4023210730343963e-05, |
|
"loss": 3.8079, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4014824782833443e-05, |
|
"loss": 3.8029, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4006438835322923e-05, |
|
"loss": 3.796, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.399806926661613e-05, |
|
"loss": 3.799, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398968331910561e-05, |
|
"loss": 3.7937, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398129737159509e-05, |
|
"loss": 3.7995, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.397291142408457e-05, |
|
"loss": 3.7963, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.396454185537778e-05, |
|
"loss": 3.8055, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.395615590786726e-05, |
|
"loss": 3.7993, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.394776996035674e-05, |
|
"loss": 3.8023, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393938401284622e-05, |
|
"loss": 3.7984, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39309980653357e-05, |
|
"loss": 3.7931, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.392262849662892e-05, |
|
"loss": 3.7927, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39142425491184e-05, |
|
"loss": 3.8059, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.390585660160788e-05, |
|
"loss": 3.8094, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.389747065409736e-05, |
|
"loss": 3.8024, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3889101085390566e-05, |
|
"loss": 3.7935, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3880715137880046e-05, |
|
"loss": 3.8035, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3872329190369526e-05, |
|
"loss": 3.7955, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3863943242859006e-05, |
|
"loss": 3.8105, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3855573674152215e-05, |
|
"loss": 3.7968, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3847187726641695e-05, |
|
"loss": 3.7977, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3838801779131175e-05, |
|
"loss": 3.7959, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3830415831620655e-05, |
|
"loss": 3.8041, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.382204626291387e-05, |
|
"loss": 3.7876, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.381366031540335e-05, |
|
"loss": 3.7929, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.380527436789283e-05, |
|
"loss": 3.7995, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.379688842038231e-05, |
|
"loss": 3.7924, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.378851885167552e-05, |
|
"loss": 3.8002, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3780132904165e-05, |
|
"loss": 3.8013, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377174695665448e-05, |
|
"loss": 3.7984, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.376336100914396e-05, |
|
"loss": 3.7933, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.375499144043717e-05, |
|
"loss": 3.7937, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.8858225345611572, |
|
"eval_runtime": 335.7172, |
|
"eval_samples_per_second": 1136.644, |
|
"eval_steps_per_second": 35.521, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.374660549292665e-05, |
|
"loss": 3.7837, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.373821954541613e-05, |
|
"loss": 3.7775, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.372983359790561e-05, |
|
"loss": 3.8026, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3721447650395095e-05, |
|
"loss": 3.795, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3713061702884575e-05, |
|
"loss": 3.8023, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3704675755374055e-05, |
|
"loss": 3.7857, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3696289807863535e-05, |
|
"loss": 3.7887, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3687903860353015e-05, |
|
"loss": 3.7811, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3679534291646224e-05, |
|
"loss": 3.79, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3671148344135704e-05, |
|
"loss": 3.7867, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3662762396625184e-05, |
|
"loss": 3.7974, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3654376449114664e-05, |
|
"loss": 3.7977, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.364600688040787e-05, |
|
"loss": 3.7709, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.363762093289735e-05, |
|
"loss": 3.7877, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.362923498538683e-05, |
|
"loss": 3.7878, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.362084903787631e-05, |
|
"loss": 3.7752, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.361246309036579e-05, |
|
"loss": 3.7824, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.360407714285527e-05, |
|
"loss": 3.7844, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.359569119534475e-05, |
|
"loss": 3.783, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.358730524783423e-05, |
|
"loss": 3.7977, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.357893567912744e-05, |
|
"loss": 3.783, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.357054973161692e-05, |
|
"loss": 3.7887, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.35621637841064e-05, |
|
"loss": 3.7829, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.355377783659588e-05, |
|
"loss": 3.7872, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.354540826788909e-05, |
|
"loss": 3.7771, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.353702232037857e-05, |
|
"loss": 3.7848, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.352863637286805e-05, |
|
"loss": 3.7808, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.352025042535753e-05, |
|
"loss": 3.7765, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.351188085665075e-05, |
|
"loss": 3.7773, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.350349490914023e-05, |
|
"loss": 3.7777, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.349510896162971e-05, |
|
"loss": 3.7801, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3486723014119187e-05, |
|
"loss": 3.7829, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3478353445412396e-05, |
|
"loss": 3.7883, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3469967497901876e-05, |
|
"loss": 3.7862, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3461581550391356e-05, |
|
"loss": 3.7782, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3453195602880836e-05, |
|
"loss": 3.7851, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3444826034174045e-05, |
|
"loss": 3.7766, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3436440086663525e-05, |
|
"loss": 3.7783, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3428054139153005e-05, |
|
"loss": 3.7662, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3419668191642485e-05, |
|
"loss": 3.773, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.34112986229357e-05, |
|
"loss": 3.7749, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.340291267542518e-05, |
|
"loss": 3.7817, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.339452672791466e-05, |
|
"loss": 3.7709, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.338614078040414e-05, |
|
"loss": 3.7731, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.337777121169735e-05, |
|
"loss": 3.7885, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336938526418683e-05, |
|
"loss": 3.7794, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336099931667631e-05, |
|
"loss": 3.7801, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.335261336916579e-05, |
|
"loss": 3.7703, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3344243800459e-05, |
|
"loss": 3.759, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.333585785294848e-05, |
|
"loss": 3.7812, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.332747190543796e-05, |
|
"loss": 3.7767, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.331908595792744e-05, |
|
"loss": 3.7702, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3310716389220654e-05, |
|
"loss": 3.7703, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3302330441710134e-05, |
|
"loss": 3.7787, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3293944494199614e-05, |
|
"loss": 3.7499, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3285558546689094e-05, |
|
"loss": 3.7767, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.32771889779823e-05, |
|
"loss": 3.7621, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.326880303047178e-05, |
|
"loss": 3.7706, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.326041708296126e-05, |
|
"loss": 3.7786, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.325203113545074e-05, |
|
"loss": 3.7674, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.324366156674395e-05, |
|
"loss": 3.766, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.323527561923343e-05, |
|
"loss": 3.7745, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.322688967172291e-05, |
|
"loss": 3.7622, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.321850372421239e-05, |
|
"loss": 3.7619, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.321013415550561e-05, |
|
"loss": 3.7716, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.320174820799509e-05, |
|
"loss": 3.7675, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.319336226048457e-05, |
|
"loss": 3.7604, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.318497631297405e-05, |
|
"loss": 3.7566, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.317660674426726e-05, |
|
"loss": 3.7554, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.316822079675674e-05, |
|
"loss": 3.7663, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3159834849246217e-05, |
|
"loss": 3.7754, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3151448901735697e-05, |
|
"loss": 3.7615, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3143079333028906e-05, |
|
"loss": 3.7809, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3134693385518386e-05, |
|
"loss": 3.776, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3126307438007866e-05, |
|
"loss": 3.7674, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3117921490497346e-05, |
|
"loss": 3.7603, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.310955192179056e-05, |
|
"loss": 3.7742, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.310116597428004e-05, |
|
"loss": 3.7667, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.309278002676952e-05, |
|
"loss": 3.7604, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.308441045806273e-05, |
|
"loss": 3.7735, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.307602451055221e-05, |
|
"loss": 3.7672, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.306763856304169e-05, |
|
"loss": 3.7737, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.305925261553117e-05, |
|
"loss": 3.765, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.305088304682438e-05, |
|
"loss": 3.752, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.304249709931386e-05, |
|
"loss": 3.7496, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.303411115180334e-05, |
|
"loss": 3.7608, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.302572520429282e-05, |
|
"loss": 3.7653, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3017355635586035e-05, |
|
"loss": 3.7609, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3008969688075515e-05, |
|
"loss": 3.7614, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3000583740564995e-05, |
|
"loss": 3.7608, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2992197793054475e-05, |
|
"loss": 3.7524, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2983828224347684e-05, |
|
"loss": 3.7635, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2975442276837164e-05, |
|
"loss": 3.7571, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2967056329326644e-05, |
|
"loss": 3.772, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2958670381816124e-05, |
|
"loss": 3.7726, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.295030081310933e-05, |
|
"loss": 3.7637, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.294191486559881e-05, |
|
"loss": 3.7557, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.293352891808829e-05, |
|
"loss": 3.7603, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.292514297057777e-05, |
|
"loss": 3.7576, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.291677340187099e-05, |
|
"loss": 3.7726, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.290838745436047e-05, |
|
"loss": 3.7556, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.290000150684995e-05, |
|
"loss": 3.7532, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.289161555933943e-05, |
|
"loss": 3.767, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.288324599063264e-05, |
|
"loss": 3.7625, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.287486004312212e-05, |
|
"loss": 3.7539, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.28664740956116e-05, |
|
"loss": 3.7599, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.285808814810108e-05, |
|
"loss": 3.7568, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284971857939429e-05, |
|
"loss": 3.7529, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284133263188377e-05, |
|
"loss": 3.7402, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2832946684373247e-05, |
|
"loss": 3.7591, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2824560736862727e-05, |
|
"loss": 3.7574, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.281619116815594e-05, |
|
"loss": 3.7545, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.280780522064542e-05, |
|
"loss": 3.7542, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.27994192731349e-05, |
|
"loss": 3.7494, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.279103332562438e-05, |
|
"loss": 3.7699, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.278266375691759e-05, |
|
"loss": 3.7698, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.277427780940707e-05, |
|
"loss": 3.7639, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.276589186189655e-05, |
|
"loss": 3.761, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.275750591438603e-05, |
|
"loss": 3.7524, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.274913634567924e-05, |
|
"loss": 3.7527, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.274075039816872e-05, |
|
"loss": 3.7487, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.27323644506582e-05, |
|
"loss": 3.7528, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.272397850314768e-05, |
|
"loss": 3.7556, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2715608934440896e-05, |
|
"loss": 3.7577, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2707222986930376e-05, |
|
"loss": 3.7584, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2698837039419856e-05, |
|
"loss": 3.7535, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2690451091909336e-05, |
|
"loss": 3.7565, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2682081523202545e-05, |
|
"loss": 3.7516, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2673695575692025e-05, |
|
"loss": 3.7515, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2665309628181505e-05, |
|
"loss": 3.7557, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2656923680670985e-05, |
|
"loss": 3.7635, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2648554111964194e-05, |
|
"loss": 3.7639, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2640168164453674e-05, |
|
"loss": 3.7474, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2631782216943154e-05, |
|
"loss": 3.7611, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2623396269432634e-05, |
|
"loss": 3.7544, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.261502670072584e-05, |
|
"loss": 3.7629, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.260664075321533e-05, |
|
"loss": 3.7536, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.259825480570481e-05, |
|
"loss": 3.7577, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.258986885819429e-05, |
|
"loss": 3.753, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.25814992894875e-05, |
|
"loss": 3.7587, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.257311334197698e-05, |
|
"loss": 3.7454, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.256472739446646e-05, |
|
"loss": 3.7514, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.255634144695594e-05, |
|
"loss": 3.753, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.254797187824915e-05, |
|
"loss": 3.7551, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.253958593073863e-05, |
|
"loss": 3.7563, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.253119998322811e-05, |
|
"loss": 3.7515, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.252281403571758e-05, |
|
"loss": 3.763, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.25144444670108e-05, |
|
"loss": 3.7472, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.250605851950028e-05, |
|
"loss": 3.754, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.8698978424072266, |
|
"eval_runtime": 303.666, |
|
"eval_samples_per_second": 1256.614, |
|
"eval_steps_per_second": 39.27, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.249767257198976e-05, |
|
"loss": 3.7504, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2489286624479237e-05, |
|
"loss": 3.7343, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.248091705577245e-05, |
|
"loss": 3.7598, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.247253110826193e-05, |
|
"loss": 3.753, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.246414516075141e-05, |
|
"loss": 3.7619, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2455759213240885e-05, |
|
"loss": 3.7449, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.24473896445341e-05, |
|
"loss": 3.7421, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.243900369702358e-05, |
|
"loss": 3.7441, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2430617749513054e-05, |
|
"loss": 3.7453, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2422231802002534e-05, |
|
"loss": 3.7456, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.241386223329575e-05, |
|
"loss": 3.7548, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.240547628578524e-05, |
|
"loss": 3.7579, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2397106717078446e-05, |
|
"loss": 3.7322, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2388720769567926e-05, |
|
"loss": 3.7423, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2380334822057406e-05, |
|
"loss": 3.7468, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2371948874546886e-05, |
|
"loss": 3.7336, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.236356292703636e-05, |
|
"loss": 3.7403, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.235517697952584e-05, |
|
"loss": 3.7416, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.234679103201532e-05, |
|
"loss": 3.7418, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23384050845048e-05, |
|
"loss": 3.7568, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.233003551579801e-05, |
|
"loss": 3.7473, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.232164956828749e-05, |
|
"loss": 3.7453, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2313263620776975e-05, |
|
"loss": 3.7422, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2304877673266455e-05, |
|
"loss": 3.7477, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2296508104559664e-05, |
|
"loss": 3.7395, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2288122157049144e-05, |
|
"loss": 3.7444, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2279736209538624e-05, |
|
"loss": 3.7366, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2271350262028104e-05, |
|
"loss": 3.7364, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.226298069332131e-05, |
|
"loss": 3.7388, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.225459474581079e-05, |
|
"loss": 3.7372, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.224620879830027e-05, |
|
"loss": 3.7407, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.223783922959348e-05, |
|
"loss": 3.7423, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.222945328208296e-05, |
|
"loss": 3.7485, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.222106733457244e-05, |
|
"loss": 3.7474, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.221268138706193e-05, |
|
"loss": 3.7366, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.220431181835514e-05, |
|
"loss": 3.7442, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.219592587084462e-05, |
|
"loss": 3.7408, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.21875399233341e-05, |
|
"loss": 3.7367, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.217915397582358e-05, |
|
"loss": 3.7241, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2170784407116787e-05, |
|
"loss": 3.7335, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2162398459606267e-05, |
|
"loss": 3.7349, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2154012512095746e-05, |
|
"loss": 3.7393, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2145626564585226e-05, |
|
"loss": 3.7318, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2137256995878436e-05, |
|
"loss": 3.732, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2128871048367915e-05, |
|
"loss": 3.7531, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2120485100857395e-05, |
|
"loss": 3.7367, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.211209915334688e-05, |
|
"loss": 3.741, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.210372958464009e-05, |
|
"loss": 3.7258, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.209534363712957e-05, |
|
"loss": 3.7199, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.208695768961905e-05, |
|
"loss": 3.7451, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.207857174210853e-05, |
|
"loss": 3.7333, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.207020217340174e-05, |
|
"loss": 3.7318, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.206181622589122e-05, |
|
"loss": 3.7297, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.20534302783807e-05, |
|
"loss": 3.7362, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.204504433087018e-05, |
|
"loss": 3.7127, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.203667476216339e-05, |
|
"loss": 3.7348, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.202828881465287e-05, |
|
"loss": 3.7262, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.201990286714235e-05, |
|
"loss": 3.724, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2011516919631836e-05, |
|
"loss": 3.7438, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2003147350925045e-05, |
|
"loss": 3.7296, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1994761403414525e-05, |
|
"loss": 3.7264, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1986375455904005e-05, |
|
"loss": 3.7361, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1977989508393485e-05, |
|
"loss": 3.7223, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1969619939686694e-05, |
|
"loss": 3.7244, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1961233992176174e-05, |
|
"loss": 3.7308, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1952848044665654e-05, |
|
"loss": 3.7273, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1944462097155134e-05, |
|
"loss": 3.7277, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.193609252844834e-05, |
|
"loss": 3.7207, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.192770658093782e-05, |
|
"loss": 3.7119, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.19193206334273e-05, |
|
"loss": 3.7249, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.191093468591679e-05, |
|
"loss": 3.7368, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.190256511721e-05, |
|
"loss": 3.7213, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.189417916969948e-05, |
|
"loss": 3.7443, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.188579322218896e-05, |
|
"loss": 3.7366, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.187740727467844e-05, |
|
"loss": 3.7349, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.186903770597165e-05, |
|
"loss": 3.7197, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.186065175846113e-05, |
|
"loss": 3.7377, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.185226581095061e-05, |
|
"loss": 3.7288, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.184387986344009e-05, |
|
"loss": 3.7204, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1835526673537026e-05, |
|
"loss": 3.737, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.182714072602651e-05, |
|
"loss": 3.7286, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.181875477851599e-05, |
|
"loss": 3.7356, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.181036883100547e-05, |
|
"loss": 3.7277, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.180198288349495e-05, |
|
"loss": 3.7117, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.179359693598443e-05, |
|
"loss": 3.7109, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.178522736727764e-05, |
|
"loss": 3.719, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.177684141976712e-05, |
|
"loss": 3.7281, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.17684554722566e-05, |
|
"loss": 3.725, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.176006952474608e-05, |
|
"loss": 3.722, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.175169995603929e-05, |
|
"loss": 3.7255, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.174331400852877e-05, |
|
"loss": 3.716, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.173492806101825e-05, |
|
"loss": 3.7231, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.172654211350773e-05, |
|
"loss": 3.718, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.171815616599721e-05, |
|
"loss": 3.7394, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.170977021848669e-05, |
|
"loss": 3.7373, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.170138427097618e-05, |
|
"loss": 3.7257, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.169299832346566e-05, |
|
"loss": 3.7186, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1684628754758866e-05, |
|
"loss": 3.7263, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1676242807248346e-05, |
|
"loss": 3.7162, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1667856859737826e-05, |
|
"loss": 3.7376, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1659470912227306e-05, |
|
"loss": 3.7184, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1651101343520515e-05, |
|
"loss": 3.7193, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1642715396009995e-05, |
|
"loss": 3.7278, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1634329448499475e-05, |
|
"loss": 3.7289, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1625943500988955e-05, |
|
"loss": 3.7151, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1617573932282164e-05, |
|
"loss": 3.724, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1609187984771644e-05, |
|
"loss": 3.7162, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.160080203726113e-05, |
|
"loss": 3.7139, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.159241608975061e-05, |
|
"loss": 3.7047, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.158404652104382e-05, |
|
"loss": 3.7216, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.15756605735333e-05, |
|
"loss": 3.7233, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.156727462602278e-05, |
|
"loss": 3.7162, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.155888867851226e-05, |
|
"loss": 3.7184, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.155051910980547e-05, |
|
"loss": 3.7162, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.154213316229495e-05, |
|
"loss": 3.7303, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.153374721478443e-05, |
|
"loss": 3.7357, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.152537764607764e-05, |
|
"loss": 3.7293, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.151699169856712e-05, |
|
"loss": 3.7228, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.15086057510566e-05, |
|
"loss": 3.7137, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1500219803546084e-05, |
|
"loss": 3.7186, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.149183385603556e-05, |
|
"loss": 3.7084, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.148344790852504e-05, |
|
"loss": 3.7169, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.147506196101452e-05, |
|
"loss": 3.72, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1466676013504e-05, |
|
"loss": 3.7192, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1458306444797206e-05, |
|
"loss": 3.7235, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1449920497286686e-05, |
|
"loss": 3.7168, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1441534549776166e-05, |
|
"loss": 3.7263, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1433148602265646e-05, |
|
"loss": 3.7144, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1424779033558855e-05, |
|
"loss": 3.7185, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1416393086048335e-05, |
|
"loss": 3.721, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.140800713853782e-05, |
|
"loss": 3.7227, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.13996211910273e-05, |
|
"loss": 3.7306, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.139125162232051e-05, |
|
"loss": 3.7117, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.138286567480999e-05, |
|
"loss": 3.7264, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.137447972729947e-05, |
|
"loss": 3.7186, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.136611015859268e-05, |
|
"loss": 3.7265, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.135772421108216e-05, |
|
"loss": 3.717, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.134933826357164e-05, |
|
"loss": 3.7238, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.134095231606112e-05, |
|
"loss": 3.714, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.133258274735433e-05, |
|
"loss": 3.7257, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.132419679984381e-05, |
|
"loss": 3.708, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.131581085233329e-05, |
|
"loss": 3.7156, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1307424904822776e-05, |
|
"loss": 3.7169, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1299055336115985e-05, |
|
"loss": 3.7217, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1290669388605465e-05, |
|
"loss": 3.7164, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1282283441094945e-05, |
|
"loss": 3.7192, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1273897493584425e-05, |
|
"loss": 3.7256, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1265527924877634e-05, |
|
"loss": 3.711, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1257141977367114e-05, |
|
"loss": 3.7241, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 3.8603413105010986, |
|
"eval_runtime": 303.9807, |
|
"eval_samples_per_second": 1255.313, |
|
"eval_steps_per_second": 39.229, |
|
"step": 534240 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 3.718175494329354e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|