{
  "best_metric": 3.8459384441375732,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-domain/transformer/4/checkpoints/checkpoint-686880",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 686880,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.9264,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 6.8199,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 6.1893,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 5.9797,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 5.8175,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 5.7212,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 5.6053,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 5.5389,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 5.4515,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 5.4045,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 5.3529,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 5.3134,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 5.2625,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989099906116697e-05,
      "loss": 5.2117,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988261311365645e-05,
      "loss": 5.1768,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987422716614593e-05,
      "loss": 5.1354,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986584121863541e-05,
      "loss": 5.1058,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985745527112489e-05,
      "loss": 5.0837,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984906932361437e-05,
      "loss": 5.0543,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984068337610385e-05,
      "loss": 5.0221,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983229742859333e-05,
      "loss": 5.0098,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.982391148108281e-05,
      "loss": 4.9704,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 4.9542,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 4.93,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 4.9181,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790384069844466e-05,
      "loss": 4.8909,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9781998122333946e-05,
      "loss": 4.8728,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 4.8517,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 4.8332,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756856658606115e-05,
      "loss": 4.8185,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748470711095595e-05,
      "loss": 4.8001,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9740101142388804e-05,
      "loss": 4.7957,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 4.776,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 4.7707,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9714943299857244e-05,
      "loss": 4.7624,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9706557352346724e-05,
      "loss": 4.7444,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969818778363994e-05,
      "loss": 4.7346,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968981821493315e-05,
      "loss": 4.7079,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 4.7037,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 4.6853,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 4.6857,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 4.6719,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 4.6633,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 4.6426,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 4.6525,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962274701365272e-05,
      "loss": 4.6383,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 4.6305,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 4.6229,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 4.5926,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9589219602414374e-05,
      "loss": 4.5901,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958083365490385e-05,
      "loss": 4.5964,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957244770739333e-05,
      "loss": 4.5881,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956406175988281e-05,
      "loss": 4.571,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9555692191176016e-05,
      "loss": 4.5513,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9547306243665496e-05,
      "loss": 4.5545,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 4.5404,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 4.5657,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 4.5251,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951376245362342e-05,
      "loss": 4.5367,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95053765061129e-05,
      "loss": 4.5324,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949700693740611e-05,
      "loss": 4.5076,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948862098989559e-05,
      "loss": 4.5125,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948023504238507e-05,
      "loss": 4.5017,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947184909487455e-05,
      "loss": 4.4842,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946346314736403e-05,
      "loss": 4.4854,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945509357865724e-05,
      "loss": 4.5028,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944670763114672e-05,
      "loss": 4.4806,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.94383216836362e-05,
      "loss": 4.4706,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942993573612568e-05,
      "loss": 4.4615,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942156616741889e-05,
      "loss": 4.4614,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941318021990837e-05,
      "loss": 4.4736,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9404810651201585e-05,
      "loss": 4.4711,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396424703691065e-05,
      "loss": 4.458,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388038756180545e-05,
      "loss": 4.4632,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379652808670025e-05,
      "loss": 4.4648,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371266861159505e-05,
      "loss": 4.4513,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362897292452714e-05,
      "loss": 4.4386,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354511344942194e-05,
      "loss": 4.4299,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346125397431674e-05,
      "loss": 4.4293,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337739449921154e-05,
      "loss": 4.4222,
      "step": 40448
    },
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9329353502410634e-05, |
|
"loss": 4.4163, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9320967554900114e-05, |
|
"loss": 4.4192, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9312581607389594e-05, |
|
"loss": 4.4198, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9304195659879074e-05, |
|
"loss": 4.416, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.929582609117229e-05, |
|
"loss": 4.3902, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.928744014366177e-05, |
|
"loss": 4.3907, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.927905419615125e-05, |
|
"loss": 4.398, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.927066824864073e-05, |
|
"loss": 4.4007, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.926229867993394e-05, |
|
"loss": 4.3826, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.925392911122715e-05, |
|
"loss": 4.384, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.924554316371663e-05, |
|
"loss": 4.3869, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.923715721620611e-05, |
|
"loss": 4.3737, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922878764749932e-05, |
|
"loss": 4.3686, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92204016999888e-05, |
|
"loss": 4.3723, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921201575247828e-05, |
|
"loss": 4.3633, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920362980496776e-05, |
|
"loss": 4.3676, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919524385745724e-05, |
|
"loss": 4.3596, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918685790994672e-05, |
|
"loss": 4.344, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91784719624362e-05, |
|
"loss": 4.3532, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9170086014925676e-05, |
|
"loss": 4.3539, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9161700067415156e-05, |
|
"loss": 4.3485, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9153314119904636e-05, |
|
"loss": 4.3486, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9144928172394116e-05, |
|
"loss": 4.3428, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9136542224883596e-05, |
|
"loss": 4.3362, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9128172656176805e-05, |
|
"loss": 4.3393, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9119786708666285e-05, |
|
"loss": 4.314, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91114171399595e-05, |
|
"loss": 4.3182, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.910303119244898e-05, |
|
"loss": 4.3248, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909464524493846e-05, |
|
"loss": 4.319, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908625929742794e-05, |
|
"loss": 4.311, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907787334991742e-05, |
|
"loss": 4.3075, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90694874024069e-05, |
|
"loss": 4.3041, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906110145489638e-05, |
|
"loss": 4.3087, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905273188618959e-05, |
|
"loss": 4.3046, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904434593867907e-05, |
|
"loss": 4.3048, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903595999116855e-05, |
|
"loss": 4.2987, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.902757404365803e-05, |
|
"loss": 4.3041, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901918809614751e-05, |
|
"loss": 4.2987, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901080214863699e-05, |
|
"loss": 4.2962, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.900241620112647e-05, |
|
"loss": 4.2964, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8994046632419686e-05, |
|
"loss": 4.2962, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8985660684909166e-05, |
|
"loss": 4.284, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8977274737398646e-05, |
|
"loss": 4.2892, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968888789888125e-05, |
|
"loss": 4.2783, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8960519221181335e-05, |
|
"loss": 4.2888, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952133273670815e-05, |
|
"loss": 4.2639, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943747326160294e-05, |
|
"loss": 4.2764, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8935361378649774e-05, |
|
"loss": 4.2615, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8926991809942984e-05, |
|
"loss": 4.2679, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918605862432463e-05, |
|
"loss": 4.252, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8910219914921943e-05, |
|
"loss": 4.2816, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8901833967411423e-05, |
|
"loss": 4.269, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88934480199009e-05, |
|
"loss": 4.2556, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888506207239038e-05, |
|
"loss": 4.2574, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.887667612487986e-05, |
|
"loss": 4.2608, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.886829017736934e-05, |
|
"loss": 4.2651, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885992060866256e-05, |
|
"loss": 4.2624, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885155103995577e-05, |
|
"loss": 4.2485, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884316509244525e-05, |
|
"loss": 4.2569, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883477914493473e-05, |
|
"loss": 4.244, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882639319742421e-05, |
|
"loss": 4.2561, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881800724991369e-05, |
|
"loss": 4.2414, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880962130240316e-05, |
|
"loss": 4.243, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880123535489264e-05, |
|
"loss": 4.24, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879286578618586e-05, |
|
"loss": 4.2307, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878447983867534e-05, |
|
"loss": 4.2379, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877609389116482e-05, |
|
"loss": 4.2342, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87677079436543e-05, |
|
"loss": 4.2321, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875932199614378e-05, |
|
"loss": 4.2344, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875093604863326e-05, |
|
"loss": 4.226, |
|
"step": 76288 |
|
}, |
|
    {
      "epoch": 0.03,
      "eval_loss": 4.1915082931518555,
      "eval_runtime": 320.2057,
      "eval_samples_per_second": 1191.706,
      "eval_steps_per_second": 37.242,
      "step": 76320
    },
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874255010112274e-05, |
|
"loss": 4.2104, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873416415361222e-05, |
|
"loss": 4.2084, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8725794584905426e-05, |
|
"loss": 4.2254, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8717408637394906e-05, |
|
"loss": 4.2118, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709022689884386e-05, |
|
"loss": 4.2294, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700636742373866e-05, |
|
"loss": 4.1993, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692267173667075e-05, |
|
"loss": 4.2081, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683881226156555e-05, |
|
"loss": 4.1915, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.867549527864604e-05, |
|
"loss": 4.2069, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866710933113552e-05, |
|
"loss": 4.201, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8658723383625e-05, |
|
"loss": 4.2054, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865033743611448e-05, |
|
"loss": 4.2021, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.864198424621142e-05, |
|
"loss": 4.1892, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.86335982987009e-05, |
|
"loss": 4.1876, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.862521235119038e-05, |
|
"loss": 4.1844, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.861682640367986e-05, |
|
"loss": 4.1774, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860844045616934e-05, |
|
"loss": 4.187, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860005450865882e-05, |
|
"loss": 4.1841, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85916685611483e-05, |
|
"loss": 4.1778, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858329899244151e-05, |
|
"loss": 4.1982, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8574913044930995e-05, |
|
"loss": 4.1713, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8566527097420475e-05, |
|
"loss": 4.182, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8558141149909955e-05, |
|
"loss": 4.1768, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8549755202399435e-05, |
|
"loss": 4.1842, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8541385633692644e-05, |
|
"loss": 4.1608, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8532999686182124e-05, |
|
"loss": 4.169, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8524613738671604e-05, |
|
"loss": 4.164, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8516227791161084e-05, |
|
"loss": 4.1687, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.850785822245429e-05, |
|
"loss": 4.1518, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849947227494377e-05, |
|
"loss": 4.1555, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849108632743325e-05, |
|
"loss": 4.1639, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.848270037992273e-05, |
|
"loss": 4.1597, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847431443241221e-05, |
|
"loss": 4.1584, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.846592848490169e-05, |
|
"loss": 4.1617, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845755891619491e-05, |
|
"loss": 4.1576, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844917296868439e-05, |
|
"loss": 4.1579, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844078702117387e-05, |
|
"loss": 4.1445, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.843240107366335e-05, |
|
"loss": 4.1495, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.842401512615282e-05, |
|
"loss": 4.1377, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841564555744604e-05, |
|
"loss": 4.1449, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840725960993552e-05, |
|
"loss": 4.1458, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8398873662425e-05, |
|
"loss": 4.1455, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839048771491447e-05, |
|
"loss": 4.1321, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8382118146207687e-05, |
|
"loss": 4.1413, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8373732198697167e-05, |
|
"loss": 4.1448, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8365346251186646e-05, |
|
"loss": 4.1368, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8356960303676126e-05, |
|
"loss": 4.1405, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834859073496934e-05, |
|
"loss": 4.1127, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834020478745882e-05, |
|
"loss": 4.119, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8331818839948295e-05, |
|
"loss": 4.1364, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8323432892437775e-05, |
|
"loss": 4.128, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.831506332373099e-05, |
|
"loss": 4.1225, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830667737622047e-05, |
|
"loss": 4.1073, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8298291428709944e-05, |
|
"loss": 4.1132, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8289905481199424e-05, |
|
"loss": 4.1054, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828153591249264e-05, |
|
"loss": 4.1301, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.827314996498212e-05, |
|
"loss": 4.1055, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.82647640174716e-05, |
|
"loss": 4.1168, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.825637806996108e-05, |
|
"loss": 4.1218, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8248008501254296e-05, |
|
"loss": 4.1006, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823962255374377e-05, |
|
"loss": 4.1085, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823123660623325e-05, |
|
"loss": 4.0999, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.822285065872273e-05, |
|
"loss": 4.0958, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214481090015945e-05, |
|
"loss": 4.0961, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.820609514250542e-05, |
|
"loss": 4.1121, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.81977091949949e-05, |
|
"loss": 4.1028, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.818932324748438e-05, |
|
"loss": 4.0927, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180953678777594e-05, |
|
"loss": 4.091, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172567731267074e-05, |
|
"loss": 4.0917, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164181783756554e-05, |
|
"loss": 4.1077, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155795836246034e-05, |
|
"loss": 4.1082, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.814742626753924e-05, |
|
"loss": 4.1, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813904032002872e-05, |
|
"loss": 4.1103, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.81306543725182e-05, |
|
"loss": 4.1093, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812226842500768e-05, |
|
"loss": 4.1043, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811389885630089e-05, |
|
"loss": 4.0937, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810551290879037e-05, |
|
"loss": 4.0918, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.809712696127985e-05, |
|
"loss": 4.0898, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808874101376933e-05, |
|
"loss": 4.0884, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808035506625881e-05, |
|
"loss": 4.084, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.807198549755203e-05, |
|
"loss": 4.0929, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806359955004151e-05, |
|
"loss": 4.0971, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.805521360253099e-05, |
|
"loss": 4.0894, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804682765502047e-05, |
|
"loss": 4.0731, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8038458086313677e-05, |
|
"loss": 4.0736, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8030072138803156e-05, |
|
"loss": 4.0807, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8021686191292636e-05, |
|
"loss": 4.0887, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8013300243782116e-05, |
|
"loss": 4.0734, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8004930675075325e-05, |
|
"loss": 4.0768, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7996544727564805e-05, |
|
"loss": 4.0818, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7988158780054285e-05, |
|
"loss": 4.0745, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7979772832543765e-05, |
|
"loss": 4.0687, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797140326383698e-05, |
|
"loss": 4.0726, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796301731632646e-05, |
|
"loss": 4.0736, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795464774761967e-05, |
|
"loss": 4.0745, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.794626180010915e-05, |
|
"loss": 4.0754, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793787585259863e-05, |
|
"loss": 4.0576, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792948990508811e-05, |
|
"loss": 4.0725, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792110395757759e-05, |
|
"loss": 4.0665, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791271801006707e-05, |
|
"loss": 4.0739, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790433206255655e-05, |
|
"loss": 4.0686, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.789594611504603e-05, |
|
"loss": 4.0677, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.788757654633924e-05, |
|
"loss": 4.0626, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787919059882872e-05, |
|
"loss": 4.0687, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.78708046513182e-05, |
|
"loss": 4.0456, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.786241870380768e-05, |
|
"loss": 4.0517, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7854049135100895e-05, |
|
"loss": 4.0587, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7845663187590375e-05, |
|
"loss": 4.0534, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7837277240079855e-05, |
|
"loss": 4.048, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7828891292569335e-05, |
|
"loss": 4.047, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7820505345058815e-05, |
|
"loss": 4.0465, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7812119397548295e-05, |
|
"loss": 4.0539, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7803733450037775e-05, |
|
"loss": 4.0482, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7795363881330984e-05, |
|
"loss": 4.0501, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7786977933820464e-05, |
|
"loss": 4.0524, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7778591986309944e-05, |
|
"loss": 4.0559, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7770206038799423e-05, |
|
"loss": 4.0516, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.776183647009263e-05, |
|
"loss": 4.0479, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775345052258212e-05, |
|
"loss": 4.0537, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.77450645750716e-05, |
|
"loss": 4.0535, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.773667862756108e-05, |
|
"loss": 4.0406, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.772830905885429e-05, |
|
"loss": 4.0507, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771992311134377e-05, |
|
"loss": 4.0402, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771153716383325e-05, |
|
"loss": 4.0516, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770315121632272e-05, |
|
"loss": 4.0275, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769478164761594e-05, |
|
"loss": 4.0452, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.768639570010542e-05, |
|
"loss": 4.0274, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76780097525949e-05, |
|
"loss": 4.0329, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766962380508437e-05, |
|
"loss": 4.0235, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7661254236377586e-05, |
|
"loss": 4.0526, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765286828886707e-05, |
|
"loss": 4.042, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764448234135655e-05, |
|
"loss": 4.034, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7636096393846026e-05, |
|
"loss": 4.0287, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762772682513924e-05, |
|
"loss": 4.038, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761934087762872e-05, |
|
"loss": 4.0431, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7610954930118195e-05, |
|
"loss": 4.042, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7602568982607675e-05, |
|
"loss": 4.0319, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759419941390089e-05, |
|
"loss": 4.0369, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758581346639037e-05, |
|
"loss": 4.0307, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7577427518879844e-05, |
|
"loss": 4.0443, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7569041571369324e-05, |
|
"loss": 4.0239, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756067200266254e-05, |
|
"loss": 4.0287, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755228605515202e-05, |
|
"loss": 4.0257, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75439001076415e-05, |
|
"loss": 4.0216, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753551416013098e-05, |
|
"loss": 4.0321, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7527144591424196e-05, |
|
"loss": 4.0255, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751875864391367e-05, |
|
"loss": 4.0274, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751037269640315e-05, |
|
"loss": 4.0254, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750198674889263e-05, |
|
"loss": 4.0213, |
|
"step": 152576 |
|
}, |
|
    {
      "epoch": 1.03,
      "eval_loss": 4.023377895355225,
      "eval_runtime": 603.3722,
      "eval_samples_per_second": 632.431,
      "eval_steps_per_second": 19.764,
      "step": 152640
    },
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.749360080138211e-05, |
|
"loss": 4.0078, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.748521485387159e-05, |
|
"loss": 4.0078, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.747682890636107e-05, |
|
"loss": 4.0244, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746844295885055e-05, |
|
"loss": 4.012, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746005701134003e-05, |
|
"loss": 4.0348, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745167106382951e-05, |
|
"loss": 4.0028, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.744328511631899e-05, |
|
"loss": 4.0128, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7434899168808475e-05, |
|
"loss": 3.9979, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7426513221297955e-05, |
|
"loss": 4.0145, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7418127273787435e-05, |
|
"loss": 4.01, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740974132627691e-05, |
|
"loss": 4.0133, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401371757570124e-05, |
|
"loss": 4.0092, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.739300218886333e-05, |
|
"loss": 3.9992, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.738461624135281e-05, |
|
"loss": 4.0038, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.737623029384229e-05, |
|
"loss": 4.0002, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.736784434633177e-05, |
|
"loss": 3.9943, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.735945839882125e-05, |
|
"loss": 3.9994, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.735107245131073e-05, |
|
"loss": 4.0027, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.734268650380021e-05, |
|
"loss": 3.9951, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733431693509343e-05, |
|
"loss": 4.0166, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732593098758291e-05, |
|
"loss": 3.9938, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731756141887612e-05, |
|
"loss": 4.0012, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73091754713656e-05, |
|
"loss": 3.9981, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730078952385508e-05, |
|
"loss": 4.0073, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.729240357634456e-05, |
|
"loss": 3.9849, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.728401762883403e-05, |
|
"loss": 3.9983, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727563168132351e-05, |
|
"loss": 3.9907, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726724573381299e-05, |
|
"loss": 3.9946, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725885978630247e-05, |
|
"loss": 3.9794, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.725049021759568e-05, |
|
"loss": 3.9839, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724210427008517e-05, |
|
"loss": 3.9922, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7233718322574647e-05, |
|
"loss": 3.9925, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7225332375064127e-05, |
|
"loss": 3.992, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7216962806357336e-05, |
|
"loss": 3.9933, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7208576858846816e-05, |
|
"loss": 3.9882, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7200190911336296e-05, |
|
"loss": 3.9925, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7191804963825775e-05, |
|
"loss": 3.9785, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7183435395118985e-05, |
|
"loss": 3.9911, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7175049447608465e-05, |
|
"loss": 3.9716, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7166663500097945e-05, |
|
"loss": 3.9816, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7158293931391154e-05, |
|
"loss": 3.9813, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7149907983880634e-05, |
|
"loss": 3.9877, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714152203637012e-05, |
|
"loss": 3.9789, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.71331360888596e-05, |
|
"loss": 3.9774, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.712476652015281e-05, |
|
"loss": 3.9861, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.711638057264229e-05, |
|
"loss": 3.9812, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.710799462513177e-05, |
|
"loss": 3.9815, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709960867762125e-05, |
|
"loss": 3.9643, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709123910891446e-05, |
|
"loss": 3.9604, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.708285316140394e-05, |
|
"loss": 3.9765, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.707446721389342e-05, |
|
"loss": 3.974, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70660812663829e-05, |
|
"loss": 3.9725, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.705771169767611e-05, |
|
"loss": 3.9524, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704932575016559e-05, |
|
"loss": 3.9602, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7040939802655074e-05, |
|
"loss": 3.9601, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7032553855144554e-05, |
|
"loss": 3.9716, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702418428643776e-05, |
|
"loss": 3.9562, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.701579833892724e-05, |
|
"loss": 3.9692, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.700741239141672e-05, |
|
"loss": 3.9707, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.69990264439062e-05, |
|
"loss": 3.9558, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699065687519941e-05, |
|
"loss": 3.9599, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.698227092768889e-05, |
|
"loss": 3.9571, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.697388498017837e-05, |
|
"loss": 3.9475, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.696549903266785e-05, |
|
"loss": 3.9566, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.695712946396106e-05, |
|
"loss": 3.9623, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694875989525428e-05, |
|
"loss": 3.9668, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694037394774376e-05, |
|
"loss": 3.9441, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693198800023324e-05, |
|
"loss": 3.9502, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.692360205272272e-05, |
|
"loss": 3.9479, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.69152161052122e-05, |
|
"loss": 3.9729, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6906830157701677e-05, |
|
"loss": 3.9585, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6898460588994886e-05, |
|
"loss": 3.9586, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6890074641484366e-05, |
|
"loss": 3.9729, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6881688693973846e-05, |
|
"loss": 3.9698, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6873302746463326e-05, |
|
"loss": 3.967, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6864916798952806e-05, |
|
"loss": 3.9546, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6856547230246015e-05, |
|
"loss": 3.956, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6848161282735495e-05, |
|
"loss": 3.9544, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683977533522498e-05, |
|
"loss": 3.9506, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683138938771446e-05, |
|
"loss": 3.9522, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.682300344020394e-05, |
|
"loss": 3.9552, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.681461749269342e-05, |
|
"loss": 3.9622, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.68062315451829e-05, |
|
"loss": 3.9516, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.679786197647611e-05, |
|
"loss": 3.9447, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678947602896559e-05, |
|
"loss": 3.9383, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678109008145507e-05, |
|
"loss": 3.9493, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.677270413394455e-05, |
|
"loss": 3.9551, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676431818643403e-05, |
|
"loss": 3.9443, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.675593223892351e-05, |
|
"loss": 3.9409, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.674756267021672e-05, |
|
"loss": 3.954, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.67391767227062e-05, |
|
"loss": 3.9365, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673079077519568e-05, |
|
"loss": 3.9439, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.672240482768516e-05, |
|
"loss": 3.9436, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671401888017464e-05, |
|
"loss": 3.9484, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6705649311467855e-05, |
|
"loss": 3.9484, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6697263363957335e-05, |
|
"loss": 3.9427, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6688877416446815e-05, |
|
"loss": 3.9324, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668049146893629e-05, |
|
"loss": 3.9465, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.667210552142577e-05, |
|
"loss": 3.9371, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.666371957391525e-05, |
|
"loss": 3.9525, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665535000520846e-05, |
|
"loss": 3.9437, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664696405769794e-05, |
|
"loss": 3.9442, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663857811018742e-05, |
|
"loss": 3.9342, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66301921626769e-05, |
|
"loss": 3.9491, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6621806215166383e-05, |
|
"loss": 3.9195, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.661343664645959e-05, |
|
"loss": 3.9287, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.660505069894907e-05, |
|
"loss": 3.9354, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.659666475143855e-05, |
|
"loss": 3.9347, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.658827880392803e-05, |
|
"loss": 3.924, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657989285641751e-05, |
|
"loss": 3.9299, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657150690890699e-05, |
|
"loss": 3.9243, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65631373402002e-05, |
|
"loss": 3.9353, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.655475139268968e-05, |
|
"loss": 3.9274, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.654636544517916e-05, |
|
"loss": 3.9263, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.653797949766864e-05, |
|
"loss": 3.9371, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652960992896185e-05, |
|
"loss": 3.9382, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652122398145134e-05, |
|
"loss": 3.9361, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651283803394082e-05, |
|
"loss": 3.9272, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65044520864303e-05, |
|
"loss": 3.9372, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6496082517723506e-05, |
|
"loss": 3.9332, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6487696570212986e-05, |
|
"loss": 3.9246, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6479310622702466e-05, |
|
"loss": 3.9392, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6470924675191946e-05, |
|
"loss": 3.9204, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6462538727681426e-05, |
|
"loss": 3.9357, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6454152780170906e-05, |
|
"loss": 3.9155, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6445766832660386e-05, |
|
"loss": 3.9303, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6437380885149866e-05, |
|
"loss": 3.9119, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6429027695246804e-05, |
|
"loss": 3.9191, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642064174773629e-05, |
|
"loss": 3.9121, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.641225580022577e-05, |
|
"loss": 3.9368, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.640386985271525e-05, |
|
"loss": 3.9284, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.639550028400846e-05, |
|
"loss": 3.9244, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.638711433649794e-05, |
|
"loss": 3.9167, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637872838898742e-05, |
|
"loss": 3.9232, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.63703424414769e-05, |
|
"loss": 3.9357, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.636195649396638e-05, |
|
"loss": 3.9267, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.635358692525959e-05, |
|
"loss": 3.9201, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.634520097774907e-05, |
|
"loss": 3.9302, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.633681503023855e-05, |
|
"loss": 3.9209, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632842908272803e-05, |
|
"loss": 3.9294, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632004313521751e-05, |
|
"loss": 3.9152, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6311673566510724e-05, |
|
"loss": 3.9203, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6303287619000204e-05, |
|
"loss": 3.9179, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6294901671489684e-05, |
|
"loss": 3.9158, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6286515723979164e-05, |
|
"loss": 3.9176, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6278146155272373e-05, |
|
"loss": 3.9235, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626976020776185e-05, |
|
"loss": 3.9141, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626137426025133e-05, |
|
"loss": 3.917, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.625298831274081e-05, |
|
"loss": 3.9161, |
|
"step": 228864 |
|
}, |
|
    {
      "epoch": 0.03,
      "eval_loss": 3.9497029781341553,
      "eval_runtime": 306.2528,
      "eval_samples_per_second": 1246.0,
      "eval_steps_per_second": 38.938,
      "step": 228960
    },
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624461874403402e-05, |
|
"loss": 3.9081, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.62362327965235e-05, |
|
"loss": 3.9031, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.622784684901298e-05, |
|
"loss": 3.9181, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621946090150246e-05, |
|
"loss": 3.9044, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621109133279568e-05, |
|
"loss": 3.9323, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.620270538528516e-05, |
|
"loss": 3.8967, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.619431943777464e-05, |
|
"loss": 3.9087, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.618593349026412e-05, |
|
"loss": 3.893, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.617754754275359e-05, |
|
"loss": 3.9096, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616917797404681e-05, |
|
"loss": 3.9102, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616079202653629e-05, |
|
"loss": 3.9046, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.615240607902577e-05, |
|
"loss": 3.9096, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.614402013151524e-05, |
|
"loss": 3.8943, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6135650562808456e-05, |
|
"loss": 3.9036, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6127264615297936e-05, |
|
"loss": 3.9004, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6118878667787416e-05, |
|
"loss": 3.8971, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6110492720276896e-05, |
|
"loss": 3.8978, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.610212315157011e-05, |
|
"loss": 3.8988, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.609373720405959e-05, |
|
"loss": 3.897, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6085351256549065e-05, |
|
"loss": 3.9125, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6076965309038545e-05, |
|
"loss": 3.8939, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6068579361528025e-05, |
|
"loss": 3.9031, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606020979282124e-05, |
|
"loss": 3.8999, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6051823845310714e-05, |
|
"loss": 3.9088, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6043437897800194e-05, |
|
"loss": 3.8871, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6035051950289674e-05, |
|
"loss": 3.9024, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.602668238158289e-05, |
|
"loss": 3.893, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.601829643407237e-05, |
|
"loss": 3.8947, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.600991048656185e-05, |
|
"loss": 3.8813, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.600152453905133e-05, |
|
"loss": 3.8925, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.599315497034454e-05, |
|
"loss": 3.8926, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.598476902283402e-05, |
|
"loss": 3.8979, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.59763830753235e-05, |
|
"loss": 3.8946, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.596799712781298e-05, |
|
"loss": 3.8997, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595962755910619e-05, |
|
"loss": 3.8933, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595124161159567e-05, |
|
"loss": 3.8977, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.594285566408515e-05, |
|
"loss": 3.8869, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.593446971657463e-05, |
|
"loss": 3.8937, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5926100147867836e-05, |
|
"loss": 3.8774, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.591771420035732e-05, |
|
"loss": 3.8873, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.59093282528468e-05, |
|
"loss": 3.8892, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.590094230533628e-05, |
|
"loss": 3.8947, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.589257273662949e-05, |
|
"loss": 3.8863, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.588418678911897e-05, |
|
"loss": 3.8857, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.587580084160845e-05, |
|
"loss": 3.893, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.586741489409793e-05, |
|
"loss": 3.8883, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585904532539114e-05, |
|
"loss": 3.8887, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585065937788062e-05, |
|
"loss": 3.8779, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.58422734303701e-05, |
|
"loss": 3.8634, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.583388748285958e-05, |
|
"loss": 3.8852, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.582551791415279e-05, |
|
"loss": 3.881, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.581713196664228e-05, |
|
"loss": 3.8789, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580874601913176e-05, |
|
"loss": 3.8698, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580036007162124e-05, |
|
"loss": 3.8694, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5791990502914446e-05, |
|
"loss": 3.8651, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5783604555403926e-05, |
|
"loss": 3.8832, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5775218607893406e-05, |
|
"loss": 3.8702, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5766832660382886e-05, |
|
"loss": 3.8761, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5758463091676095e-05, |
|
"loss": 3.8851, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5750077144165575e-05, |
|
"loss": 3.8676, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5741691196655055e-05, |
|
"loss": 3.8698, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5733305249144535e-05, |
|
"loss": 3.8738, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5724935680437744e-05, |
|
"loss": 3.8556, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.571654973292723e-05, |
|
"loss": 3.8703, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.570816378541671e-05, |
|
"loss": 3.8774, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569977783790619e-05, |
|
"loss": 3.8774, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.56914082691994e-05, |
|
"loss": 3.8595, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.568302232168888e-05, |
|
"loss": 3.8662, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.567463637417836e-05, |
|
"loss": 3.8606, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.566625042666784e-05, |
|
"loss": 3.8809, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.565786447915732e-05, |
|
"loss": 3.8741, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564949491045053e-05, |
|
"loss": 3.8752, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564110896294001e-05, |
|
"loss": 3.8855, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.563272301542949e-05, |
|
"loss": 3.8847, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.56243534467227e-05, |
|
"loss": 3.8848, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5615967499212184e-05, |
|
"loss": 3.8637, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5607581551701664e-05, |
|
"loss": 3.8733, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5599195604191144e-05, |
|
"loss": 3.8738, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559082603548435e-05, |
|
"loss": 3.8652, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558244008797383e-05, |
|
"loss": 3.8721, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557405414046331e-05, |
|
"loss": 3.8695, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.556566819295279e-05, |
|
"loss": 3.8764, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5557298624246e-05, |
|
"loss": 3.8698, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554891267673548e-05, |
|
"loss": 3.8548, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554052672922496e-05, |
|
"loss": 3.8585, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553214078171444e-05, |
|
"loss": 3.868, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552377121300765e-05, |
|
"loss": 3.8705, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551538526549714e-05, |
|
"loss": 3.8631, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550699931798662e-05, |
|
"loss": 3.8572, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54986133704761e-05, |
|
"loss": 3.8703, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549024380176931e-05, |
|
"loss": 3.8569, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548185785425879e-05, |
|
"loss": 3.8589, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.547347190674827e-05, |
|
"loss": 3.8656, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.546508595923775e-05, |
|
"loss": 3.8699, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.545670001172723e-05, |
|
"loss": 3.8696, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.544831406421671e-05, |
|
"loss": 3.8612, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543992811670619e-05, |
|
"loss": 3.8472, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431542169195667e-05, |
|
"loss": 3.8685, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5423172600488876e-05, |
|
"loss": 3.854, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414786652978356e-05, |
|
"loss": 3.8743, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406400705467836e-05, |
|
"loss": 3.8657, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539801475795732e-05, |
|
"loss": 3.8635, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538964518925053e-05, |
|
"loss": 3.8542, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538125924174001e-05, |
|
"loss": 3.868, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.537287329422949e-05, |
|
"loss": 3.8445, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.536448734671897e-05, |
|
"loss": 3.8535, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535611777801218e-05, |
|
"loss": 3.8507, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534773183050166e-05, |
|
"loss": 3.8562, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533934588299114e-05, |
|
"loss": 3.8469, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5330959935480613e-05, |
|
"loss": 3.8526, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532259036677383e-05, |
|
"loss": 3.851, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.531420441926331e-05, |
|
"loss": 3.8496, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530581847175279e-05, |
|
"loss": 3.8542, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529743252424227e-05, |
|
"loss": 3.8457, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5289062955535485e-05, |
|
"loss": 3.8644, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5280677008024965e-05, |
|
"loss": 3.8601, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5272291060514445e-05, |
|
"loss": 3.8595, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526390511300392e-05, |
|
"loss": 3.8515, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5255535544297134e-05, |
|
"loss": 3.8595, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5247149596786614e-05, |
|
"loss": 3.8601, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523876364927609e-05, |
|
"loss": 3.8474, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523037770176557e-05, |
|
"loss": 3.8607, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522200813305878e-05, |
|
"loss": 3.8467, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521362218554826e-05, |
|
"loss": 3.8579, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520523623803774e-05, |
|
"loss": 3.843, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519685029052722e-05, |
|
"loss": 3.8544, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518848072182044e-05, |
|
"loss": 3.8353, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518009477430992e-05, |
|
"loss": 3.8441, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517170882679939e-05, |
|
"loss": 3.8409, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516332287928887e-05, |
|
"loss": 3.8575, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.515495331058209e-05, |
|
"loss": 3.8497, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.514656736307156e-05, |
|
"loss": 3.8526, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.513818141556104e-05, |
|
"loss": 3.8433, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512979546805052e-05, |
|
"loss": 3.8513, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512142589934374e-05, |
|
"loss": 3.86, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511303995183322e-05, |
|
"loss": 3.8519, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5104654004322697e-05, |
|
"loss": 3.8475, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5096268056812177e-05, |
|
"loss": 3.8571, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.508789848810539e-05, |
|
"loss": 3.8467, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5079512540594866e-05, |
|
"loss": 3.8581, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5071126593084346e-05, |
|
"loss": 3.8388, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062740645573826e-05, |
|
"loss": 3.8495, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5054371076867035e-05, |
|
"loss": 3.8432, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5045985129356515e-05, |
|
"loss": 3.8431, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5037599181845995e-05, |
|
"loss": 3.849, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502922961313921e-05, |
|
"loss": 3.8466, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5020843665628684e-05, |
|
"loss": 3.8452, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501245771811817e-05, |
|
"loss": 3.8396, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500407177060765e-05, |
|
"loss": 3.8489, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 3.908376932144165, |
|
"eval_runtime": 305.9494, |
|
"eval_samples_per_second": 1247.236, |
|
"eval_steps_per_second": 38.977, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.499568582309713e-05, |
|
"loss": 3.8325, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.498729987558661e-05, |
|
"loss": 3.831, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497891392807609e-05, |
|
"loss": 3.8471, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497052798056557e-05, |
|
"loss": 3.8349, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.496214203305505e-05, |
|
"loss": 3.8602, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.495375608554453e-05, |
|
"loss": 3.8343, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.494537013803401e-05, |
|
"loss": 3.8348, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.493698419052349e-05, |
|
"loss": 3.8213, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.49286146218167e-05, |
|
"loss": 3.838, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.492024505310991e-05, |
|
"loss": 3.8449, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491185910559939e-05, |
|
"loss": 3.8333, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.490347315808887e-05, |
|
"loss": 3.8347, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4895087210578355e-05, |
|
"loss": 3.827, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4886701263067835e-05, |
|
"loss": 3.834, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4878315315557315e-05, |
|
"loss": 3.8319, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4869929368046795e-05, |
|
"loss": 3.8266, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4861543420536275e-05, |
|
"loss": 3.8281, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4853173851829484e-05, |
|
"loss": 3.8339, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4844787904318964e-05, |
|
"loss": 3.8263, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4836401956808444e-05, |
|
"loss": 3.8411, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4828016009297924e-05, |
|
"loss": 3.8306, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481964644059113e-05, |
|
"loss": 3.8368, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481126049308061e-05, |
|
"loss": 3.8308, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480287454557009e-05, |
|
"loss": 3.8388, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479448859805957e-05, |
|
"loss": 3.8191, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.478611902935279e-05, |
|
"loss": 3.8316, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.477773308184227e-05, |
|
"loss": 3.8294, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476934713433175e-05, |
|
"loss": 3.8246, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476096118682122e-05, |
|
"loss": 3.8164, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.475259161811444e-05, |
|
"loss": 3.8227, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474420567060392e-05, |
|
"loss": 3.8314, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.47358197230934e-05, |
|
"loss": 3.8302, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472743377558287e-05, |
|
"loss": 3.8312, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4719064206876086e-05, |
|
"loss": 3.8308, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4710678259365566e-05, |
|
"loss": 3.8256, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4702292311855046e-05, |
|
"loss": 3.83, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4693906364344526e-05, |
|
"loss": 3.8247, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.468553679563774e-05, |
|
"loss": 3.8244, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.467715084812722e-05, |
|
"loss": 3.816, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4668764900616695e-05, |
|
"loss": 3.8219, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466039533190991e-05, |
|
"loss": 3.8203, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.465200938439939e-05, |
|
"loss": 3.8301, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.464362343688887e-05, |
|
"loss": 3.8224, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4635237489378344e-05, |
|
"loss": 3.8217, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462686792067156e-05, |
|
"loss": 3.8323, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461848197316104e-05, |
|
"loss": 3.822, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461009602565052e-05, |
|
"loss": 3.8228, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460171007814e-05, |
|
"loss": 3.8129, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4593340509433216e-05, |
|
"loss": 3.799, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4584954561922696e-05, |
|
"loss": 3.8232, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.457656861441217e-05, |
|
"loss": 3.819, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.456818266690165e-05, |
|
"loss": 3.8176, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559813098194865e-05, |
|
"loss": 3.802, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551427150684345e-05, |
|
"loss": 3.81, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.454304120317382e-05, |
|
"loss": 3.798, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.45346552556633e-05, |
|
"loss": 3.8207, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4526285686956514e-05, |
|
"loss": 3.8083, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4517899739445994e-05, |
|
"loss": 3.8105, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450951379193547e-05, |
|
"loss": 3.8227, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4501127844424954e-05, |
|
"loss": 3.8066, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.449275827571816e-05, |
|
"loss": 3.8084, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448437232820764e-05, |
|
"loss": 3.8134, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.447598638069712e-05, |
|
"loss": 3.7935, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44676004331866e-05, |
|
"loss": 3.8087, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445923086447982e-05, |
|
"loss": 3.8111, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445084491696929e-05, |
|
"loss": 3.8171, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444245896945877e-05, |
|
"loss": 3.7971, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.443407302194825e-05, |
|
"loss": 3.8044, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.442570345324147e-05, |
|
"loss": 3.797, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.441731750573094e-05, |
|
"loss": 3.8212, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440893155822042e-05, |
|
"loss": 3.8132, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44005456107099e-05, |
|
"loss": 3.8107, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.439219242080685e-05, |
|
"loss": 3.8255, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.438380647329633e-05, |
|
"loss": 3.8175, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.437542052578581e-05, |
|
"loss": 3.8315, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.436703457827529e-05, |
|
"loss": 3.805, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4358648630764765e-05, |
|
"loss": 3.8113, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4350262683254245e-05, |
|
"loss": 3.8137, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4341876735743725e-05, |
|
"loss": 3.8019, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4333490788233205e-05, |
|
"loss": 3.8149, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4325121219526414e-05, |
|
"loss": 3.8084, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4316735272015894e-05, |
|
"loss": 3.8152, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4308349324505374e-05, |
|
"loss": 3.8098, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4299963376994854e-05, |
|
"loss": 3.7982, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429159380828807e-05, |
|
"loss": 3.8054, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428320786077755e-05, |
|
"loss": 3.8015, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.427482191326703e-05, |
|
"loss": 3.8137, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426643596575651e-05, |
|
"loss": 3.8053, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.425806639704972e-05, |
|
"loss": 3.7959, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.42496804495392e-05, |
|
"loss": 3.809, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424131088083241e-05, |
|
"loss": 3.8046, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.423292493332189e-05, |
|
"loss": 3.7943, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.422453898581137e-05, |
|
"loss": 3.8048, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.421615303830085e-05, |
|
"loss": 3.8126, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.420776709079033e-05, |
|
"loss": 3.8098, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419938114327981e-05, |
|
"loss": 3.8048, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4190995195769294e-05, |
|
"loss": 3.7881, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4182609248258774e-05, |
|
"loss": 3.8075, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4174239679551984e-05, |
|
"loss": 3.7981, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4165853732041464e-05, |
|
"loss": 3.8118, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4157467784530943e-05, |
|
"loss": 3.8108, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4149081837020423e-05, |
|
"loss": 3.8049, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414071226831363e-05, |
|
"loss": 3.7968, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.413234269960684e-05, |
|
"loss": 3.8122, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.412395675209632e-05, |
|
"loss": 3.7829, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.41155708045858e-05, |
|
"loss": 3.7972, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410718485707528e-05, |
|
"loss": 3.7906, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409879890956476e-05, |
|
"loss": 3.8042, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409041296205425e-05, |
|
"loss": 3.7857, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408202701454373e-05, |
|
"loss": 3.7936, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407364106703321e-05, |
|
"loss": 3.794, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406527149832642e-05, |
|
"loss": 3.794, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.40568855508159e-05, |
|
"loss": 3.796, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404849960330538e-05, |
|
"loss": 3.7925, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4040130034598586e-05, |
|
"loss": 3.8068, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4031744087088066e-05, |
|
"loss": 3.8042, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4023358139577546e-05, |
|
"loss": 3.7993, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4014972192067026e-05, |
|
"loss": 3.7926, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4006602623360235e-05, |
|
"loss": 3.8049, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3998216675849715e-05, |
|
"loss": 3.799, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39898307283392e-05, |
|
"loss": 3.7905, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398144478082868e-05, |
|
"loss": 3.8067, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.397305883331816e-05, |
|
"loss": 3.7887, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.396467288580764e-05, |
|
"loss": 3.8005, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.395628693829712e-05, |
|
"loss": 3.7876, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39479009907866e-05, |
|
"loss": 3.7991, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393953142207981e-05, |
|
"loss": 3.785, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393114547456929e-05, |
|
"loss": 3.7864, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39227759058625e-05, |
|
"loss": 3.789, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.391438995835198e-05, |
|
"loss": 3.8013, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.390600401084146e-05, |
|
"loss": 3.7942, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.389761806333094e-05, |
|
"loss": 3.7972, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388923211582042e-05, |
|
"loss": 3.7855, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.38808461683099e-05, |
|
"loss": 3.7995, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.387246022079938e-05, |
|
"loss": 3.8043, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3864090652092595e-05, |
|
"loss": 3.7992, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3855704704582075e-05, |
|
"loss": 3.7931, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.384731875707155e-05, |
|
"loss": 3.7996, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383893280956103e-05, |
|
"loss": 3.7925, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3830563240854244e-05, |
|
"loss": 3.8022, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3822193672147453e-05, |
|
"loss": 3.788, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.381380772463693e-05, |
|
"loss": 3.7961, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.380542177712641e-05, |
|
"loss": 3.7893, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.379703582961589e-05, |
|
"loss": 3.7906, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.378866626090911e-05, |
|
"loss": 3.7927, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.378028031339859e-05, |
|
"loss": 3.791, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377189436588807e-05, |
|
"loss": 3.7938, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.376350841837754e-05, |
|
"loss": 3.7819, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.375512247086702e-05, |
|
"loss": 3.7957, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.883833646774292, |
|
"eval_runtime": 305.722, |
|
"eval_samples_per_second": 1248.163, |
|
"eval_steps_per_second": 39.006, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.37467365233565e-05, |
|
"loss": 3.7866, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.373835057584598e-05, |
|
"loss": 3.7759, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372998100713919e-05, |
|
"loss": 3.795, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372159505962867e-05, |
|
"loss": 3.7839, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.371320911211815e-05, |
|
"loss": 3.8029, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.370482316460763e-05, |
|
"loss": 3.7839, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.369645359590085e-05, |
|
"loss": 3.7754, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.368806764839033e-05, |
|
"loss": 3.7778, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367968170087981e-05, |
|
"loss": 3.7812, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367129575336929e-05, |
|
"loss": 3.7919, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.366290980585877e-05, |
|
"loss": 3.78, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.365452385834825e-05, |
|
"loss": 3.7857, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3646154289641456e-05, |
|
"loss": 3.775, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3637768342130936e-05, |
|
"loss": 3.7777, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3629382394620416e-05, |
|
"loss": 3.7831, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3620996447109896e-05, |
|
"loss": 3.7729, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3612610499599376e-05, |
|
"loss": 3.7731, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3604224552088856e-05, |
|
"loss": 3.7808, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3595838604578336e-05, |
|
"loss": 3.7773, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3587452657067816e-05, |
|
"loss": 3.788, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3579083088361025e-05, |
|
"loss": 3.78, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357069714085051e-05, |
|
"loss": 3.7849, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.356231119333999e-05, |
|
"loss": 3.7815, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.355392524582947e-05, |
|
"loss": 3.785, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.354555567712268e-05, |
|
"loss": 3.7667, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.353716972961216e-05, |
|
"loss": 3.7863, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352880016090537e-05, |
|
"loss": 3.7739, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352041421339485e-05, |
|
"loss": 3.7731, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351202826588433e-05, |
|
"loss": 3.769, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.350364231837381e-05, |
|
"loss": 3.7707, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.349525637086329e-05, |
|
"loss": 3.7815, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.34868868021565e-05, |
|
"loss": 3.7763, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.347850085464598e-05, |
|
"loss": 3.7849, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3470114907135465e-05, |
|
"loss": 3.7828, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3461728959624945e-05, |
|
"loss": 3.7731, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3453359390918154e-05, |
|
"loss": 3.7796, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3444973443407634e-05, |
|
"loss": 3.7753, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3436587495897114e-05, |
|
"loss": 3.7732, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3428201548386594e-05, |
|
"loss": 3.7673, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3419815600876074e-05, |
|
"loss": 3.768, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3411429653365554e-05, |
|
"loss": 3.7737, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.340304370585503e-05, |
|
"loss": 3.7807, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.339465775834451e-05, |
|
"loss": 3.7697, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.338628818963772e-05, |
|
"loss": 3.7731, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.33779022421272e-05, |
|
"loss": 3.7786, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336951629461668e-05, |
|
"loss": 3.7759, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336113034710616e-05, |
|
"loss": 3.7718, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.335274439959564e-05, |
|
"loss": 3.7658, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.334435845208512e-05, |
|
"loss": 3.7452, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.33359725045746e-05, |
|
"loss": 3.7787, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.332758655706408e-05, |
|
"loss": 3.7692, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331921698835729e-05, |
|
"loss": 3.7708, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.33108474196505e-05, |
|
"loss": 3.7579, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.330246147213998e-05, |
|
"loss": 3.7532, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.329407552462946e-05, |
|
"loss": 3.7526, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.328568957711894e-05, |
|
"loss": 3.7669, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.327730362960842e-05, |
|
"loss": 3.7594, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.32689176820979e-05, |
|
"loss": 3.7644, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3260548113391116e-05, |
|
"loss": 3.7721, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3252162165880596e-05, |
|
"loss": 3.7596, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3243776218370076e-05, |
|
"loss": 3.7611, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3235390270859556e-05, |
|
"loss": 3.7674, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3227004323349036e-05, |
|
"loss": 3.7471, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3218618375838516e-05, |
|
"loss": 3.7582, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3210232428327996e-05, |
|
"loss": 3.7633, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3201846480817476e-05, |
|
"loss": 3.7694, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3193476912110685e-05, |
|
"loss": 3.7467, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3185107343403894e-05, |
|
"loss": 3.7578, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3176721395893374e-05, |
|
"loss": 3.7493, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3168335448382854e-05, |
|
"loss": 3.7702, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315994950087234e-05, |
|
"loss": 3.7656, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.315156355336182e-05, |
|
"loss": 3.7623, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.314319398465503e-05, |
|
"loss": 3.7767, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.313480803714451e-05, |
|
"loss": 3.7724, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.312642208963399e-05, |
|
"loss": 3.7848, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.311803614212347e-05, |
|
"loss": 3.7622, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310965019461295e-05, |
|
"loss": 3.7645, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310126424710243e-05, |
|
"loss": 3.7621, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.309287829959191e-05, |
|
"loss": 3.7538, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.308449235208139e-05, |
|
"loss": 3.7717, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.30761227833746e-05, |
|
"loss": 3.7613, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.306773683586408e-05, |
|
"loss": 3.7683, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305935088835356e-05, |
|
"loss": 3.7622, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305096494084304e-05, |
|
"loss": 3.7511, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3042595372136254e-05, |
|
"loss": 3.759, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3034209424625734e-05, |
|
"loss": 3.7522, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3025823477115214e-05, |
|
"loss": 3.7689, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.301743752960469e-05, |
|
"loss": 3.7613, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3009067960897903e-05, |
|
"loss": 3.7453, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3000682013387383e-05, |
|
"loss": 3.7676, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.299229606587686e-05, |
|
"loss": 3.7575, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2983910118366337e-05, |
|
"loss": 3.7497, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.297554054965955e-05, |
|
"loss": 3.7586, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.296715460214903e-05, |
|
"loss": 3.7669, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.295876865463851e-05, |
|
"loss": 3.7619, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.295038270712799e-05, |
|
"loss": 3.7655, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294199675961747e-05, |
|
"loss": 3.7447, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.293361081210695e-05, |
|
"loss": 3.7606, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.292524124340016e-05, |
|
"loss": 3.7533, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.291685529588964e-05, |
|
"loss": 3.7657, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.290846934837912e-05, |
|
"loss": 3.7667, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.29000834008686e-05, |
|
"loss": 3.7565, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289169745335808e-05, |
|
"loss": 3.7556, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.288332788465129e-05, |
|
"loss": 3.7639, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.287494193714077e-05, |
|
"loss": 3.7421, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.286655598963025e-05, |
|
"loss": 3.7513, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.285817004211973e-05, |
|
"loss": 3.7447, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2849800473412946e-05, |
|
"loss": 3.7573, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2841414525902426e-05, |
|
"loss": 3.7444, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2833028578391906e-05, |
|
"loss": 3.7509, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2824642630881386e-05, |
|
"loss": 3.7477, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2816273062174595e-05, |
|
"loss": 3.7499, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2807887114664075e-05, |
|
"loss": 3.753, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2799501167153555e-05, |
|
"loss": 3.7463, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2791115219643035e-05, |
|
"loss": 3.7605, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2782729272132515e-05, |
|
"loss": 3.758, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2774359703425724e-05, |
|
"loss": 3.7582, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2765973755915204e-05, |
|
"loss": 3.7477, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2757587808404684e-05, |
|
"loss": 3.7588, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2749201860894164e-05, |
|
"loss": 3.7587, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274083229218738e-05, |
|
"loss": 3.7431, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.273244634467686e-05, |
|
"loss": 3.7658, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.272406039716634e-05, |
|
"loss": 3.7426, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.271567444965582e-05, |
|
"loss": 3.7582, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.27072885021453e-05, |
|
"loss": 3.7412, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269890255463478e-05, |
|
"loss": 3.7582, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269051660712426e-05, |
|
"loss": 3.7395, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.268214703841747e-05, |
|
"loss": 3.744, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.267376109090695e-05, |
|
"loss": 3.7414, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.266537514339643e-05, |
|
"loss": 3.764, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.265698919588591e-05, |
|
"loss": 3.747, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.264861962717912e-05, |
|
"loss": 3.7553, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2640233679668604e-05, |
|
"loss": 3.7422, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2631847732158084e-05, |
|
"loss": 3.7578, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2623461784647564e-05, |
|
"loss": 3.7596, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2615075837137044e-05, |
|
"loss": 3.7568, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.260670626843025e-05, |
|
"loss": 3.7531, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.259832032091973e-05, |
|
"loss": 3.7504, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258993437340921e-05, |
|
"loss": 3.7504, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258154842589869e-05, |
|
"loss": 3.761, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.25731788571919e-05, |
|
"loss": 3.7452, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.256479290968138e-05, |
|
"loss": 3.7524, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.255640696217086e-05, |
|
"loss": 3.7465, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.254802101466034e-05, |
|
"loss": 3.7491, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253965144595356e-05, |
|
"loss": 3.75, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253126549844304e-05, |
|
"loss": 3.744, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.252287955093252e-05, |
|
"loss": 3.7569, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2514493603422e-05, |
|
"loss": 3.7349, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.250612403471521e-05, |
|
"loss": 3.7572, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 3.867486000061035, |
|
"eval_runtime": 310.0624, |
|
"eval_samples_per_second": 1230.691, |
|
"eval_steps_per_second": 38.46, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.249773808720469e-05, |
|
"loss": 3.743, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.248935213969417e-05, |
|
"loss": 3.7332, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.248096619218364e-05, |
|
"loss": 3.75, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.247258024467312e-05, |
|
"loss": 3.7438, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.24641942971626e-05, |
|
"loss": 3.7576, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2455824728455816e-05, |
|
"loss": 3.7422, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2447438780945296e-05, |
|
"loss": 3.7357, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2439052833434776e-05, |
|
"loss": 3.7334, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2430666885924255e-05, |
|
"loss": 3.7415, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.242229731721747e-05, |
|
"loss": 3.7493, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2413911369706945e-05, |
|
"loss": 3.7385, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2405525422196424e-05, |
|
"loss": 3.7429, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2397139474685904e-05, |
|
"loss": 3.7406, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2388769905979114e-05, |
|
"loss": 3.7342, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2380383958468593e-05, |
|
"loss": 3.7406, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2371998010958073e-05, |
|
"loss": 3.7285, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2363612063447553e-05, |
|
"loss": 3.7339, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.235522611593703e-05, |
|
"loss": 3.7381, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.234684016842651e-05, |
|
"loss": 3.7356, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.233845422091599e-05, |
|
"loss": 3.745, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.233006827340547e-05, |
|
"loss": 3.745, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.232171508350242e-05, |
|
"loss": 3.7392, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.23133291359919e-05, |
|
"loss": 3.7382, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.230494318848138e-05, |
|
"loss": 3.7457, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.229655724097086e-05, |
|
"loss": 3.7228, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.228818767226407e-05, |
|
"loss": 3.749, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.227980172475355e-05, |
|
"loss": 3.7337, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.227141577724303e-05, |
|
"loss": 3.731, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.226302982973251e-05, |
|
"loss": 3.7263, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2254660261025716e-05, |
|
"loss": 3.7333, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2246274313515196e-05, |
|
"loss": 3.7374, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.223788836600468e-05, |
|
"loss": 3.7399, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.222950241849416e-05, |
|
"loss": 3.7404, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.222111647098364e-05, |
|
"loss": 3.7435, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.221273052347312e-05, |
|
"loss": 3.7318, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.220436095476633e-05, |
|
"loss": 3.7411, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.219597500725581e-05, |
|
"loss": 3.7387, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.218758905974529e-05, |
|
"loss": 3.7293, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.217920311223477e-05, |
|
"loss": 3.7272, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.217081716472425e-05, |
|
"loss": 3.7315, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.216244759601746e-05, |
|
"loss": 3.7295, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.215406164850694e-05, |
|
"loss": 3.7402, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.214567570099642e-05, |
|
"loss": 3.7305, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.21372897534859e-05, |
|
"loss": 3.7303, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2128920184779116e-05, |
|
"loss": 3.743, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2120534237268596e-05, |
|
"loss": 3.7312, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2112148289758076e-05, |
|
"loss": 3.7358, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2103762342247556e-05, |
|
"loss": 3.7293, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2095392773540765e-05, |
|
"loss": 3.6986, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2087006826030245e-05, |
|
"loss": 3.7421, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2078620878519725e-05, |
|
"loss": 3.7288, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2070234931009205e-05, |
|
"loss": 3.7312, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2061865362302414e-05, |
|
"loss": 3.7197, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2053479414791894e-05, |
|
"loss": 3.7115, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2045093467281374e-05, |
|
"loss": 3.7166, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2036707519770854e-05, |
|
"loss": 3.7272, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2028321572260334e-05, |
|
"loss": 3.7226, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.201993562474982e-05, |
|
"loss": 3.7229, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.201156605604303e-05, |
|
"loss": 3.7357, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.200318010853251e-05, |
|
"loss": 3.7199, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.199479416102199e-05, |
|
"loss": 3.7187, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.198640821351147e-05, |
|
"loss": 3.7317, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.197802226600095e-05, |
|
"loss": 3.7058, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196965269729416e-05, |
|
"loss": 3.7204, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196126674978364e-05, |
|
"loss": 3.7245, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.195288080227312e-05, |
|
"loss": 3.7323, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.19444948547626e-05, |
|
"loss": 3.7065, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.193612528605581e-05, |
|
"loss": 3.7217, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.192773933854529e-05, |
|
"loss": 3.7095, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1919353391034775e-05, |
|
"loss": 3.7317, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.191096744352425e-05, |
|
"loss": 3.7296, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1902597874817464e-05, |
|
"loss": 3.7265, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1894211927306944e-05, |
|
"loss": 3.7322, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1885825979796424e-05, |
|
"loss": 3.7364, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.18774400322859e-05, |
|
"loss": 3.747, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.186907046357911e-05, |
|
"loss": 3.7233, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.186068451606859e-05, |
|
"loss": 3.7267, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.185229856855807e-05, |
|
"loss": 3.7244, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1843912621047546e-05, |
|
"loss": 3.7143, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.183554305234076e-05, |
|
"loss": 3.7289, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.182715710483024e-05, |
|
"loss": 3.7255, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.181877115731972e-05, |
|
"loss": 3.7284, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.18103852098092e-05, |
|
"loss": 3.732, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.180201564110242e-05, |
|
"loss": 3.713, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.17936296935919e-05, |
|
"loss": 3.715, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.178524374608137e-05, |
|
"loss": 3.7183, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.177685779857085e-05, |
|
"loss": 3.7337, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.176847185106033e-05, |
|
"loss": 3.7215, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1760102282353546e-05, |
|
"loss": 3.7125, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.175171633484302e-05, |
|
"loss": 3.7177, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.17433303873325e-05, |
|
"loss": 3.727, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.173494443982198e-05, |
|
"loss": 3.7131, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1726574871115195e-05, |
|
"loss": 3.72, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1718188923604675e-05, |
|
"loss": 3.733, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1709802976094155e-05, |
|
"loss": 3.7238, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1701417028583635e-05, |
|
"loss": 3.726, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1693031081073115e-05, |
|
"loss": 3.7084, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1684661512366324e-05, |
|
"loss": 3.7224, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1676275564855804e-05, |
|
"loss": 3.7158, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1667889617345284e-05, |
|
"loss": 3.7253, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1659503669834764e-05, |
|
"loss": 3.7288, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1651117722324244e-05, |
|
"loss": 3.726, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.164274815361745e-05, |
|
"loss": 3.7179, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.163436220610693e-05, |
|
"loss": 3.7271, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.162597625859642e-05, |
|
"loss": 3.7067, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.16175903110859e-05, |
|
"loss": 3.7169, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160922074237911e-05, |
|
"loss": 3.7043, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160083479486859e-05, |
|
"loss": 3.7244, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.159244884735807e-05, |
|
"loss": 3.7081, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.158406289984755e-05, |
|
"loss": 3.7121, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.157567695233703e-05, |
|
"loss": 3.7095, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.156730738363024e-05, |
|
"loss": 3.7126, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.155892143611972e-05, |
|
"loss": 3.7173, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.15505354886092e-05, |
|
"loss": 3.7102, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154214954109868e-05, |
|
"loss": 3.7214, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.153377997239189e-05, |
|
"loss": 3.7275, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.152539402488137e-05, |
|
"loss": 3.7157, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.151700807737085e-05, |
|
"loss": 3.7192, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.150862212986033e-05, |
|
"loss": 3.7186, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.150025256115354e-05, |
|
"loss": 3.722, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.149186661364302e-05, |
|
"loss": 3.7137, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14834806661325e-05, |
|
"loss": 3.7225, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147509471862198e-05, |
|
"loss": 3.7064, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.146670877111146e-05, |
|
"loss": 3.7219, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.145833920240467e-05, |
|
"loss": 3.709, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144995325489415e-05, |
|
"loss": 3.7159, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144156730738363e-05, |
|
"loss": 3.7099, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.143318135987311e-05, |
|
"loss": 3.7091, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.142481179116632e-05, |
|
"loss": 3.7053, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.141642584365581e-05, |
|
"loss": 3.7225, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.140803989614529e-05, |
|
"loss": 3.7137, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139965394863477e-05, |
|
"loss": 3.7202, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1391284379927976e-05, |
|
"loss": 3.7028, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1382898432417456e-05, |
|
"loss": 3.7226, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1374512484906936e-05, |
|
"loss": 3.7211, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1366126537396416e-05, |
|
"loss": 3.7207, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1357756968689625e-05, |
|
"loss": 3.7165, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1349371021179105e-05, |
|
"loss": 3.714, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1340985073668585e-05, |
|
"loss": 3.716, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1332599126158065e-05, |
|
"loss": 3.729, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1324213178647545e-05, |
|
"loss": 3.7068, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.131584360994076e-05, |
|
"loss": 3.7151, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.130745766243024e-05, |
|
"loss": 3.7157, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129907171491972e-05, |
|
"loss": 3.7149, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.12906857674092e-05, |
|
"loss": 3.7057, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.128231619870241e-05, |
|
"loss": 3.7083, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.127393025119189e-05, |
|
"loss": 3.7238, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.126554430368137e-05, |
|
"loss": 3.7017, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.125715835617085e-05, |
|
"loss": 3.7211, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.857250690460205, |
|
"eval_runtime": 305.8615, |
|
"eval_samples_per_second": 1247.594, |
|
"eval_steps_per_second": 38.988, |
|
"step": 534240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.124877240866032e-05, |
|
"loss": 3.7086, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.124040283995354e-05, |
|
"loss": 3.6963, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.123201689244302e-05, |
|
"loss": 3.7163, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.12236309449325e-05, |
|
"loss": 3.7035, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.121524499742198e-05, |
|
"loss": 3.7194, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1206875428715194e-05, |
|
"loss": 3.7094, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1198489481204674e-05, |
|
"loss": 3.7045, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1190103533694154e-05, |
|
"loss": 3.7007, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.118171758618363e-05, |
|
"loss": 3.7079, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.117334801747684e-05, |
|
"loss": 3.7127, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.116496206996632e-05, |
|
"loss": 3.7027, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1156576122455796e-05, |
|
"loss": 3.7056, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1148190174945276e-05, |
|
"loss": 3.7027, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.113982060623849e-05, |
|
"loss": 3.7016, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.113143465872797e-05, |
|
"loss": 3.7065, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.112304871121745e-05, |
|
"loss": 3.6972, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.111466276370693e-05, |
|
"loss": 3.697, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.110627681619641e-05, |
|
"loss": 3.7039, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.109790724748963e-05, |
|
"loss": 3.7026, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.10895212999791e-05, |
|
"loss": 3.7082, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.108113535246858e-05, |
|
"loss": 3.7127, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.107274940495806e-05, |
|
"loss": 3.7077, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.106437983625127e-05, |
|
"loss": 3.7051, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.105599388874075e-05, |
|
"loss": 3.7065, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.104760794123023e-05, |
|
"loss": 3.6936, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1039238372523446e-05, |
|
"loss": 3.7139, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.103085242501292e-05, |
|
"loss": 3.6976, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1022466477502406e-05, |
|
"loss": 3.702, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1014080529991886e-05, |
|
"loss": 3.6956, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1005694582481366e-05, |
|
"loss": 3.698, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0997325013774575e-05, |
|
"loss": 3.7018, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0988939066264055e-05, |
|
"loss": 3.7076, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0980553118753535e-05, |
|
"loss": 3.7049, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0972167171243015e-05, |
|
"loss": 3.7073, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0963781223732495e-05, |
|
"loss": 3.6967, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0955411655025704e-05, |
|
"loss": 3.71, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0947025707515184e-05, |
|
"loss": 3.7078, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0938639760004664e-05, |
|
"loss": 3.6933, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0930253812494144e-05, |
|
"loss": 3.6976, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.092188424378736e-05, |
|
"loss": 3.697, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.091349829627684e-05, |
|
"loss": 3.698, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.090511234876632e-05, |
|
"loss": 3.7052, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.08967264012558e-05, |
|
"loss": 3.6982, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.088835683254901e-05, |
|
"loss": 3.6975, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087997088503849e-05, |
|
"loss": 3.708, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087158493752797e-05, |
|
"loss": 3.6991, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.086319899001745e-05, |
|
"loss": 3.7004, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.085481304250693e-05, |
|
"loss": 3.6986, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.084644347380014e-05, |
|
"loss": 3.6655, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.083805752628962e-05, |
|
"loss": 3.7101, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.08296715787791e-05, |
|
"loss": 3.6945, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.082128563126858e-05, |
|
"loss": 3.7026, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.081291606256179e-05, |
|
"loss": 3.6829, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.080453011505127e-05, |
|
"loss": 3.6817, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.079614416754075e-05, |
|
"loss": 3.6796, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.078775822003023e-05, |
|
"loss": 3.6926, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.077938865132344e-05, |
|
"loss": 3.6938, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.077100270381292e-05, |
|
"loss": 3.6865, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.07626167563024e-05, |
|
"loss": 3.7073, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.075423080879188e-05, |
|
"loss": 3.6876, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.074586124008509e-05, |
|
"loss": 3.6878, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.073747529257457e-05, |
|
"loss": 3.6991, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.072908934506405e-05, |
|
"loss": 3.6735, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.072070339755353e-05, |
|
"loss": 3.6853, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.071233382884675e-05, |
|
"loss": 3.6922, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.070394788133623e-05, |
|
"loss": 3.7004, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.069556193382571e-05, |
|
"loss": 3.6746, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.068717598631519e-05, |
|
"loss": 3.689, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0678806417608396e-05, |
|
"loss": 3.6764, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0670420470097876e-05, |
|
"loss": 3.7007, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0662034522587356e-05, |
|
"loss": 3.6932, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0653648575076836e-05, |
|
"loss": 3.695, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0645279006370045e-05, |
|
"loss": 3.6984, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0636893058859525e-05, |
|
"loss": 3.7035, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0628507111349005e-05, |
|
"loss": 3.714, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0620121163838485e-05, |
|
"loss": 3.6928, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0611735216327965e-05, |
|
"loss": 3.6946, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.060336564762118e-05, |
|
"loss": 3.6968, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.059497970011066e-05, |
|
"loss": 3.6798, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.058659375260014e-05, |
|
"loss": 3.6944, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.057820780508962e-05, |
|
"loss": 3.6971, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056983823638283e-05, |
|
"loss": 3.6974, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056145228887231e-05, |
|
"loss": 3.6976, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.055306634136179e-05, |
|
"loss": 3.6843, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.054468039385127e-05, |
|
"loss": 3.6796, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.053631082514448e-05, |
|
"loss": 3.6859, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.052792487763396e-05, |
|
"loss": 3.703, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.051953893012344e-05, |
|
"loss": 3.6895, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.051115298261292e-05, |
|
"loss": 3.6808, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0502783413906134e-05, |
|
"loss": 3.687, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0494397466395614e-05, |
|
"loss": 3.6953, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0486011518885094e-05, |
|
"loss": 3.6847, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0477625571374574e-05, |
|
"loss": 3.6876, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.046925600266778e-05, |
|
"loss": 3.7, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.046087005515726e-05, |
|
"loss": 3.6918, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.045248410764674e-05, |
|
"loss": 3.6925, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.044409816013622e-05, |
|
"loss": 3.6822, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.043572859142943e-05, |
|
"loss": 3.6872, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.042734264391891e-05, |
|
"loss": 3.6837, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.041895669640839e-05, |
|
"loss": 3.6971, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.041057074889787e-05, |
|
"loss": 3.6927, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.040218480138735e-05, |
|
"loss": 3.6963, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.039381523268057e-05, |
|
"loss": 3.689, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.038542928517005e-05, |
|
"loss": 3.6924, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.037704333765953e-05, |
|
"loss": 3.6798, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0368657390149e-05, |
|
"loss": 3.681, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.036028782144222e-05, |
|
"loss": 3.677, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.03519018739317e-05, |
|
"loss": 3.6934, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0343515926421177e-05, |
|
"loss": 3.6787, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.033512997891065e-05, |
|
"loss": 3.6777, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0326760410203866e-05, |
|
"loss": 3.6762, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0318374462693346e-05, |
|
"loss": 3.6858, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0309988515182826e-05, |
|
"loss": 3.6858, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0301602567672305e-05, |
|
"loss": 3.6761, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.029323299896552e-05, |
|
"loss": 3.6954, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0284847051455e-05, |
|
"loss": 3.6914, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0276461103944475e-05, |
|
"loss": 3.6846, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0268075156433954e-05, |
|
"loss": 3.6901, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.025970558772717e-05, |
|
"loss": 3.6884, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.025131964021665e-05, |
|
"loss": 3.6862, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0242933692706123e-05, |
|
"loss": 3.6872, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0234547745195603e-05, |
|
"loss": 3.689, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.022617817648882e-05, |
|
"loss": 3.678, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.02177922289783e-05, |
|
"loss": 3.6901, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020940628146778e-05, |
|
"loss": 3.682, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020102033395726e-05, |
|
"loss": 3.6855, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.019263438644674e-05, |
|
"loss": 3.6767, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.018426481773995e-05, |
|
"loss": 3.6761, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.017587887022943e-05, |
|
"loss": 3.6775, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.016749292271891e-05, |
|
"loss": 3.6896, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.015910697520839e-05, |
|
"loss": 3.6828, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.01507374065016e-05, |
|
"loss": 3.6916, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.014235145899108e-05, |
|
"loss": 3.6705, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.013396551148056e-05, |
|
"loss": 3.697, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.012557956397004e-05, |
|
"loss": 3.6863, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.011720999526325e-05, |
|
"loss": 3.6914, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.010882404775273e-05, |
|
"loss": 3.6876, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.010043810024221e-05, |
|
"loss": 3.682, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.009205215273169e-05, |
|
"loss": 3.6854, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.00836825840249e-05, |
|
"loss": 3.6956, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.007529663651438e-05, |
|
"loss": 3.6774, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.006691068900386e-05, |
|
"loss": 3.6872, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.005852474149334e-05, |
|
"loss": 3.6854, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.005015517278655e-05, |
|
"loss": 3.6802, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.004176922527603e-05, |
|
"loss": 3.6774, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.003338327776551e-05, |
|
"loss": 3.6808, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.002499733025499e-05, |
|
"loss": 3.6903, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.001661138274447e-05, |
|
"loss": 3.6757, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0008241814037687e-05, |
|
"loss": 3.6912, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 3.8499464988708496, |
|
"eval_runtime": 303.8172, |
|
"eval_samples_per_second": 1255.989, |
|
"eval_steps_per_second": 39.251, |
|
"step": 610560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9999855866527166e-05, |
|
"loss": 3.6754, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9991469919016646e-05, |
|
"loss": 3.671, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9983083971506126e-05, |
|
"loss": 3.6799, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9974698023995606e-05, |
|
"loss": 3.679, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9966328455288815e-05, |
|
"loss": 3.6891, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9957942507778295e-05, |
|
"loss": 3.6785, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9949556560267775e-05, |
|
"loss": 3.6763, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9941170612757255e-05, |
|
"loss": 3.672, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9932801044050464e-05, |
|
"loss": 3.6732, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9924415096539944e-05, |
|
"loss": 3.6851, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9916029149029424e-05, |
|
"loss": 3.6727, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9907643201518904e-05, |
|
"loss": 3.6731, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9899257254008384e-05, |
|
"loss": 3.6827, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.98908876853016e-05, |
|
"loss": 3.6646, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.988250173779108e-05, |
|
"loss": 3.6749, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.987411579028056e-05, |
|
"loss": 3.6702, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.986572984277004e-05, |
|
"loss": 3.6673, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.985736027406325e-05, |
|
"loss": 3.6774, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.984897432655273e-05, |
|
"loss": 3.6739, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.984058837904221e-05, |
|
"loss": 3.6761, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.983220243153169e-05, |
|
"loss": 3.6848, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.98238328628249e-05, |
|
"loss": 3.679, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.981544691531438e-05, |
|
"loss": 3.6787, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.980706096780386e-05, |
|
"loss": 3.6733, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.979867502029334e-05, |
|
"loss": 3.6699, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9790305451586554e-05, |
|
"loss": 3.6813, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9781919504076034e-05, |
|
"loss": 3.67, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9773533556565514e-05, |
|
"loss": 3.6754, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9765147609054994e-05, |
|
"loss": 3.661, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.97567780403482e-05, |
|
"loss": 3.6712, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.974839209283768e-05, |
|
"loss": 3.6692, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.974000614532716e-05, |
|
"loss": 3.677, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.973162019781664e-05, |
|
"loss": 3.6804, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.972325062910985e-05, |
|
"loss": 3.6803, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.971486468159933e-05, |
|
"loss": 3.6696, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.970647873408881e-05, |
|
"loss": 3.6748, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.969809278657829e-05, |
|
"loss": 3.6805, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.968972321787151e-05, |
|
"loss": 3.6687, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.968133727036099e-05, |
|
"loss": 3.6678, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.967295132285047e-05, |
|
"loss": 3.6666, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.966456537533995e-05, |
|
"loss": 3.6712, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.965617942782943e-05, |
|
"loss": 3.6701, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9647809859122636e-05, |
|
"loss": 3.676, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9639423911612116e-05, |
|
"loss": 3.6704, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9631037964101596e-05, |
|
"loss": 3.676, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9622652016591076e-05, |
|
"loss": 3.6732, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9614282447884285e-05, |
|
"loss": 3.6705, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9605896500373765e-05, |
|
"loss": 3.6691, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9597510552863245e-05, |
|
"loss": 3.635, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.958912460535273e-05, |
|
"loss": 3.6812, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.958075503664594e-05, |
|
"loss": 3.6642, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.957236908913542e-05, |
|
"loss": 3.6749, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.95639831416249e-05, |
|
"loss": 3.6597, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9555597194114374e-05, |
|
"loss": 3.653, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.954722762540759e-05, |
|
"loss": 3.6492, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.953884167789707e-05, |
|
"loss": 3.6617, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.953045573038655e-05, |
|
"loss": 3.6664, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.952206978287602e-05, |
|
"loss": 3.6606, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.951370021416924e-05, |
|
"loss": 3.6736, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.950531426665872e-05, |
|
"loss": 3.6625, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94969283191482e-05, |
|
"loss": 3.6578, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.948854237163768e-05, |
|
"loss": 3.6696, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.948015642412716e-05, |
|
"loss": 3.6492, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9471786855420375e-05, |
|
"loss": 3.6545, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.946340090790985e-05, |
|
"loss": 3.6666, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.945501496039933e-05, |
|
"loss": 3.6699, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.944662901288881e-05, |
|
"loss": 3.6505, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9438259444182024e-05, |
|
"loss": 3.6581, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94298734966715e-05, |
|
"loss": 3.6486, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.942148754916098e-05, |
|
"loss": 3.6707, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.941310160165046e-05, |
|
"loss": 3.6685, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.940473203294367e-05, |
|
"loss": 3.6647, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.939634608543315e-05, |
|
"loss": 3.6716, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.938796013792263e-05, |
|
"loss": 3.673, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.937957419041211e-05, |
|
"loss": 3.6876, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.937120462170532e-05, |
|
"loss": 3.6669, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.93628186741948e-05, |
|
"loss": 3.6679, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.935443272668428e-05, |
|
"loss": 3.664, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.934604677917376e-05, |
|
"loss": 3.6587, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.933767721046697e-05, |
|
"loss": 3.6642, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.932929126295645e-05, |
|
"loss": 3.6689, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.932090531544593e-05, |
|
"loss": 3.669, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.931251936793541e-05, |
|
"loss": 3.6728, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9304149799228626e-05, |
|
"loss": 3.6537, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9295763851718106e-05, |
|
"loss": 3.6538, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9287377904207586e-05, |
|
"loss": 3.6563, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9278991956697066e-05, |
|
"loss": 3.6751, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9270622387990275e-05, |
|
"loss": 3.6656, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9262236440479755e-05, |
|
"loss": 3.6512, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9253850492969235e-05, |
|
"loss": 3.6611, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9245464545458715e-05, |
|
"loss": 3.6656, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9237094976751924e-05, |
|
"loss": 3.6586, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9228709029241404e-05, |
|
"loss": 3.6567, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9220323081730884e-05, |
|
"loss": 3.6733, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9211937134220364e-05, |
|
"loss": 3.6652, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.920356756551358e-05, |
|
"loss": 3.6658, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.919518161800306e-05, |
|
"loss": 3.6549, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.918679567049254e-05, |
|
"loss": 3.6584, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.917840972298202e-05, |
|
"loss": 3.6587, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.917004015427523e-05, |
|
"loss": 3.6691, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.916165420676471e-05, |
|
"loss": 3.6655, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.915326825925419e-05, |
|
"loss": 3.6677, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.914488231174367e-05, |
|
"loss": 3.6658, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.913651274303688e-05, |
|
"loss": 3.6603, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.912812679552636e-05, |
|
"loss": 3.6598, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.911974084801584e-05, |
|
"loss": 3.6505, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.911135490050532e-05, |
|
"loss": 3.6468, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.91029689529948e-05, |
|
"loss": 3.6682, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9094599384288014e-05, |
|
"loss": 3.6552, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9086213436777494e-05, |
|
"loss": 3.648, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9077827489266973e-05, |
|
"loss": 3.6501, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9069441541756453e-05, |
|
"loss": 3.6509, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.906107197304966e-05, |
|
"loss": 3.6628, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.905268602553914e-05, |
|
"loss": 3.6531, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.904430007802862e-05, |
|
"loss": 3.6648, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.903593050932183e-05, |
|
"loss": 3.6651, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.902754456181131e-05, |
|
"loss": 3.6612, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.901915861430079e-05, |
|
"loss": 3.6623, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.901077266679027e-05, |
|
"loss": 3.6596, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.900240309808349e-05, |
|
"loss": 3.66, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.899401715057297e-05, |
|
"loss": 3.6602, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.898563120306245e-05, |
|
"loss": 3.6613, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.897724525555193e-05, |
|
"loss": 3.6551, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8968875686845136e-05, |
|
"loss": 3.6634, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8960489739334616e-05, |
|
"loss": 3.6565, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8952103791824096e-05, |
|
"loss": 3.6561, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8943717844313576e-05, |
|
"loss": 3.651, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8935348275606785e-05, |
|
"loss": 3.6504, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8926962328096265e-05, |
|
"loss": 3.6496, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8918576380585745e-05, |
|
"loss": 3.6632, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8910190433075225e-05, |
|
"loss": 3.6543, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.890182086436844e-05, |
|
"loss": 3.6696, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.889343491685792e-05, |
|
"loss": 3.6444, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.88850489693474e-05, |
|
"loss": 3.6663, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.887666302183688e-05, |
|
"loss": 3.6597, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.886829345313009e-05, |
|
"loss": 3.6649, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.885990750561957e-05, |
|
"loss": 3.6619, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.885152155810905e-05, |
|
"loss": 3.6554, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.884313561059853e-05, |
|
"loss": 3.6596, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.883476604189174e-05, |
|
"loss": 3.6708, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.882638009438122e-05, |
|
"loss": 3.652, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.88179941468707e-05, |
|
"loss": 3.6617, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.880960819936018e-05, |
|
"loss": 3.6578, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8801238630653395e-05, |
|
"loss": 3.6538, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8792852683142875e-05, |
|
"loss": 3.6534, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8784466735632355e-05, |
|
"loss": 3.6533, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8776080788121834e-05, |
|
"loss": 3.6627, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8767694840611314e-05, |
|
"loss": 3.6534, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8759325271904524e-05, |
|
"loss": 3.6594, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.8459384441375732, |
|
"eval_runtime": 304.3449, |
|
"eval_samples_per_second": 1253.811, |
|
"eval_steps_per_second": 39.183, |
|
"step": 686880 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 4.7769765998363136e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
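A minimal sketch of how this log can be inspected programmatically, assuming the JSON above is saved locally as `trainer_state.json` (the filename and path are illustrative, not part of the original file). It separates the training-loss records (which carry a `"loss"` key) from the evaluation records (which carry `"eval_loss"`), and prints the summary fields shown above.

```python
# Sketch only: assumes the JSON object above is stored as ./trainer_state.json.
import json

with open("trainer_state.json") as f:  # hypothetical local path
    state = json.load(f)

history = state["log_history"]

# Training entries log "loss"; evaluation entries log "eval_loss".
train_points = [(e["step"], e["loss"]) for e in history if "loss" in e]
eval_points = [(e["step"], e["eval_loss"]) for e in history if "eval_loss" in e]

print(f"train points logged: {len(train_points)}")
print(f"first loss: {train_points[0][1]}  last loss: {train_points[-1][1]}")
for step, eval_loss in eval_points:
    print(f"eval @ step {step}: {eval_loss}")

print("best_metric:", state["best_metric"])
print("best_model_checkpoint:", state["best_model_checkpoint"])
```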
|