{
  "best_metric": 0.6617,
  "best_model_checkpoint": "distilbert-base-uncased-finetuned-mnli/checkpoint-71404",
  "epoch": 2.0,
  "global_step": 71404,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.9859951823427263e-05,
      "loss": 1.1951,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.971990364685452e-05,
      "loss": 1.1418,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9579855470281778e-05,
      "loss": 1.099,
      "step": 1500
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9439807293709036e-05,
      "loss": 1.0961,
      "step": 2000
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9299759117136297e-05,
      "loss": 1.0836,
      "step": 2500
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9159710940563555e-05,
      "loss": 1.0721,
      "step": 3000
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9019662763990812e-05,
      "loss": 1.0654,
      "step": 3500
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.8879614587418074e-05,
      "loss": 1.0439,
      "step": 4000
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.873956641084533e-05,
      "loss": 1.0453,
      "step": 4500
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.8599518234272592e-05,
      "loss": 1.0399,
      "step": 5000
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.845947005769985e-05,
      "loss": 1.0295,
      "step": 5500
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.8319421881127108e-05,
      "loss": 1.0417,
      "step": 6000
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.817937370455437e-05,
      "loss": 1.0327,
      "step": 6500
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.8039325527981627e-05,
      "loss": 1.0148,
      "step": 7000
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.7899277351408884e-05,
      "loss": 1.0171,
      "step": 7500
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.7759229174836145e-05,
      "loss": 1.0113,
      "step": 8000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.7619180998263403e-05,
      "loss": 1.0062,
      "step": 8500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.7479132821690664e-05,
      "loss": 1.0022,
      "step": 9000
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.7339084645117922e-05,
      "loss": 0.9908,
      "step": 9500
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.719903646854518e-05,
      "loss": 0.9919,
      "step": 10000
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.705898829197244e-05,
      "loss": 0.9875,
      "step": 10500
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.69189401153997e-05,
      "loss": 0.9908,
      "step": 11000
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.6778891938826956e-05,
      "loss": 0.9807,
      "step": 11500
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.6638843762254217e-05,
      "loss": 0.9622,
      "step": 12000
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.6498795585681475e-05,
      "loss": 0.9698,
      "step": 12500
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.6358747409108736e-05,
      "loss": 0.9611,
      "step": 13000
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.6218699232535994e-05,
      "loss": 0.9844,
      "step": 13500
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.6078651055963252e-05,
      "loss": 0.9717,
      "step": 14000
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.5938602879390513e-05,
      "loss": 0.978,
      "step": 14500
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.579855470281777e-05,
      "loss": 0.9684,
      "step": 15000
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.5658506526245028e-05,
      "loss": 0.9518,
      "step": 15500
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.551845834967229e-05,
      "loss": 0.9592,
      "step": 16000
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.5378410173099547e-05,
      "loss": 0.9482,
      "step": 16500
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.5238361996526806e-05,
      "loss": 0.9565,
      "step": 17000
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.5098313819954064e-05,
      "loss": 0.956,
      "step": 17500
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.4958265643381324e-05,
      "loss": 0.9432,
      "step": 18000
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.4818217466808585e-05,
      "loss": 0.943,
      "step": 18500
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.4678169290235842e-05,
      "loss": 0.9354,
      "step": 19000
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.4538121113663102e-05,
      "loss": 0.958,
      "step": 19500
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.439807293709036e-05,
      "loss": 0.9358,
      "step": 20000
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.4258024760517619e-05,
      "loss": 0.9395,
      "step": 20500
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.4117976583944878e-05,
      "loss": 0.9448,
      "step": 21000
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.3977928407372136e-05,
      "loss": 0.9349,
      "step": 21500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.3837880230799397e-05,
      "loss": 0.9444,
      "step": 22000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.3697832054226653e-05,
      "loss": 0.929,
      "step": 22500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.3557783877653914e-05,
      "loss": 0.933,
      "step": 23000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.3417735701081174e-05,
      "loss": 0.9357,
      "step": 23500
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.3277687524508432e-05,
      "loss": 0.9182,
      "step": 24000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.3137639347935691e-05,
      "loss": 0.9279,
      "step": 24500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.299759117136295e-05,
      "loss": 0.9245,
      "step": 25000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.2857542994790208e-05,
      "loss": 0.9205,
      "step": 25500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.271749481821747e-05,
      "loss": 0.9214,
      "step": 26000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.2577446641644725e-05,
      "loss": 0.9388,
      "step": 26500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2437398465071986e-05,
      "loss": 0.9256,
      "step": 27000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2297350288499246e-05,
      "loss": 0.9206,
      "step": 27500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.2157302111926503e-05,
      "loss": 0.9091,
      "step": 28000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.2017253935353763e-05,
      "loss": 0.9267,
      "step": 28500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.187720575878102e-05,
      "loss": 0.9103,
      "step": 29000
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.173715758220828e-05,
      "loss": 0.9032,
      "step": 29500
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.1597109405635541e-05,
      "loss": 0.9075,
      "step": 30000
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.1457061229062799e-05,
      "loss": 0.9016,
      "step": 30500
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.1317013052490058e-05,
      "loss": 0.9119,
      "step": 31000
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.1176964875917316e-05,
      "loss": 0.9085,
      "step": 31500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.1036916699344575e-05,
      "loss": 0.894,
      "step": 32000
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.0896868522771835e-05,
      "loss": 0.9156,
      "step": 32500
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.0756820346199093e-05,
      "loss": 0.8944,
      "step": 33000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.0616772169626352e-05,
      "loss": 0.8824,
      "step": 33500
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.047672399305361e-05,
      "loss": 0.9014,
      "step": 34000
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.033667581648087e-05,
      "loss": 0.9022,
      "step": 34500
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.019662763990813e-05,
      "loss": 0.8888,
      "step": 35000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.0056579463335388e-05,
      "loss": 0.8981,
      "step": 35500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.63705,
      "eval_loss": 0.8662445545196533,
      "eval_runtime": 112.0196,
      "eval_samples_per_second": 178.54,
      "eval_steps_per_second": 11.159,
      "step": 35702
    },
    {
      "epoch": 1.01,
      "learning_rate": 9.916531286762647e-06,
      "loss": 0.8662,
      "step": 36000
    },
    {
      "epoch": 1.02,
      "learning_rate": 9.776483110189907e-06,
      "loss": 0.8255,
      "step": 36500
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.636434933617164e-06,
      "loss": 0.8377,
      "step": 37000
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.496386757044424e-06,
      "loss": 0.8203,
      "step": 37500
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.356338580471683e-06,
      "loss": 0.8399,
      "step": 38000
    },
    {
      "epoch": 1.08,
      "learning_rate": 9.216290403898943e-06,
      "loss": 0.8215,
      "step": 38500
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.0762422273262e-06,
      "loss": 0.8266,
      "step": 39000
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.93619405075346e-06,
      "loss": 0.8445,
      "step": 39500
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.79614587418072e-06,
      "loss": 0.8159,
      "step": 40000
    },
    {
      "epoch": 1.13,
      "learning_rate": 8.656097697607979e-06,
      "loss": 0.8139,
      "step": 40500
    },
    {
      "epoch": 1.15,
      "learning_rate": 8.516049521035236e-06,
      "loss": 0.8303,
      "step": 41000
    },
    {
      "epoch": 1.16,
      "learning_rate": 8.376001344462496e-06,
      "loss": 0.8004,
      "step": 41500
    },
    {
      "epoch": 1.18,
      "learning_rate": 8.235953167889755e-06,
      "loss": 0.8373,
      "step": 42000
    },
    {
      "epoch": 1.19,
      "learning_rate": 8.095904991317015e-06,
      "loss": 0.8121,
      "step": 42500
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.955856814744272e-06,
      "loss": 0.8083,
      "step": 43000
    },
    {
      "epoch": 1.22,
      "learning_rate": 7.815808638171532e-06,
      "loss": 0.8287,
      "step": 43500
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.67576046159879e-06,
      "loss": 0.8251,
      "step": 44000
    },
    {
      "epoch": 1.25,
      "learning_rate": 7.53571228502605e-06,
      "loss": 0.8286,
      "step": 44500
    },
    {
      "epoch": 1.26,
      "learning_rate": 7.395664108453308e-06,
      "loss": 0.8139,
      "step": 45000
    },
    {
      "epoch": 1.27,
      "learning_rate": 7.255615931880568e-06,
      "loss": 0.8093,
      "step": 45500
    },
    {
      "epoch": 1.29,
      "learning_rate": 7.115567755307826e-06,
      "loss": 0.8185,
      "step": 46000
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.975519578735085e-06,
      "loss": 0.8182,
      "step": 46500
    },
    {
      "epoch": 1.32,
      "learning_rate": 6.835471402162345e-06,
      "loss": 0.8235,
      "step": 47000
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.695423225589604e-06,
      "loss": 0.8165,
      "step": 47500
    },
    {
      "epoch": 1.34,
      "learning_rate": 6.555375049016862e-06,
      "loss": 0.8013,
      "step": 48000
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.415326872444121e-06,
      "loss": 0.8126,
      "step": 48500
    },
    {
      "epoch": 1.37,
      "learning_rate": 6.275278695871381e-06,
      "loss": 0.8178,
      "step": 49000
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.13523051929864e-06,
      "loss": 0.8052,
      "step": 49500
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.995182342725898e-06,
      "loss": 0.8167,
      "step": 50000
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.855134166153157e-06,
      "loss": 0.8193,
      "step": 50500
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.715085989580415e-06,
      "loss": 0.8114,
      "step": 51000
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.575037813007676e-06,
      "loss": 0.7964,
      "step": 51500
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.434989636434934e-06,
      "loss": 0.8061,
      "step": 52000
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.294941459862193e-06,
      "loss": 0.8148,
      "step": 52500
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.154893283289451e-06,
      "loss": 0.7987,
      "step": 53000
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.014845106716712e-06,
      "loss": 0.7882,
      "step": 53500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.87479693014397e-06,
      "loss": 0.8347,
      "step": 54000
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.734748753571229e-06,
      "loss": 0.8005,
      "step": 54500
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.594700576998487e-06,
      "loss": 0.7995,
      "step": 55000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.454652400425747e-06,
      "loss": 0.8096,
      "step": 55500
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.314604223853005e-06,
      "loss": 0.788,
      "step": 56000
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.174556047280265e-06,
      "loss": 0.7972,
      "step": 56500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.034507870707523e-06,
      "loss": 0.7841,
      "step": 57000
    },
    {
      "epoch": 1.61,
      "learning_rate": 3.894459694134783e-06,
      "loss": 0.8075,
      "step": 57500
    },
    {
      "epoch": 1.62,
      "learning_rate": 3.7544115175620417e-06,
      "loss": 0.8039,
      "step": 58000
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.6143633409893007e-06,
      "loss": 0.7813,
      "step": 58500
    },
    {
      "epoch": 1.65,
      "learning_rate": 3.4743151644165597e-06,
      "loss": 0.7884,
      "step": 59000
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.3342669878438182e-06,
      "loss": 0.7981,
      "step": 59500
    },
    {
      "epoch": 1.68,
      "learning_rate": 3.1942188112710776e-06,
      "loss": 0.7857,
      "step": 60000
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.0541706346983362e-06,
      "loss": 0.8166,
      "step": 60500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.9141224581255956e-06,
      "loss": 0.8139,
      "step": 61000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.774074281552854e-06,
      "loss": 0.7986,
      "step": 61500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.6340261049801136e-06,
      "loss": 0.8084,
      "step": 62000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.493977928407372e-06,
      "loss": 0.7958,
      "step": 62500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.353929751834631e-06,
      "loss": 0.8037,
      "step": 63000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.21388157526189e-06,
      "loss": 0.8021,
      "step": 63500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.073833398689149e-06,
      "loss": 0.8079,
      "step": 64000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.933785222116408e-06,
      "loss": 0.7904,
      "step": 64500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.7937370455436674e-06,
      "loss": 0.8015,
      "step": 65000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.653688868970926e-06,
      "loss": 0.8057,
      "step": 65500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.513640692398185e-06,
      "loss": 0.7901,
      "step": 66000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.373592515825444e-06,
      "loss": 0.7925,
      "step": 66500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.233544339252703e-06,
      "loss": 0.8031,
      "step": 67000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.093496162679962e-06,
      "loss": 0.7847,
      "step": 67500
    },
    {
      "epoch": 1.9,
      "learning_rate": 9.53447986107221e-07,
      "loss": 0.792,
      "step": 68000
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.133998095344799e-07,
      "loss": 0.7927,
      "step": 68500
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.733516329617389e-07,
      "loss": 0.7759,
      "step": 69000
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.333034563889979e-07,
      "loss": 0.7915,
      "step": 69500
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.9325527981625684e-07,
      "loss": 0.7801,
      "step": 70000
    },
    {
      "epoch": 1.97,
      "learning_rate": 2.532071032435158e-07,
      "loss": 0.787,
      "step": 70500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.1315892667077475e-07,
      "loss": 0.7837,
      "step": 71000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.6617,
      "eval_loss": 0.8244466781616211,
      "eval_runtime": 111.9282,
      "eval_samples_per_second": 178.686,
      "eval_steps_per_second": 11.168,
      "step": 71404
    }
  ],
  "max_steps": 71404,
  "num_train_epochs": 2,
  "total_flos": 1.5134224748069683e+17,
  "trial_name": null,
  "trial_params": null
}