{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6506710044733631,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999967339434433e-05,
      "loss": 1.343,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9998820960283476e-05,
      "loss": 1.1858,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99974394497948e-05,
      "loss": 1.1057,
      "step": 30
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9995528892116685e-05,
      "loss": 1.0698,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999308932768425e-05,
      "loss": 1.0316,
      "step": 50
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.99897583236926e-05,
      "loss": 1.0046,
      "step": 60
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9985774375988764e-05,
      "loss": 0.9848,
      "step": 70
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998113758866712e-05,
      "loss": 0.9656,
      "step": 80
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.997584808287975e-05,
      "loss": 0.9648,
      "step": 90
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.996990599683325e-05,
      "loss": 0.9415,
      "step": 100
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.996331148578514e-05,
      "loss": 0.9388,
      "step": 110
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.995606472203979e-05,
      "loss": 0.9289,
      "step": 120
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.994816589494391e-05,
      "loss": 0.9252,
      "step": 130
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.993961521088163e-05,
      "loss": 0.91,
      "step": 140
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9930412893269106e-05,
      "loss": 0.9033,
      "step": 150
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.992055918254865e-05,
      "loss": 0.9064,
      "step": 160
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9910054336182467e-05,
      "loss": 0.8946,
      "step": 170
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.989889862864594e-05,
      "loss": 0.8914,
      "step": 180
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.988709235142044e-05,
      "loss": 0.9007,
      "step": 190
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9874635812985734e-05,
      "loss": 0.9008,
      "step": 200
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9861529338811874e-05,
      "loss": 0.8714,
      "step": 210
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.984777327135076e-05,
      "loss": 0.8865,
      "step": 220
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9833367970027145e-05,
      "loss": 0.8716,
      "step": 230
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.981831381122926e-05,
      "loss": 0.8817,
      "step": 240
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.980261118829896e-05,
      "loss": 0.8658,
      "step": 250
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.978626051152152e-05,
      "loss": 0.8722,
      "step": 260
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.976926220811479e-05,
      "loss": 0.8541,
      "step": 270
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.975161672221813e-05,
      "loss": 0.8504,
      "step": 280
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.97333245148808e-05,
      "loss": 0.8467,
      "step": 290
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.971438606404986e-05,
      "loss": 0.8533,
      "step": 300
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.969480186455771e-05,
      "loss": 0.853,
      "step": 310
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9674572428109205e-05,
      "loss": 0.8432,
      "step": 320
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9653698283268183e-05,
      "loss": 0.854,
      "step": 330
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.963217997544376e-05,
      "loss": 0.8435,
      "step": 340
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.961001806687602e-05,
      "loss": 0.8518,
      "step": 350
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.958721313662133e-05,
      "loss": 0.848,
      "step": 360
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.956376578053723e-05,
      "loss": 0.8436,
      "step": 370
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9539676611266846e-05,
      "loss": 0.8405,
      "step": 380
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9514946258222895e-05,
      "loss": 0.838,
      "step": 390
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.948957536757123e-05,
      "loss": 0.8458,
      "step": 400
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.946356460221395e-05,
      "loss": 0.8379,
      "step": 410
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.943691464177212e-05,
      "loss": 0.827,
      "step": 420
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.940962618256796e-05,
      "loss": 0.8208,
      "step": 430
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9381699937606674e-05,
      "loss": 0.8242,
      "step": 440
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.935313663655782e-05,
      "loss": 0.8362,
      "step": 450
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.932393702573627e-05,
      "loss": 0.8363,
      "step": 460
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.929410186808264e-05,
      "loss": 0.8275,
      "step": 470
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.926363194314344e-05,
      "loss": 0.821,
      "step": 480
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.923252804705068e-05,
      "loss": 0.8271,
      "step": 490
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.920079099250099e-05,
      "loss": 0.8178,
      "step": 500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.916842160873451e-05,
      "loss": 0.8293,
      "step": 510
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.913542074151315e-05,
      "loss": 0.8294,
      "step": 520
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.910178925309849e-05,
      "loss": 0.8202,
      "step": 530
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.906752802222928e-05,
      "loss": 0.8301,
      "step": 540
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.903263794409846e-05,
      "loss": 0.8031,
      "step": 550
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.8997119930329785e-05,
      "loss": 0.821,
      "step": 560
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.896097490895398e-05,
      "loss": 0.8132,
      "step": 570
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.8924203824384516e-05,
      "loss": 0.8113,
      "step": 580
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.888680763739294e-05,
      "loss": 0.8199,
      "step": 590
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8848787325083734e-05,
      "loss": 0.8196,
      "step": 600
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8810143880868816e-05,
      "loss": 0.8128,
      "step": 610
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8770878314441606e-05,
      "loss": 0.8123,
      "step": 620
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8730991651750565e-05,
      "loss": 0.8048,
      "step": 630
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.869048493497247e-05,
      "loss": 0.8119,
      "step": 640
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.864935922248515e-05,
      "loss": 0.8141,
      "step": 650
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.860761558883981e-05,
      "loss": 0.8,
      "step": 660
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.856525512473302e-05,
      "loss": 0.816,
      "step": 670
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.852227893697814e-05,
      "loss": 0.7976,
      "step": 680
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.847868814847646e-05,
      "loss": 0.8121,
      "step": 690
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.84344838981878e-05,
      "loss": 0.8175,
      "step": 700
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.8389667341100844e-05,
      "loss": 0.8109,
      "step": 710
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.834423964820284e-05,
      "loss": 0.7948,
      "step": 720
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.8298202006449136e-05,
      "loss": 0.7986,
      "step": 730
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.8251555618732036e-05,
      "loss": 0.8049,
      "step": 740
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.820430170384947e-05,
      "loss": 0.7981,
      "step": 750
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.8156441496473126e-05,
      "loss": 0.799,
      "step": 760
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.8107976247116145e-05,
      "loss": 0.804,
      "step": 770
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.805890722210048e-05,
      "loss": 0.8053,
      "step": 780
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.8009235703523824e-05,
      "loss": 0.7969,
      "step": 790
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.79589629892261e-05,
      "loss": 0.8051,
      "step": 800
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.7908090392755526e-05,
      "loss": 0.796,
      "step": 810
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.785661924333431e-05,
      "loss": 0.7928,
      "step": 820
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.780455088582394e-05,
      "loss": 0.8017,
      "step": 830
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.775188668069002e-05,
      "loss": 0.7969,
      "step": 840
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.769862800396674e-05,
      "loss": 0.7942,
      "step": 850
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.764477624722088e-05,
      "loss": 0.7877,
      "step": 860
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.759033281751553e-05,
      "loss": 0.7979,
      "step": 870
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.7535299137373236e-05,
      "loss": 0.7986,
      "step": 880
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.7479676644738894e-05,
      "loss": 0.8124,
      "step": 890
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.742346679294217e-05,
      "loss": 0.7959,
      "step": 900
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.7366671050659505e-05,
      "loss": 0.7884,
      "step": 910
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.730929090187573e-05,
      "loss": 0.7911,
      "step": 920
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.725132784584536e-05,
      "loss": 0.7937,
      "step": 930
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.719278339705333e-05,
      "loss": 0.7994,
      "step": 940
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.71336590851755e-05,
      "loss": 0.7905,
      "step": 950
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.707395645503863e-05,
      "loss": 0.7924,
      "step": 960
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.701367706658006e-05,
      "loss": 0.7891,
      "step": 970
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.695282249480692e-05,
      "loss": 0.7962,
      "step": 980
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.6891394329755e-05,
      "loss": 0.7882,
      "step": 990
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.6829394176447195e-05,
      "loss": 0.8011,
      "step": 1000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.676682365485154e-05,
      "loss": 0.7955,
      "step": 1010
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.670368439983896e-05,
      "loss": 0.7906,
      "step": 1020
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.663997806114048e-05,
      "loss": 0.7853,
      "step": 1030
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.657570630330411e-05,
      "loss": 0.7825,
      "step": 1040
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.6510870805651444e-05,
      "loss": 0.7768,
      "step": 1050
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.6445473262233696e-05,
      "loss": 0.7795,
      "step": 1060
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.637951538178746e-05,
      "loss": 0.7832,
      "step": 1070
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.63129988876901e-05,
      "loss": 0.7934,
      "step": 1080
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.624592551791465e-05,
      "loss": 0.7892,
      "step": 1090
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.617829702498446e-05,
      "loss": 0.7819,
      "step": 1100
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.611011517592741e-05,
      "loss": 0.7868,
      "step": 1110
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.604138175222968e-05,
      "loss": 0.7914,
      "step": 1120
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.597209854978929e-05,
      "loss": 0.7817,
      "step": 1130
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.590226737886908e-05,
      "loss": 0.7916,
      "step": 1140
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.583189006404949e-05,
      "loss": 0.7799,
      "step": 1150
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.576096844418083e-05,
      "loss": 0.7776,
      "step": 1160
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.568950437233529e-05,
      "loss": 0.7803,
      "step": 1170
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.561749971575846e-05,
      "loss": 0.7884,
      "step": 1180
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.5544956355820596e-05,
      "loss": 0.7827,
      "step": 1190
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.547187618796742e-05,
      "loss": 0.7857,
      "step": 1200
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.5398261121670634e-05,
      "loss": 0.7797,
      "step": 1210
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.532411308037798e-05,
      "loss": 0.7821,
      "step": 1220
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.524943400146306e-05,
      "loss": 0.7747,
      "step": 1230
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.51742258361746e-05,
      "loss": 0.7737,
      "step": 1240
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.509849054958559e-05,
      "loss": 0.7868,
      "step": 1250
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.502223012054186e-05,
      "loss": 0.7818,
      "step": 1260
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.4945446541610414e-05,
      "loss": 0.7804,
      "step": 1270
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.486814181902734e-05,
      "loss": 0.7759,
      "step": 1280
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.4790317972645396e-05,
      "loss": 0.7744,
      "step": 1290
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.4711977035881256e-05,
      "loss": 0.7814,
      "step": 1300
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.463312105566237e-05,
      "loss": 0.7611,
      "step": 1310
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.4553752092373465e-05,
      "loss": 0.7796,
      "step": 1320
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.447387221980272e-05,
      "loss": 0.7839,
      "step": 1330
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.4393483525087584e-05,
      "loss": 0.7726,
      "step": 1340
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.4312588108660246e-05,
      "loss": 0.7763,
      "step": 1350
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.423118808419276e-05,
      "loss": 0.766,
      "step": 1360
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.4149285578541785e-05,
      "loss": 0.7887,
      "step": 1370
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.406688273169305e-05,
      "loss": 0.7637,
      "step": 1380
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.3983981696705415e-05,
      "loss": 0.7609,
      "step": 1390
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.390058463965464e-05,
      "loss": 0.7729,
      "step": 1400
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.381669373957675e-05,
      "loss": 0.7751,
      "step": 1410
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.3732311188411154e-05,
      "loss": 0.7877,
      "step": 1420
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.3647439190943315e-05,
      "loss": 0.7738,
      "step": 1430
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.3562079964747207e-05,
      "loss": 0.7834,
      "step": 1440
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.347623574012728e-05,
      "loss": 0.7835,
      "step": 1450
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.338990876006031e-05,
      "loss": 0.7707,
      "step": 1460
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.330310128013666e-05,
      "loss": 0.7674,
      "step": 1470
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.321581556850146e-05,
      "loss": 0.7687,
      "step": 1480
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.312805390579527e-05,
      "loss": 0.7663,
      "step": 1490
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.3039818585094524e-05,
      "loss": 0.7662,
      "step": 1500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.29511119118516e-05,
      "loss": 0.7688,
      "step": 1510
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.28619362038346e-05,
      "loss": 0.7652,
      "step": 1520
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.277229379106676e-05,
      "loss": 0.7683,
      "step": 1530
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.2682187015765615e-05,
      "loss": 0.7621,
      "step": 1540
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.259161823228174e-05,
      "loss": 0.7778,
      "step": 1550
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.2500589807037306e-05,
      "loss": 0.7809,
      "step": 1560
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.240910411846418e-05,
      "loss": 0.7717,
      "step": 1570
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.231716355694184e-05,
      "loss": 0.7584,
      "step": 1580
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.222477052473486e-05,
      "loss": 0.7631,
      "step": 1590
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.21319274359302e-05,
      "loss": 0.7681,
      "step": 1600
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.203863671637409e-05,
      "loss": 0.7739,
      "step": 1610
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.194490080360866e-05,
      "loss": 0.7646,
      "step": 1620
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1850722146808254e-05,
      "loss": 0.7647,
      "step": 1630
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1756103206715416e-05,
      "loss": 0.7675,
      "step": 1640
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.166104645557662e-05,
      "loss": 0.7652,
      "step": 1650
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.15655543770777e-05,
      "loss": 0.765,
      "step": 1660
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.146962946627886e-05,
      "loss": 0.7614,
      "step": 1670
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.137327422954958e-05,
      "loss": 0.7544,
      "step": 1680
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.127649118450308e-05,
      "loss": 0.7673,
      "step": 1690
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.117928285993055e-05,
      "loss": 0.7593,
      "step": 1700
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.108165179573508e-05,
      "loss": 0.7735,
      "step": 1710
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0983600542865285e-05,
      "loss": 0.7666,
      "step": 1720
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.088513166324867e-05,
      "loss": 0.7627,
      "step": 1730
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.078624772972467e-05,
      "loss": 0.77,
      "step": 1740
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.068695132597744e-05,
      "loss": 0.7569,
      "step": 1750
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.058724504646834e-05,
      "loss": 0.7654,
      "step": 1760
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.048713149636816e-05,
      "loss": 0.753,
      "step": 1770
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.038661329148904e-05,
      "loss": 0.7705,
      "step": 1780
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.02856930582161e-05,
      "loss": 0.7542,
      "step": 1790
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.018437343343886e-05,
      "loss": 0.7532,
      "step": 1800
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.008265706448234e-05,
      "loss": 0.7662,
      "step": 1810
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.9980546609037825e-05,
      "loss": 0.7607,
      "step": 1820
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9878044735093505e-05,
      "loss": 0.7552,
      "step": 1830
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.977515412086472e-05,
      "loss": 0.7645,
      "step": 1840
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9671877454723985e-05,
      "loss": 0.7689,
      "step": 1850
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.956821743513078e-05,
      "loss": 0.7615,
      "step": 1860
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.946417677056097e-05,
      "loss": 0.7703,
      "step": 1870
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.935975817943614e-05,
      "loss": 0.766,
      "step": 1880
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.925496439005246e-05,
      "loss": 0.7632,
      "step": 1890
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.914979814050949e-05,
      "loss": 0.7646,
      "step": 1900
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.904426217863858e-05,
      "loss": 0.7624,
      "step": 1910
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.8938359261931076e-05,
      "loss": 0.7498,
      "step": 1920
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.883209215746633e-05,
      "loss": 0.7541,
      "step": 1930
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.8725463641839296e-05,
      "loss": 0.7622,
      "step": 1940
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.8618476501088094e-05,
      "loss": 0.7536,
      "step": 1950
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.851113353062115e-05,
      "loss": 0.7419,
      "step": 1960
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.840343753514414e-05,
      "loss": 0.756,
      "step": 1970
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.829539132858678e-05,
      "loss": 0.7511,
      "step": 1980
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.818699773402925e-05,
      "loss": 0.7505,
      "step": 1990
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.80782595836284e-05,
      "loss": 0.7396,
      "step": 2000
    }
  ],
  "max_steps": 6146,
  "num_train_epochs": 2,
  "total_flos": 3.592019131911635e+18,
  "trial_name": null,
  "trial_params": null
}