{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1008,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02976190476190476,
      "grad_norm": 9.855416467128451,
      "learning_rate": 3.2894736842105264e-07,
      "loss": 0.8831,
      "step": 10
    },
    {
      "epoch": 0.05952380952380952,
      "grad_norm": 4.058333207297266,
      "learning_rate": 6.578947368421053e-07,
      "loss": 0.7946,
      "step": 20
    },
    {
      "epoch": 0.08928571428571429,
      "grad_norm": 1.7570375433828875,
      "learning_rate": 9.86842105263158e-07,
      "loss": 0.7122,
      "step": 30
    },
    {
      "epoch": 0.11904761904761904,
      "grad_norm": 1.1575290406793173,
      "learning_rate": 1.3157894736842106e-06,
      "loss": 0.6601,
      "step": 40
    },
    {
      "epoch": 0.1488095238095238,
      "grad_norm": 1.1945148190883437,
      "learning_rate": 1.6447368421052635e-06,
      "loss": 0.6275,
      "step": 50
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 1.221041026154983,
      "learning_rate": 1.973684210526316e-06,
      "loss": 0.6052,
      "step": 60
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 3.3209006702030646,
      "learning_rate": 2.3026315789473684e-06,
      "loss": 0.586,
      "step": 70
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 3.6681811496303838,
      "learning_rate": 2.631578947368421e-06,
      "loss": 0.5798,
      "step": 80
    },
    {
      "epoch": 0.26785714285714285,
      "grad_norm": 3.2185752709902724,
      "learning_rate": 2.960526315789474e-06,
      "loss": 0.5666,
      "step": 90
    },
    {
      "epoch": 0.2976190476190476,
      "grad_norm": 3.4706862638119795,
      "learning_rate": 3.289473684210527e-06,
      "loss": 0.5638,
      "step": 100
    },
    {
      "epoch": 0.3273809523809524,
      "grad_norm": 3.6113112350238055,
      "learning_rate": 3.618421052631579e-06,
      "loss": 0.5522,
      "step": 110
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 3.2305389516520107,
      "learning_rate": 3.947368421052632e-06,
      "loss": 0.5463,
      "step": 120
    },
    {
      "epoch": 0.3869047619047619,
      "grad_norm": 3.038105776417889,
      "learning_rate": 4.276315789473684e-06,
      "loss": 0.5435,
      "step": 130
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 2.8452115838960004,
      "learning_rate": 4.605263157894737e-06,
      "loss": 0.5368,
      "step": 140
    },
    {
      "epoch": 0.44642857142857145,
      "grad_norm": 2.885654936231309,
      "learning_rate": 4.9342105263157895e-06,
      "loss": 0.5371,
      "step": 150
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 2.4395389590432695,
      "learning_rate": 4.998922515567496e-06,
      "loss": 0.5311,
      "step": 160
    },
    {
      "epoch": 0.5059523809523809,
      "grad_norm": 3.316027131916539,
      "learning_rate": 4.994546826814266e-06,
      "loss": 0.5306,
      "step": 170
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 2.1091015101835064,
      "learning_rate": 4.986811479610213e-06,
      "loss": 0.5235,
      "step": 180
    },
    {
      "epoch": 0.5654761904761905,
      "grad_norm": 3.105111788839091,
      "learning_rate": 4.975726891929585e-06,
      "loss": 0.5172,
      "step": 190
    },
    {
      "epoch": 0.5952380952380952,
      "grad_norm": 2.4351797747389288,
      "learning_rate": 4.9613079925074435e-06,
      "loss": 0.5127,
      "step": 200
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.058969332069431,
      "learning_rate": 4.943574200733625e-06,
      "loss": 0.5135,
      "step": 210
    },
    {
      "epoch": 0.6547619047619048,
      "grad_norm": 1.777838918535288,
      "learning_rate": 4.922549400498685e-06,
      "loss": 0.5089,
      "step": 220
    },
    {
      "epoch": 0.6845238095238095,
      "grad_norm": 2.7154588748327986,
      "learning_rate": 4.89826190802705e-06,
      "loss": 0.5037,
      "step": 230
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.5679948903593761,
      "learning_rate": 4.870744433740688e-06,
      "loss": 0.4986,
      "step": 240
    },
    {
      "epoch": 0.7440476190476191,
      "grad_norm": 1.4962958992598898,
      "learning_rate": 4.840034038204687e-06,
      "loss": 0.4972,
      "step": 250
    },
    {
      "epoch": 0.7738095238095238,
      "grad_norm": 2.122366808210638,
      "learning_rate": 4.806172082214032e-06,
      "loss": 0.4989,
      "step": 260
    },
    {
      "epoch": 0.8035714285714286,
      "grad_norm": 2.0302822908345153,
      "learning_rate": 4.7692041710888495e-06,
      "loss": 0.4901,
      "step": 270
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 3.5745905459632543,
      "learning_rate": 4.729180093253106e-06,
      "loss": 0.4916,
      "step": 280
    },
    {
      "epoch": 0.8630952380952381,
      "grad_norm": 1.8908937013266554,
      "learning_rate": 4.6861537531795095e-06,
      "loss": 0.4883,
      "step": 290
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 2.6221469442693652,
      "learning_rate": 4.640183098790896e-06,
      "loss": 0.4867,
      "step": 300
    },
    {
      "epoch": 0.9226190476190477,
      "grad_norm": 2.112945737548425,
      "learning_rate": 4.5913300434159095e-06,
      "loss": 0.4808,
      "step": 310
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 2.167157341115666,
      "learning_rate": 4.53966038240406e-06,
      "loss": 0.4829,
      "step": 320
    },
    {
      "epoch": 0.9821428571428571,
      "grad_norm": 1.7665074082761922,
      "learning_rate": 4.485243704512474e-06,
      "loss": 0.4775,
      "step": 330
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.05986911430954933,
      "eval_runtime": 228.6618,
      "eval_samples_per_second": 79.169,
      "eval_steps_per_second": 0.621,
      "step": 336
    },
    {
      "epoch": 1.0119047619047619,
      "grad_norm": 2.7005444699805397,
      "learning_rate": 4.428153298183676e-06,
      "loss": 0.4615,
      "step": 340
    },
    {
      "epoch": 1.0416666666666667,
      "grad_norm": 2.1637697857642766,
      "learning_rate": 4.368466052840636e-06,
      "loss": 0.4222,
      "step": 350
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 2.1155220244099318,
      "learning_rate": 4.306262355332006e-06,
      "loss": 0.4226,
      "step": 360
    },
    {
      "epoch": 1.1011904761904763,
      "grad_norm": 2.9522060556993805,
      "learning_rate": 4.2416259816670235e-06,
      "loss": 0.4231,
      "step": 370
    },
    {
      "epoch": 1.130952380952381,
      "grad_norm": 3.069829782500402,
      "learning_rate": 4.17464398418589e-06,
      "loss": 0.424,
      "step": 380
    },
    {
      "epoch": 1.1607142857142858,
      "grad_norm": 3.0267979137491263,
      "learning_rate": 4.105406574317579e-06,
      "loss": 0.4244,
      "step": 390
    },
    {
      "epoch": 1.1904761904761905,
      "grad_norm": 2.1977669399883695,
      "learning_rate": 4.034007001082985e-06,
      "loss": 0.418,
      "step": 400
    },
    {
      "epoch": 1.2202380952380953,
      "grad_norm": 2.697858786471712,
      "learning_rate": 3.960541425507039e-06,
      "loss": 0.4233,
      "step": 410
    },
    {
      "epoch": 1.25,
      "grad_norm": 2.493946030937562,
      "learning_rate": 3.8851087911089315e-06,
      "loss": 0.4198,
      "step": 420
    },
    {
      "epoch": 1.2797619047619047,
      "grad_norm": 2.2545695426043544,
      "learning_rate": 3.8078106906448684e-06,
      "loss": 0.4183,
      "step": 430
    },
    {
      "epoch": 1.3095238095238095,
      "grad_norm": 2.375162881348148,
      "learning_rate": 3.7287512292828364e-06,
      "loss": 0.4168,
      "step": 440
    },
    {
      "epoch": 1.3392857142857144,
      "grad_norm": 1.5621396833558368,
      "learning_rate": 3.6480368843936463e-06,
      "loss": 0.4163,
      "step": 450
    },
    {
      "epoch": 1.369047619047619,
      "grad_norm": 1.3598263826513288,
      "learning_rate": 3.5657763621470905e-06,
      "loss": 0.4197,
      "step": 460
    },
    {
      "epoch": 1.3988095238095237,
      "grad_norm": 1.6629757133775533,
      "learning_rate": 3.48208045110635e-06,
      "loss": 0.4187,
      "step": 470
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 2.2093813175502968,
      "learning_rate": 3.39706187301784e-06,
      "loss": 0.4166,
      "step": 480
    },
    {
      "epoch": 1.4583333333333333,
      "grad_norm": 1.884934841996157,
      "learning_rate": 3.3108351309974286e-06,
      "loss": 0.4154,
      "step": 490
    },
    {
      "epoch": 1.4880952380952381,
      "grad_norm": 1.5082137043399462,
      "learning_rate": 3.2235163553175174e-06,
      "loss": 0.417,
      "step": 500
    },
    {
      "epoch": 1.5178571428571428,
      "grad_norm": 1.3657040531080291,
      "learning_rate": 3.1352231470026583e-06,
      "loss": 0.416,
      "step": 510
    },
    {
      "epoch": 1.5476190476190477,
      "grad_norm": 1.2254626355269007,
      "learning_rate": 3.0460744194443658e-06,
      "loss": 0.4142,
      "step": 520
    },
    {
      "epoch": 1.5773809523809523,
      "grad_norm": 1.1398076994088004,
      "learning_rate": 2.956190238248425e-06,
      "loss": 0.4149,
      "step": 530
    },
    {
      "epoch": 1.6071428571428572,
      "grad_norm": 1.1825076561434265,
      "learning_rate": 2.8656916595304026e-06,
      "loss": 0.4133,
      "step": 540
    },
    {
      "epoch": 1.6369047619047619,
      "grad_norm": 1.5151800119422074,
      "learning_rate": 2.7747005668771293e-06,
      "loss": 0.4173,
      "step": 550
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 1.4847041684984181,
      "learning_rate": 2.68333950719376e-06,
      "loss": 0.4122,
      "step": 560
    },
    {
      "epoch": 1.6964285714285714,
      "grad_norm": 1.4926221075919288,
      "learning_rate": 2.5917315256574543e-06,
      "loss": 0.4138,
      "step": 570
    },
    {
      "epoch": 1.7261904761904763,
      "grad_norm": 1.4851596357393222,
      "learning_rate": 2.5e-06,
      "loss": 0.4146,
      "step": 580
    },
    {
      "epoch": 1.755952380952381,
      "grad_norm": 1.4494102675836165,
      "learning_rate": 2.4082684743425457e-06,
      "loss": 0.41,
      "step": 590
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 1.096907686976151,
      "learning_rate": 2.3166604928062407e-06,
      "loss": 0.4083,
      "step": 600
    },
    {
      "epoch": 1.8154761904761905,
      "grad_norm": 1.1069324511062155,
      "learning_rate": 2.2252994331228715e-06,
      "loss": 0.4092,
      "step": 610
    },
    {
      "epoch": 1.8452380952380953,
      "grad_norm": 1.5676344586960804,
      "learning_rate": 2.1343083404695987e-06,
      "loss": 0.4071,
      "step": 620
    },
    {
      "epoch": 1.875,
      "grad_norm": 1.5244672991700199,
      "learning_rate": 2.0438097617515744e-06,
      "loss": 0.4092,
      "step": 630
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 1.1711984219863045,
      "learning_rate": 1.9539255805556346e-06,
      "loss": 0.4056,
      "step": 640
    },
    {
      "epoch": 1.9345238095238095,
      "grad_norm": 1.2364277364431069,
      "learning_rate": 1.8647768529973426e-06,
      "loss": 0.4065,
      "step": 650
    },
    {
      "epoch": 1.9642857142857144,
      "grad_norm": 1.173689997741917,
      "learning_rate": 1.7764836446824835e-06,
      "loss": 0.4066,
      "step": 660
    },
    {
      "epoch": 1.994047619047619,
      "grad_norm": 1.0429379558488223,
      "learning_rate": 1.689164869002572e-06,
      "loss": 0.4039,
      "step": 670
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.05654621869325638,
      "eval_runtime": 233.1622,
      "eval_samples_per_second": 77.641,
      "eval_steps_per_second": 0.609,
      "step": 672
    },
    {
      "epoch": 2.0238095238095237,
      "grad_norm": 1.8042620783640224,
      "learning_rate": 1.6029381269821607e-06,
      "loss": 0.355,
      "step": 680
    },
    {
      "epoch": 2.0535714285714284,
      "grad_norm": 1.3103423601245003,
      "learning_rate": 1.5179195488936505e-06,
      "loss": 0.342,
      "step": 690
    },
    {
      "epoch": 2.0833333333333335,
      "grad_norm": 1.1981284388735467,
      "learning_rate": 1.4342236378529106e-06,
      "loss": 0.3394,
      "step": 700
    },
    {
      "epoch": 2.113095238095238,
      "grad_norm": 1.3227357738839063,
      "learning_rate": 1.351963115606354e-06,
      "loss": 0.3363,
      "step": 710
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 1.0826862483712594,
      "learning_rate": 1.2712487707171645e-06,
      "loss": 0.3378,
      "step": 720
    },
    {
      "epoch": 2.1726190476190474,
      "grad_norm": 1.2571941714304598,
      "learning_rate": 1.1921893093551324e-06,
      "loss": 0.338,
      "step": 730
    },
    {
      "epoch": 2.2023809523809526,
      "grad_norm": 1.168368127805181,
      "learning_rate": 1.1148912088910687e-06,
      "loss": 0.3363,
      "step": 740
    },
    {
      "epoch": 2.232142857142857,
      "grad_norm": 1.1226203835961706,
      "learning_rate": 1.0394585744929605e-06,
      "loss": 0.3393,
      "step": 750
    },
    {
      "epoch": 2.261904761904762,
      "grad_norm": 1.0656940582932617,
      "learning_rate": 9.659929989170156e-07,
      "loss": 0.3401,
      "step": 760
    },
    {
      "epoch": 2.2916666666666665,
      "grad_norm": 1.074973225972827,
      "learning_rate": 8.945934256824218e-07,
      "loss": 0.3356,
      "step": 770
    },
    {
      "epoch": 2.3214285714285716,
      "grad_norm": 1.1457434350978388,
      "learning_rate": 8.253560158141111e-07,
      "loss": 0.3372,
      "step": 780
    },
    {
      "epoch": 2.3511904761904763,
      "grad_norm": 1.0748233318983023,
      "learning_rate": 7.583740183329769e-07,
      "loss": 0.3339,
      "step": 790
    },
    {
      "epoch": 2.380952380952381,
      "grad_norm": 1.0162550586451078,
      "learning_rate": 6.93737644667995e-07,
      "loss": 0.3366,
      "step": 800
    },
    {
      "epoch": 2.4107142857142856,
      "grad_norm": 1.0398744748365716,
      "learning_rate": 6.315339471593646e-07,
      "loss": 0.3359,
      "step": 810
    },
    {
      "epoch": 2.4404761904761907,
      "grad_norm": 1.0255604222182617,
      "learning_rate": 5.718467018163243e-07,
      "loss": 0.3344,
      "step": 820
    },
    {
      "epoch": 2.4702380952380953,
      "grad_norm": 0.9872536325493508,
      "learning_rate": 5.147562954875269e-07,
      "loss": 0.3362,
      "step": 830
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.0132034371898346,
      "learning_rate": 4.6033961759594045e-07,
      "loss": 0.3337,
      "step": 840
    },
    {
      "epoch": 2.5297619047619047,
      "grad_norm": 1.0177216034721577,
      "learning_rate": 4.0866995658409155e-07,
      "loss": 0.3363,
      "step": 850
    },
    {
      "epoch": 2.5595238095238093,
      "grad_norm": 0.9981956934484016,
      "learning_rate": 3.598169012091049e-07,
      "loss": 0.3362,
      "step": 860
    },
    {
      "epoch": 2.5892857142857144,
      "grad_norm": 0.9529369028261363,
      "learning_rate": 3.1384624682049144e-07,
      "loss": 0.3394,
      "step": 870
    },
    {
      "epoch": 2.619047619047619,
      "grad_norm": 0.9766851469747638,
      "learning_rate": 2.708199067468939e-07,
      "loss": 0.3343,
      "step": 880
    },
    {
      "epoch": 2.6488095238095237,
      "grad_norm": 0.9534173170238686,
      "learning_rate": 2.3079582891115144e-07,
      "loss": 0.3358,
      "step": 890
    },
    {
      "epoch": 2.678571428571429,
      "grad_norm": 0.9571473294443575,
      "learning_rate": 1.9382791778596864e-07,
      "loss": 0.3343,
      "step": 900
    },
    {
      "epoch": 2.7083333333333335,
      "grad_norm": 0.9382822776109654,
      "learning_rate": 1.5996596179531365e-07,
      "loss": 0.3348,
      "step": 910
    },
    {
      "epoch": 2.738095238095238,
      "grad_norm": 0.9309707054846061,
      "learning_rate": 1.2925556625931174e-07,
      "loss": 0.3375,
      "step": 920
    },
    {
      "epoch": 2.767857142857143,
      "grad_norm": 0.9621376118686727,
      "learning_rate": 1.0173809197295075e-07,
      "loss": 0.336,
      "step": 930
    },
    {
      "epoch": 2.7976190476190474,
      "grad_norm": 0.9169046599509947,
      "learning_rate": 7.7450599501315e-08,
      "loss": 0.3339,
      "step": 940
    },
    {
      "epoch": 2.8273809523809526,
      "grad_norm": 0.9398290467609749,
      "learning_rate": 5.6425799266375534e-08,
      "loss": 0.335,
      "step": 950
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.9100820752220524,
      "learning_rate": 3.869200749255703e-08,
      "loss": 0.334,
      "step": 960
    },
    {
      "epoch": 2.886904761904762,
      "grad_norm": 0.904751932290303,
      "learning_rate": 2.427310807041561e-08,
      "loss": 0.3327,
      "step": 970
    },
    {
      "epoch": 2.9166666666666665,
      "grad_norm": 0.913682536330684,
      "learning_rate": 1.3188520389787462e-08,
      "loss": 0.3313,
      "step": 980
    },
    {
      "epoch": 2.946428571428571,
      "grad_norm": 0.9340585620250436,
      "learning_rate": 5.453173185734073e-09,
      "loss": 0.336,
      "step": 990
    },
    {
      "epoch": 2.9761904761904763,
      "grad_norm": 0.9406350686317787,
      "learning_rate": 1.0774844325039946e-09,
      "loss": 0.3325,
      "step": 1000
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.057458870112895966,
      "eval_runtime": 228.1192,
      "eval_samples_per_second": 79.358,
      "eval_steps_per_second": 0.622,
      "step": 1008
    },
    {
      "epoch": 3.0,
      "step": 1008,
      "total_flos": 3376456327495680.0,
      "train_loss": 0.4351037277115716,
      "train_runtime": 33019.7588,
      "train_samples_per_second": 31.25,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 10,
  "max_steps": 1008,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3376456327495680.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}