|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.994991652754591,
  "eval_steps": 500,
  "global_step": 897,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00333889816360601,
      "grad_norm": 1633.0780614838177,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 14.7478,
      "step": 1
    },
    {
      "epoch": 0.01669449081803005,
      "grad_norm": 1232.852374646303,
      "learning_rate": 1.111111111111111e-06,
      "loss": 13.7637,
      "step": 5
    },
    {
      "epoch": 0.0333889816360601,
      "grad_norm": 160.62292486481945,
      "learning_rate": 2.222222222222222e-06,
      "loss": 6.5941,
      "step": 10
    },
    {
      "epoch": 0.05008347245409015,
      "grad_norm": 54.67306425849189,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.4942,
      "step": 15
    },
    {
      "epoch": 0.0667779632721202,
      "grad_norm": 36.70425670339494,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.7415,
      "step": 20
    },
    {
      "epoch": 0.08347245409015025,
      "grad_norm": 31.233563869291352,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.5583,
      "step": 25
    },
    {
      "epoch": 0.1001669449081803,
      "grad_norm": 11.808038087592866,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.465,
      "step": 30
    },
    {
      "epoch": 0.11686143572621036,
      "grad_norm": 11.136891922183436,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.4276,
      "step": 35
    },
    {
      "epoch": 0.1335559265442404,
      "grad_norm": 35.559026101254695,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.2878,
      "step": 40
    },
    {
      "epoch": 0.15025041736227046,
      "grad_norm": 24.433680953972228,
      "learning_rate": 1e-05,
      "loss": 1.2278,
      "step": 45
    },
    {
      "epoch": 0.1669449081803005,
      "grad_norm": 124.38615525639153,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.1305,
      "step": 50
    },
    {
      "epoch": 0.18363939899833054,
      "grad_norm": 123.27724020667802,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 1.2199,
      "step": 55
    },
    {
      "epoch": 0.2003338898163606,
      "grad_norm": 57.91104969220954,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.2104,
      "step": 60
    },
    {
      "epoch": 0.21702838063439064,
      "grad_norm": 18.21646289641897,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 1.1268,
      "step": 65
    },
    {
      "epoch": 0.2337228714524207,
      "grad_norm": 9.526734443694286,
      "learning_rate": 1.555555555555556e-05,
      "loss": 1.1197,
      "step": 70
    },
    {
      "epoch": 0.25041736227045075,
      "grad_norm": 82.75544838534852,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.1559,
      "step": 75
    },
    {
      "epoch": 0.2671118530884808,
      "grad_norm": 49.643780627497094,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.172,
      "step": 80
    },
    {
      "epoch": 0.2838063439065108,
      "grad_norm": 27.77568045936215,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.1498,
      "step": 85
    },
    {
      "epoch": 0.3005008347245409,
      "grad_norm": 85.83380293575804,
      "learning_rate": 2e-05,
      "loss": 1.1418,
      "step": 90
    },
    {
      "epoch": 0.31719532554257096,
      "grad_norm": 9.632034343502017,
      "learning_rate": 1.9998105699049984e-05,
      "loss": 1.1007,
      "step": 95
    },
    {
      "epoch": 0.333889816360601,
      "grad_norm": 37.22916540635626,
      "learning_rate": 1.9992423513875158e-05,
      "loss": 1.0916,
      "step": 100
    },
    {
      "epoch": 0.35058430717863104,
      "grad_norm": 49.23626406084266,
      "learning_rate": 1.9982955597229275e-05,
      "loss": 1.1476,
      "step": 105
    },
    {
      "epoch": 0.3672787979966611,
      "grad_norm": 41.53491105452397,
      "learning_rate": 1.9969705536129033e-05,
      "loss": 1.1108,
      "step": 110
    },
    {
      "epoch": 0.38397328881469117,
      "grad_norm": 52.40878364354266,
      "learning_rate": 1.9952678350495104e-05,
      "loss": 1.0695,
      "step": 115
    },
    {
      "epoch": 0.4006677796327212,
      "grad_norm": 35.305587442500695,
      "learning_rate": 1.9931880491250263e-05,
      "loss": 1.0682,
      "step": 120
    },
    {
      "epoch": 0.41736227045075125,
      "grad_norm": 37.14178936776283,
      "learning_rate": 1.990731983787542e-05,
      "loss": 1.0501,
      "step": 125
    },
    {
      "epoch": 0.4340567612687813,
      "grad_norm": 67.45550897017867,
      "learning_rate": 1.987900569542438e-05,
      "loss": 1.054,
      "step": 130
    },
    {
      "epoch": 0.4507512520868113,
      "grad_norm": 14.900523343688862,
      "learning_rate": 1.9846948790998532e-05,
      "loss": 1.047,
      "step": 135
    },
    {
      "epoch": 0.4674457429048414,
      "grad_norm": 26.97945797218196,
      "learning_rate": 1.9811161269682776e-05,
      "loss": 1.0417,
      "step": 140
    },
    {
      "epoch": 0.48414023372287146,
      "grad_norm": 31.768395936058706,
      "learning_rate": 1.9771656689944238e-05,
      "loss": 1.0622,
      "step": 145
    },
    {
      "epoch": 0.5008347245409015,
      "grad_norm": 17.22340247377649,
      "learning_rate": 1.9728450018495506e-05,
      "loss": 1.0399,
      "step": 150
    },
    {
      "epoch": 0.5175292153589316,
      "grad_norm": 20.785198792338,
      "learning_rate": 1.968155762462433e-05,
      "loss": 1.0335,
      "step": 155
    },
    {
      "epoch": 0.5342237061769616,
      "grad_norm": 12.868219548980708,
      "learning_rate": 1.9630997273991964e-05,
      "loss": 1.0585,
      "step": 160
    },
    {
      "epoch": 0.5509181969949917,
      "grad_norm": 16.52094280846651,
      "learning_rate": 1.9576788121902457e-05,
      "loss": 1.0563,
      "step": 165
    },
    {
      "epoch": 0.5676126878130217,
      "grad_norm": 15.154512778467309,
      "learning_rate": 1.951895070604547e-05,
      "loss": 1.052,
      "step": 170
    },
    {
      "epoch": 0.5843071786310517,
      "grad_norm": 15.109216310230805,
      "learning_rate": 1.9457506938715357e-05,
      "loss": 1.0353,
      "step": 175
    },
    {
      "epoch": 0.6010016694490818,
      "grad_norm": 21.543252819256704,
      "learning_rate": 1.9392480098509488e-05,
      "loss": 1.0182,
      "step": 180
    },
    {
      "epoch": 0.6176961602671118,
      "grad_norm": 15.449989929271629,
      "learning_rate": 1.93238948215089e-05,
      "loss": 1.0341,
      "step": 185
    },
    {
      "epoch": 0.6343906510851419,
      "grad_norm": 23.23349123860765,
      "learning_rate": 1.9251777091944665e-05,
      "loss": 1.0261,
      "step": 190
    },
    {
      "epoch": 0.6510851419031719,
      "grad_norm": 16.41219778505035,
      "learning_rate": 1.9176154232353513e-05,
      "loss": 1.0388,
      "step": 195
    },
    {
      "epoch": 0.667779632721202,
      "grad_norm": 18.296566123425983,
      "learning_rate": 1.9097054893226395e-05,
      "loss": 1.0254,
      "step": 200
    },
    {
      "epoch": 0.6844741235392321,
      "grad_norm": 36.60365240585806,
      "learning_rate": 1.9014509042153964e-05,
      "loss": 1.0338,
      "step": 205
    },
    {
      "epoch": 0.7011686143572621,
      "grad_norm": 14.989102480397309,
      "learning_rate": 1.8928547952473037e-05,
      "loss": 1.052,
      "step": 210
    },
    {
      "epoch": 0.7178631051752922,
      "grad_norm": 16.72561045227947,
      "learning_rate": 1.8839204191418386e-05,
      "loss": 1.0237,
      "step": 215
    },
    {
      "epoch": 0.7345575959933222,
      "grad_norm": 31.90133374330817,
      "learning_rate": 1.8746511607784298e-05,
      "loss": 1.0203,
      "step": 220
    },
    {
      "epoch": 0.7512520868113522,
      "grad_norm": 19.165418054306723,
      "learning_rate": 1.865050531910062e-05,
      "loss": 1.0213,
      "step": 225
    },
    {
      "epoch": 0.7679465776293823,
      "grad_norm": 34.80379707869377,
      "learning_rate": 1.855122169832813e-05,
      "loss": 1.0056,
      "step": 230
    },
    {
      "epoch": 0.7846410684474123,
      "grad_norm": 24.103854680859914,
      "learning_rate": 1.844869836007825e-05,
      "loss": 1.0089,
      "step": 235
    },
    {
      "epoch": 0.8013355592654424,
      "grad_norm": 54.705273129166606,
      "learning_rate": 1.8342974146362397e-05,
      "loss": 0.9912,
      "step": 240
    },
    {
      "epoch": 0.8180300500834724,
      "grad_norm": 15.340510497628932,
      "learning_rate": 1.8234089111876256e-05,
      "loss": 0.9708,
      "step": 245
    },
    {
      "epoch": 0.8347245409015025,
      "grad_norm": 22.865867473032072,
      "learning_rate": 1.8122084508824692e-05,
      "loss": 0.9491,
      "step": 250
    },
    {
      "epoch": 0.8514190317195326,
      "grad_norm": 28.125454120215036,
      "learning_rate": 1.80070027712929e-05,
      "loss": 0.9687,
      "step": 255
    },
    {
      "epoch": 0.8681135225375626,
      "grad_norm": 48.320081372428405,
      "learning_rate": 1.7888887499169816e-05,
      "loss": 0.993,
      "step": 260
    },
    {
      "epoch": 0.8848080133555927,
      "grad_norm": 17.216083695624388,
      "learning_rate": 1.7767783441629883e-05,
      "loss": 1.0175,
      "step": 265
    },
    {
      "epoch": 0.9015025041736227,
      "grad_norm": 20.542693182690996,
      "learning_rate": 1.7643736480179353e-05,
      "loss": 0.9942,
      "step": 270
    },
    {
      "epoch": 0.9181969949916527,
      "grad_norm": 12.90866880913044,
      "learning_rate": 1.7516793611273614e-05,
      "loss": 1.0009,
      "step": 275
    },
    {
      "epoch": 0.9348914858096828,
      "grad_norm": 32.0014460896073,
      "learning_rate": 1.7387002928512093e-05,
      "loss": 0.9653,
      "step": 280
    },
    {
      "epoch": 0.9515859766277128,
      "grad_norm": 17.082534238862834,
      "learning_rate": 1.725441360441752e-05,
      "loss": 0.9329,
      "step": 285
    },
    {
      "epoch": 0.9682804674457429,
      "grad_norm": 18.406642188469615,
      "learning_rate": 1.711907587180642e-05,
      "loss": 0.9704,
      "step": 290
    },
    {
      "epoch": 0.9849749582637729,
      "grad_norm": 25.006696471743908,
      "learning_rate": 1.698104100475788e-05,
      "loss": 0.9246,
      "step": 295
    },
    {
      "epoch": 0.998330550918197,
      "eval_loss": 1.026776909828186,
      "eval_runtime": 21.5058,
      "eval_samples_per_second": 88.953,
      "eval_steps_per_second": 2.79,
      "step": 299
    },
    {
      "epoch": 1.001669449081803,
      "grad_norm": 15.244399722195986,
      "learning_rate": 1.684036129918786e-05,
      "loss": 0.9362,
      "step": 300
    },
    {
      "epoch": 1.018363939899833,
      "grad_norm": 5.806089115410544,
      "learning_rate": 1.6697090053036344e-05,
      "loss": 0.7907,
      "step": 305
    },
    {
      "epoch": 1.0350584307178632,
      "grad_norm": 5.440155995126969,
      "learning_rate": 1.6551281546074863e-05,
      "loss": 0.8424,
      "step": 310
    },
    {
      "epoch": 1.0517529215358932,
      "grad_norm": 7.168040489651867,
      "learning_rate": 1.6402991019342073e-05,
      "loss": 0.7992,
      "step": 315
    },
    {
      "epoch": 1.0684474123539232,
      "grad_norm": 11.878722031675423,
      "learning_rate": 1.625227465421511e-05,
      "loss": 0.847,
      "step": 320
    },
    {
      "epoch": 1.0851419031719534,
      "grad_norm": 7.545679524982675,
      "learning_rate": 1.60991895511247e-05,
      "loss": 0.8243,
      "step": 325
    },
    {
      "epoch": 1.1018363939899833,
      "grad_norm": 13.016594535944764,
      "learning_rate": 1.5943793707922086e-05,
      "loss": 0.806,
      "step": 330
    },
    {
      "epoch": 1.1185308848080133,
      "grad_norm": 7.3498644599599885,
      "learning_rate": 1.5786145997905952e-05,
      "loss": 0.8011,
      "step": 335
    },
    {
      "epoch": 1.1352253756260433,
      "grad_norm": 9.105183728580222,
      "learning_rate": 1.5626306147517665e-05,
      "loss": 0.8177,
      "step": 340
    },
    {
      "epoch": 1.1519198664440735,
      "grad_norm": 25.008986087097295,
      "learning_rate": 1.5464334713713312e-05,
      "loss": 0.8145,
      "step": 345
    },
    {
      "epoch": 1.1686143572621035,
      "grad_norm": 12.377325631412589,
      "learning_rate": 1.5300293061021084e-05,
      "loss": 0.8642,
      "step": 350
    },
    {
      "epoch": 1.1853088480801335,
      "grad_norm": 11.191522601640232,
      "learning_rate": 1.5134243338292686e-05,
      "loss": 0.8359,
      "step": 355
    },
    {
      "epoch": 1.2020033388981637,
      "grad_norm": 17.12714029774076,
      "learning_rate": 1.4966248455157622e-05,
      "loss": 0.833,
      "step": 360
    },
    {
      "epoch": 1.2186978297161937,
      "grad_norm": 6.348885873172548,
      "learning_rate": 1.4796372058189235e-05,
      "loss": 0.7872,
      "step": 365
    },
    {
      "epoch": 1.2353923205342237,
      "grad_norm": 9.944628643661803,
      "learning_rate": 1.4624678506791556e-05,
      "loss": 0.8057,
      "step": 370
    },
    {
      "epoch": 1.2520868113522536,
      "grad_norm": 18.07552847892337,
      "learning_rate": 1.445123284881609e-05,
      "loss": 0.7829,
      "step": 375
    },
    {
      "epoch": 1.2687813021702838,
      "grad_norm": 30.335726590753946,
      "learning_rate": 1.4276100795917777e-05,
      "loss": 0.8013,
      "step": 380
    },
    {
      "epoch": 1.2854757929883138,
      "grad_norm": 16.608038839902616,
      "learning_rate": 1.409934869865945e-05,
      "loss": 0.831,
      "step": 385
    },
    {
      "epoch": 1.302170283806344,
      "grad_norm": 14.333469737425801,
      "learning_rate": 1.392104352137426e-05,
      "loss": 0.8015,
      "step": 390
    },
    {
      "epoch": 1.318864774624374,
      "grad_norm": 13.451868406991572,
      "learning_rate": 1.3741252816795552e-05,
      "loss": 0.8014,
      "step": 395
    },
    {
      "epoch": 1.335559265442404,
      "grad_norm": 5.9058948036424,
      "learning_rate": 1.3560044700463824e-05,
      "loss": 0.8,
      "step": 400
    },
    {
      "epoch": 1.352253756260434,
      "grad_norm": 53.944440853433036,
      "learning_rate": 1.3377487824920459e-05,
      "loss": 0.8174,
      "step": 405
    },
    {
      "epoch": 1.3689482470784642,
      "grad_norm": 18.071190540649788,
      "learning_rate": 1.3193651353698012e-05,
      "loss": 0.8252,
      "step": 410
    },
    {
      "epoch": 1.3856427378964942,
      "grad_norm": 35.62084029605276,
      "learning_rate": 1.30086049351169e-05,
      "loss": 0.8221,
      "step": 415
    },
    {
      "epoch": 1.4023372287145242,
      "grad_norm": 19.99178320201263,
      "learning_rate": 1.2822418675898428e-05,
      "loss": 0.806,
      "step": 420
    },
    {
      "epoch": 1.4190317195325544,
      "grad_norm": 27.2927325619504,
      "learning_rate": 1.2635163114604131e-05,
      "loss": 0.7997,
      "step": 425
    },
    {
      "epoch": 1.4357262103505843,
      "grad_norm": 28.98127275070747,
      "learning_rate": 1.2446909194911552e-05,
      "loss": 0.8117,
      "step": 430
    },
    {
      "epoch": 1.4524207011686143,
      "grad_norm": 12.946012794793734,
      "learning_rate": 1.2257728238736468e-05,
      "loss": 0.7836,
      "step": 435
    },
    {
      "epoch": 1.4691151919866443,
      "grad_norm": 11.264077156777665,
      "learning_rate": 1.2067691919211879e-05,
      "loss": 0.7604,
      "step": 440
    },
    {
      "epoch": 1.4858096828046745,
      "grad_norm": 21.934680069997075,
      "learning_rate": 1.1876872233533909e-05,
      "loss": 0.7897,
      "step": 445
    },
    {
      "epoch": 1.5025041736227045,
      "grad_norm": 15.129234971208845,
      "learning_rate": 1.1685341475684935e-05,
      "loss": 0.7706,
      "step": 450
    },
    {
      "epoch": 1.5191986644407347,
      "grad_norm": 18.77833994006315,
      "learning_rate": 1.1493172209044259e-05,
      "loss": 0.7391,
      "step": 455
    },
    {
      "epoch": 1.5358931552587647,
      "grad_norm": 34.24893404758549,
      "learning_rate": 1.1300437238896758e-05,
      "loss": 0.7497,
      "step": 460
    },
    {
      "epoch": 1.5525876460767947,
      "grad_norm": 20.076138032953008,
      "learning_rate": 1.1107209584849845e-05,
      "loss": 0.7466,
      "step": 465
    },
    {
      "epoch": 1.5692821368948247,
      "grad_norm": 16.836099480285057,
      "learning_rate": 1.0913562453169241e-05,
      "loss": 0.7659,
      "step": 470
    },
    {
      "epoch": 1.5859766277128546,
      "grad_norm": 22.86729702065199,
      "learning_rate": 1.0719569209044047e-05,
      "loss": 0.734,
      "step": 475
    },
    {
      "epoch": 1.6026711185308848,
      "grad_norm": 9.979667643277459,
      "learning_rate": 1.0525303348791599e-05,
      "loss": 0.7673,
      "step": 480
    },
    {
      "epoch": 1.6193656093489148,
      "grad_norm": 11.926547743437098,
      "learning_rate": 1.0330838472012617e-05,
      "loss": 0.7677,
      "step": 485
    },
    {
      "epoch": 1.636060100166945,
      "grad_norm": 20.884548842874665,
      "learning_rate": 1.0136248253707267e-05,
      "loss": 0.7589,
      "step": 490
    },
    {
      "epoch": 1.652754590984975,
      "grad_norm": 20.320143837413003,
      "learning_rate": 9.94160641636263e-06,
      "loss": 0.7317,
      "step": 495
    },
    {
      "epoch": 1.669449081803005,
      "grad_norm": 9.784131496022562,
      "learning_rate": 9.74698670202218e-06,
      "loss": 0.7471,
      "step": 500
    },
    {
      "epoch": 1.686143572621035,
      "grad_norm": 20.927503924872337,
      "learning_rate": 9.552462844347883e-06,
      "loss": 0.7476,
      "step": 505
    },
    {
      "epoch": 1.702838063439065,
      "grad_norm": 16.01855549874665,
      "learning_rate": 9.358108540685406e-06,
      "loss": 0.7219,
      "step": 510
    },
    {
      "epoch": 1.7195325542570952,
      "grad_norm": 7.286899337512383,
      "learning_rate": 9.163997424143167e-06,
      "loss": 0.7291,
      "step": 515
    },
    {
      "epoch": 1.7362270450751254,
      "grad_norm": 26.516549504841837,
      "learning_rate": 8.970203035695662e-06,
      "loss": 0.7392,
      "step": 520
    },
    {
      "epoch": 1.7529215358931554,
      "grad_norm": 17.76687131779593,
      "learning_rate": 8.776798796321715e-06,
      "loss": 0.7315,
      "step": 525
    },
    {
      "epoch": 1.7696160267111853,
      "grad_norm": 11.57930379098666,
      "learning_rate": 8.583857979188203e-06,
      "loss": 0.7289,
      "step": 530
    },
    {
      "epoch": 1.7863105175292153,
      "grad_norm": 17.537227925038266,
      "learning_rate": 8.391453681889772e-06,
      "loss": 0.7469,
      "step": 535
    },
    {
      "epoch": 1.8030050083472453,
      "grad_norm": 10.621593546803865,
      "learning_rate": 8.199658798755048e-06,
      "loss": 0.777,
      "step": 540
    },
    {
      "epoch": 1.8196994991652755,
      "grad_norm": 9.251016723609432,
      "learning_rate": 8.008545993229897e-06,
      "loss": 0.7286,
      "step": 545
    },
    {
      "epoch": 1.8363939899833055,
      "grad_norm": 15.490160829475185,
      "learning_rate": 7.818187670348133e-06,
      "loss": 0.7222,
      "step": 550
    },
    {
      "epoch": 1.8530884808013357,
      "grad_norm": 6.9953239485991325,
      "learning_rate": 7.628655949300133e-06,
      "loss": 0.7537,
      "step": 555
    },
    {
      "epoch": 1.8697829716193657,
      "grad_norm": 3.102208689939814,
      "learning_rate": 7.440022636109742e-06,
      "loss": 0.7422,
      "step": 560
    },
    {
      "epoch": 1.8864774624373957,
      "grad_norm": 5.758208330036847,
      "learning_rate": 7.2523591964298345e-06,
      "loss": 0.7179,
      "step": 565
    },
    {
      "epoch": 1.9031719532554257,
      "grad_norm": 3.812195776537815,
      "learning_rate": 7.065736728466832e-06,
      "loss": 0.7451,
      "step": 570
    },
    {
      "epoch": 1.9198664440734556,
      "grad_norm": 6.263849267436109,
      "learning_rate": 6.880225936044402e-06,
      "loss": 0.7557,
      "step": 575
    },
    {
      "epoch": 1.9365609348914858,
      "grad_norm": 6.282114744972934,
      "learning_rate": 6.695897101816606e-06,
      "loss": 0.7483,
      "step": 580
    },
    {
      "epoch": 1.9532554257095158,
      "grad_norm": 11.855358236520228,
      "learning_rate": 6.512820060640608e-06,
      "loss": 0.776,
      "step": 585
    },
    {
      "epoch": 1.969949916527546,
      "grad_norm": 6.706340476505237,
      "learning_rate": 6.331064173119008e-06,
      "loss": 0.7526,
      "step": 590
    },
    {
      "epoch": 1.986644407345576,
      "grad_norm": 2.315599430989982,
      "learning_rate": 6.150698299321889e-06,
      "loss": 0.7512,
      "step": 595
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.042004108428955,
      "eval_runtime": 21.4365,
      "eval_samples_per_second": 89.24,
      "eval_steps_per_second": 2.799,
      "step": 599
    },
    {
      "epoch": 2.003338898163606,
      "grad_norm": 7.306932166126154,
      "learning_rate": 5.971790772698467e-06,
      "loss": 0.6991,
      "step": 600
    },
    {
      "epoch": 2.020033388981636,
      "grad_norm": 4.3017516619955645,
      "learning_rate": 5.794409374188272e-06,
      "loss": 0.496,
      "step": 605
    },
    {
      "epoch": 2.036727879799666,
      "grad_norm": 3.6156424881801286,
      "learning_rate": 5.61862130654165e-06,
      "loss": 0.4997,
      "step": 610
    },
    {
      "epoch": 2.053422370617696,
      "grad_norm": 2.694317641326709,
      "learning_rate": 5.444493168859304e-06,
      "loss": 0.4718,
      "step": 615
    },
    {
      "epoch": 2.0701168614357264,
      "grad_norm": 2.613652033920123,
      "learning_rate": 5.272090931360564e-06,
      "loss": 0.4831,
      "step": 620
    },
    {
      "epoch": 2.0868113522537564,
      "grad_norm": 2.813633749303979,
      "learning_rate": 5.101479910389888e-06,
      "loss": 0.5005,
      "step": 625
    },
    {
      "epoch": 2.1035058430717863,
      "grad_norm": 2.5181028219855235,
      "learning_rate": 4.932724743671089e-06,
      "loss": 0.5251,
      "step": 630
    },
    {
      "epoch": 2.1202003338898163,
      "grad_norm": 2.4641627321242785,
      "learning_rate": 4.765889365818708e-06,
      "loss": 0.4994,
      "step": 635
    },
    {
      "epoch": 2.1368948247078463,
      "grad_norm": 2.2239295194273097,
      "learning_rate": 4.601036984115684e-06,
      "loss": 0.51,
      "step": 640
    },
    {
      "epoch": 2.1535893155258763,
      "grad_norm": 4.030891483235892,
      "learning_rate": 4.438230054566678e-06,
      "loss": 0.4823,
      "step": 645
    },
    {
      "epoch": 2.1702838063439067,
      "grad_norm": 3.4317624941352762,
      "learning_rate": 4.277530258235955e-06,
      "loss": 0.5121,
      "step": 650
    },
    {
      "epoch": 2.1869782971619367,
      "grad_norm": 3.4339442550558377,
      "learning_rate": 4.118998477878879e-06,
      "loss": 0.5038,
      "step": 655
    },
    {
      "epoch": 2.2036727879799667,
      "grad_norm": 3.7518155349947526,
      "learning_rate": 3.96269477487588e-06,
      "loss": 0.5255,
      "step": 660
    },
    {
      "epoch": 2.2203672787979967,
      "grad_norm": 3.208806245499332,
      "learning_rate": 3.8086783664775827e-06,
      "loss": 0.4813,
      "step": 665
    },
    {
      "epoch": 2.2370617696160267,
      "grad_norm": 2.2156022295938635,
      "learning_rate": 3.657007603369728e-06,
      "loss": 0.4832,
      "step": 670
    },
    {
      "epoch": 2.2537562604340566,
      "grad_norm": 3.0012443950273804,
      "learning_rate": 3.5077399475664474e-06,
      "loss": 0.4841,
      "step": 675
    },
    {
      "epoch": 2.2704507512520866,
      "grad_norm": 3.2803718521147323,
      "learning_rate": 3.360931950640185e-06,
      "loss": 0.5116,
      "step": 680
    },
    {
      "epoch": 2.287145242070117,
      "grad_norm": 2.0114178970483985,
      "learning_rate": 3.2166392322965423e-06,
      "loss": 0.493,
      "step": 685
    },
    {
      "epoch": 2.303839732888147,
      "grad_norm": 2.409104814481249,
      "learning_rate": 3.074916459302211e-06,
      "loss": 0.4808,
      "step": 690
    },
    {
      "epoch": 2.320534223706177,
      "grad_norm": 1.8952913155646967,
      "learning_rate": 2.935817324773893e-06,
      "loss": 0.485,
      "step": 695
    },
    {
      "epoch": 2.337228714524207,
      "grad_norm": 2.091074617432718,
      "learning_rate": 2.799394527836129e-06,
      "loss": 0.482,
      "step": 700
    },
    {
      "epoch": 2.353923205342237,
      "grad_norm": 2.286915096478497,
      "learning_rate": 2.665699753655684e-06,
      "loss": 0.4816,
      "step": 705
    },
    {
      "epoch": 2.370617696160267,
      "grad_norm": 2.9960367822706475,
      "learning_rate": 2.5347836538601113e-06,
      "loss": 0.4669,
      "step": 710
    },
    {
      "epoch": 2.3873121869782974,
      "grad_norm": 4.617799542764574,
      "learning_rate": 2.406695827347848e-06,
      "loss": 0.4802,
      "step": 715
    },
    {
      "epoch": 2.4040066777963274,
      "grad_norm": 2.6788734186896077,
      "learning_rate": 2.281484801497186e-06,
      "loss": 0.4777,
      "step": 720
    },
    {
      "epoch": 2.4207011686143574,
      "grad_norm": 2.217425451473198,
      "learning_rate": 2.1591980137811684e-06,
      "loss": 0.4762,
      "step": 725
    },
    {
      "epoch": 2.4373956594323873,
      "grad_norm": 2.5079137151103863,
      "learning_rate": 2.0398817937954275e-06,
      "loss": 0.4791,
      "step": 730
    },
    {
      "epoch": 2.4540901502504173,
      "grad_norm": 1.832302823771755,
      "learning_rate": 1.923581345705736e-06,
      "loss": 0.4604,
      "step": 735
    },
    {
      "epoch": 2.4707846410684473,
      "grad_norm": 3.3464829987759375,
      "learning_rate": 1.8103407311219523e-06,
      "loss": 0.4773,
      "step": 740
    },
    {
      "epoch": 2.4874791318864773,
      "grad_norm": 3.1999456095425995,
      "learning_rate": 1.7002028524048354e-06,
      "loss": 0.4628,
      "step": 745
    },
    {
      "epoch": 2.5041736227045073,
      "grad_norm": 2.4738950163595024,
      "learning_rate": 1.5932094364120453e-06,
      "loss": 0.4759,
      "step": 750
    },
    {
      "epoch": 2.5208681135225377,
      "grad_norm": 5.463944004312446,
      "learning_rate": 1.489401018689488e-06,
      "loss": 0.4692,
      "step": 755
    },
    {
      "epoch": 2.5375626043405677,
      "grad_norm": 3.785050339076787,
      "learning_rate": 1.3888169281140284e-06,
      "loss": 0.4605,
      "step": 760
    },
    {
      "epoch": 2.5542570951585977,
      "grad_norm": 2.672219415465402,
      "learning_rate": 1.291495271993337e-06,
      "loss": 0.4636,
      "step": 765
    },
    {
      "epoch": 2.5709515859766277,
      "grad_norm": 1.815658692342384,
      "learning_rate": 1.1974729216285386e-06,
      "loss": 0.4639,
      "step": 770
    },
    {
      "epoch": 2.5876460767946576,
      "grad_norm": 2.721770682379103,
      "learning_rate": 1.1067854983451575e-06,
      "loss": 0.4564,
      "step": 775
    },
    {
      "epoch": 2.604340567612688,
      "grad_norm": 2.722974978900685,
      "learning_rate": 1.0194673599976134e-06,
      "loss": 0.4504,
      "step": 780
    },
    {
      "epoch": 2.621035058430718,
      "grad_norm": 2.5630900969446837,
      "learning_rate": 9.355515879523858e-07,
      "loss": 0.4536,
      "step": 785
    },
    {
      "epoch": 2.637729549248748,
      "grad_norm": 2.1055399141098032,
      "learning_rate": 8.550699745548196e-07,
      "loss": 0.4639,
      "step": 790
    },
    {
      "epoch": 2.654424040066778,
      "grad_norm": 1.7279709418461875,
      "learning_rate": 7.780530110842566e-07,
      "loss": 0.4796,
      "step": 795
    },
    {
      "epoch": 2.671118530884808,
      "grad_norm": 1.7404953519601456,
      "learning_rate": 7.045298762021125e-07,
      "loss": 0.4459,
      "step": 800
    },
    {
      "epoch": 2.687813021702838,
      "grad_norm": 1.9897437836650076,
      "learning_rate": 6.345284248972383e-07,
      "loss": 0.4573,
      "step": 805
    },
    {
      "epoch": 2.704507512520868,
      "grad_norm": 1.7374239194544907,
      "learning_rate": 5.680751779327742e-07,
      "loss": 0.4481,
      "step": 810
    },
    {
      "epoch": 2.721202003338898,
      "grad_norm": 1.9063993408126028,
      "learning_rate": 5.05195311798491e-07,
      "loss": 0.4476,
      "step": 815
    },
    {
      "epoch": 2.7378964941569284,
      "grad_norm": 2.134180540126151,
      "learning_rate": 4.4591264917242195e-07,
      "loss": 0.4463,
      "step": 820
    },
    {
      "epoch": 2.7545909849749584,
      "grad_norm": 1.70679451789591,
      "learning_rate": 3.9024964989539227e-07,
      "loss": 0.4731,
      "step": 825
    },
    {
      "epoch": 2.7712854757929883,
      "grad_norm": 1.8624687631277015,
      "learning_rate": 3.3822740246188477e-07,
      "loss": 0.441,
      "step": 830
    },
    {
      "epoch": 2.7879799666110183,
      "grad_norm": 1.7487434265005612,
      "learning_rate": 2.8986561603044694e-07,
      "loss": 0.4634,
      "step": 835
    },
    {
      "epoch": 2.8046744574290483,
      "grad_norm": 1.7206438184487136,
      "learning_rate": 2.4518261295667255e-07,
      "loss": 0.4544,
      "step": 840
    },
    {
      "epoch": 2.8213689482470787,
      "grad_norm": 1.6616178181435346,
      "learning_rate": 2.0419532185159796e-07,
      "loss": 0.4605,
      "step": 845
    },
    {
      "epoch": 2.8380634390651087,
      "grad_norm": 2.151120696939262,
      "learning_rate": 1.6691927116812002e-07,
      "loss": 0.446,
      "step": 850
    },
    {
      "epoch": 2.8547579298831387,
      "grad_norm": 1.7230509982028626,
      "learning_rate": 1.3336858331787993e-07,
      "loss": 0.4519,
      "step": 855
    },
    {
      "epoch": 2.8714524207011687,
      "grad_norm": 1.7412338171439155,
      "learning_rate": 1.0355596932085432e-07,
      "loss": 0.459,
      "step": 860
    },
    {
      "epoch": 2.8881469115191987,
      "grad_norm": 2.021167741282332,
      "learning_rate": 7.749272398964613e-08,
      "loss": 0.4382,
      "step": 865
    },
    {
      "epoch": 2.9048414023372287,
      "grad_norm": 1.7263112044181872,
      "learning_rate": 5.518872165033329e-08,
      "loss": 0.4381,
      "step": 870
    },
    {
      "epoch": 2.9215358931552586,
      "grad_norm": 1.8622740089312937,
      "learning_rate": 3.6652412401478875e-08,
      "loss": 0.4348,
      "step": 875
    },
    {
      "epoch": 2.9382303839732886,
      "grad_norm": 1.6946497921936468,
      "learning_rate": 2.1890818912728706e-08,
      "loss": 0.4498,
      "step": 880
    },
    {
      "epoch": 2.9549248747913186,
      "grad_norm": 1.6754781633135705,
      "learning_rate": 1.0909533764194013e-08,
      "loss": 0.4424,
      "step": 885
    },
    {
      "epoch": 2.971619365609349,
      "grad_norm": 1.7509693982942596,
      "learning_rate": 3.7127173276563234e-09,
      "loss": 0.4396,
      "step": 890
    },
    {
      "epoch": 2.988313856427379,
      "grad_norm": 1.688514503466763,
      "learning_rate": 3.0309619035495675e-10,
      "loss": 0.4573,
      "step": 895
    },
    {
      "epoch": 2.994991652754591,
      "eval_loss": 1.0773704051971436,
      "eval_runtime": 21.4317,
      "eval_samples_per_second": 89.26,
      "eval_steps_per_second": 2.8,
      "step": 897
    },
    {
      "epoch": 2.994991652754591,
      "step": 897,
      "total_flos": 246978202042368.0,
      "train_loss": 0.8908544659880359,
      "train_runtime": 5202.4388,
      "train_samples_per_second": 22.101,
      "train_steps_per_second": 0.172
    }
  ],
  "logging_steps": 5,
  "max_steps": 897,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 246978202042368.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}