{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 51504,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.029123951537744643,
      "grad_norm": 8.23077392578125,
      "learning_rate": 4.951557160608885e-05,
      "loss": 0.8096,
      "step": 500
    },
    {
      "epoch": 0.058247903075489285,
      "grad_norm": 7.531521797180176,
      "learning_rate": 4.9030172413793105e-05,
      "loss": 0.5976,
      "step": 1000
    },
    {
      "epoch": 0.08737185461323392,
      "grad_norm": 7.849789142608643,
      "learning_rate": 4.854477322149736e-05,
      "loss": 0.5605,
      "step": 1500
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 10.54165267944336,
      "learning_rate": 4.805937402920162e-05,
      "loss": 0.5217,
      "step": 2000
    },
    {
      "epoch": 0.1456197576887232,
      "grad_norm": 5.914309024810791,
      "learning_rate": 4.757397483690587e-05,
      "loss": 0.5199,
      "step": 2500
    },
    {
      "epoch": 0.17474370922646784,
      "grad_norm": 4.58743143081665,
      "learning_rate": 4.7088575644610125e-05,
      "loss": 0.5066,
      "step": 3000
    },
    {
      "epoch": 0.20386766076421248,
      "grad_norm": 7.739262104034424,
      "learning_rate": 4.660317645231438e-05,
      "loss": 0.4885,
      "step": 3500
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 7.6342034339904785,
      "learning_rate": 4.611777726001864e-05,
      "loss": 0.4702,
      "step": 4000
    },
    {
      "epoch": 0.26211556383970175,
      "grad_norm": 4.743926525115967,
      "learning_rate": 4.56323780677229e-05,
      "loss": 0.4668,
      "step": 4500
    },
    {
      "epoch": 0.2912395153774464,
      "grad_norm": 6.3329596519470215,
      "learning_rate": 4.514697887542715e-05,
      "loss": 0.4621,
      "step": 5000
    },
    {
      "epoch": 0.3203634669151911,
      "grad_norm": 6.19777774810791,
      "learning_rate": 4.466157968313141e-05,
      "loss": 0.4548,
      "step": 5500
    },
    {
      "epoch": 0.3494874184529357,
      "grad_norm": 5.018973350524902,
      "learning_rate": 4.417618049083567e-05,
      "loss": 0.4546,
      "step": 6000
    },
    {
      "epoch": 0.37861136999068035,
      "grad_norm": 8.148200035095215,
      "learning_rate": 4.369078129853993e-05,
      "loss": 0.4488,
      "step": 6500
    },
    {
      "epoch": 0.40773532152842495,
      "grad_norm": 5.203968524932861,
      "learning_rate": 4.320538210624418e-05,
      "loss": 0.4359,
      "step": 7000
    },
    {
      "epoch": 0.4368592730661696,
      "grad_norm": 5.1505231857299805,
      "learning_rate": 4.271998291394843e-05,
      "loss": 0.4356,
      "step": 7500
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 7.869897365570068,
      "learning_rate": 4.223458372165269e-05,
      "loss": 0.4297,
      "step": 8000
    },
    {
      "epoch": 0.4951071761416589,
      "grad_norm": 5.874831676483154,
      "learning_rate": 4.1749184529356947e-05,
      "loss": 0.4227,
      "step": 8500
    },
    {
      "epoch": 0.5242311276794035,
      "grad_norm": 3.657247543334961,
      "learning_rate": 4.12637853370612e-05,
      "loss": 0.4304,
      "step": 9000
    },
    {
      "epoch": 0.5533550792171482,
      "grad_norm": 5.709639072418213,
      "learning_rate": 4.0778386144765457e-05,
      "loss": 0.4307,
      "step": 9500
    },
    {
      "epoch": 0.5824790307548928,
      "grad_norm": 7.34050178527832,
      "learning_rate": 4.0292986952469715e-05,
      "loss": 0.4188,
      "step": 10000
    },
    {
      "epoch": 0.6116029822926374,
      "grad_norm": 4.709152698516846,
      "learning_rate": 3.980758776017397e-05,
      "loss": 0.4218,
      "step": 10500
    },
    {
      "epoch": 0.6407269338303822,
      "grad_norm": 4.877218246459961,
      "learning_rate": 3.9322188567878225e-05,
      "loss": 0.4139,
      "step": 11000
    },
    {
      "epoch": 0.6698508853681268,
      "grad_norm": 5.153838634490967,
      "learning_rate": 3.883678937558248e-05,
      "loss": 0.4214,
      "step": 11500
    },
    {
      "epoch": 0.6989748369058714,
      "grad_norm": 6.150764465332031,
      "learning_rate": 3.835139018328674e-05,
      "loss": 0.4042,
      "step": 12000
    },
    {
      "epoch": 0.7280987884436161,
      "grad_norm": 6.341733932495117,
      "learning_rate": 3.786599099099099e-05,
      "loss": 0.4063,
      "step": 12500
    },
    {
      "epoch": 0.7572227399813607,
      "grad_norm": 4.536073684692383,
      "learning_rate": 3.7380591798695245e-05,
      "loss": 0.4016,
      "step": 13000
    },
    {
      "epoch": 0.7863466915191053,
      "grad_norm": 3.744781255722046,
      "learning_rate": 3.68951926063995e-05,
      "loss": 0.4055,
      "step": 13500
    },
    {
      "epoch": 0.8154706430568499,
      "grad_norm": 6.1853132247924805,
      "learning_rate": 3.640979341410376e-05,
      "loss": 0.3963,
      "step": 14000
    },
    {
      "epoch": 0.8445945945945946,
      "grad_norm": 7.477056503295898,
      "learning_rate": 3.592439422180802e-05,
      "loss": 0.4004,
      "step": 14500
    },
    {
      "epoch": 0.8737185461323392,
      "grad_norm": 5.708552837371826,
      "learning_rate": 3.543899502951227e-05,
      "loss": 0.3987,
      "step": 15000
    },
    {
      "epoch": 0.9028424976700838,
      "grad_norm": 4.550114154815674,
      "learning_rate": 3.495359583721653e-05,
      "loss": 0.4115,
      "step": 15500
    },
    {
      "epoch": 0.9319664492078286,
      "grad_norm": 5.300821781158447,
      "learning_rate": 3.446819664492079e-05,
      "loss": 0.3934,
      "step": 16000
    },
    {
      "epoch": 0.9610904007455732,
      "grad_norm": 4.5532402992248535,
      "learning_rate": 3.398279745262504e-05,
      "loss": 0.3985,
      "step": 16500
    },
    {
      "epoch": 0.9902143522833178,
      "grad_norm": 6.533886909484863,
      "learning_rate": 3.34973982603293e-05,
      "loss": 0.3987,
      "step": 17000
    },
    {
      "epoch": 1.0193383038210624,
      "grad_norm": 8.014822959899902,
      "learning_rate": 3.301199906803355e-05,
      "loss": 0.365,
      "step": 17500
    },
    {
      "epoch": 1.048462255358807,
      "grad_norm": 6.704738140106201,
      "learning_rate": 3.252659987573781e-05,
      "loss": 0.3469,
      "step": 18000
    },
    {
      "epoch": 1.0775862068965518,
      "grad_norm": 4.034560680389404,
      "learning_rate": 3.204120068344206e-05,
      "loss": 0.3492,
      "step": 18500
    },
    {
      "epoch": 1.1067101584342964,
      "grad_norm": 4.0876665115356445,
      "learning_rate": 3.155580149114632e-05,
      "loss": 0.3514,
      "step": 19000
    },
    {
      "epoch": 1.135834109972041,
      "grad_norm": 5.907679557800293,
      "learning_rate": 3.1070402298850576e-05,
      "loss": 0.3524,
      "step": 19500
    },
    {
      "epoch": 1.1649580615097856,
      "grad_norm": 4.2778520584106445,
      "learning_rate": 3.0585003106554835e-05,
      "loss": 0.3572,
      "step": 20000
    },
    {
      "epoch": 1.1940820130475303,
      "grad_norm": 7.92435884475708,
      "learning_rate": 3.009960391425909e-05,
      "loss": 0.3504,
      "step": 20500
    },
    {
      "epoch": 1.2232059645852749,
      "grad_norm": 4.7413740158081055,
      "learning_rate": 2.9614204721963345e-05,
      "loss": 0.3529,
      "step": 21000
    },
    {
      "epoch": 1.2523299161230197,
      "grad_norm": 5.323243141174316,
      "learning_rate": 2.9128805529667603e-05,
      "loss": 0.3496,
      "step": 21500
    },
    {
      "epoch": 1.281453867660764,
      "grad_norm": 6.30069637298584,
      "learning_rate": 2.8643406337371858e-05,
      "loss": 0.3496,
      "step": 22000
    },
    {
      "epoch": 1.310577819198509,
      "grad_norm": 6.415484428405762,
      "learning_rate": 2.815800714507611e-05,
      "loss": 0.3497,
      "step": 22500
    },
    {
      "epoch": 1.3397017707362535,
      "grad_norm": 7.066019058227539,
      "learning_rate": 2.7672607952780365e-05,
      "loss": 0.3469,
      "step": 23000
    },
    {
      "epoch": 1.3688257222739981,
      "grad_norm": 6.447554111480713,
      "learning_rate": 2.7187208760484623e-05,
      "loss": 0.3519,
      "step": 23500
    },
    {
      "epoch": 1.3979496738117427,
      "grad_norm": 6.272543430328369,
      "learning_rate": 2.6701809568188878e-05,
      "loss": 0.3416,
      "step": 24000
    },
    {
      "epoch": 1.4270736253494873,
      "grad_norm": 4.0889668464660645,
      "learning_rate": 2.6216410375893136e-05,
      "loss": 0.3432,
      "step": 24500
    },
    {
      "epoch": 1.4561975768872322,
      "grad_norm": 4.522316932678223,
      "learning_rate": 2.573101118359739e-05,
      "loss": 0.3457,
      "step": 25000
    },
    {
      "epoch": 1.4853215284249768,
      "grad_norm": 6.167509078979492,
      "learning_rate": 2.524561199130165e-05,
      "loss": 0.3486,
      "step": 25500
    },
    {
      "epoch": 1.5144454799627214,
      "grad_norm": 4.351974964141846,
      "learning_rate": 2.47602127990059e-05,
      "loss": 0.3534,
      "step": 26000
    },
    {
      "epoch": 1.543569431500466,
      "grad_norm": 6.637437343597412,
      "learning_rate": 2.427481360671016e-05,
      "loss": 0.3404,
      "step": 26500
    },
    {
      "epoch": 1.5726933830382106,
      "grad_norm": 8.26878833770752,
      "learning_rate": 2.3789414414414415e-05,
      "loss": 0.3406,
      "step": 27000
    },
    {
      "epoch": 1.6018173345759554,
      "grad_norm": 3.1995205879211426,
      "learning_rate": 2.3304015222118673e-05,
      "loss": 0.3494,
      "step": 27500
    },
    {
      "epoch": 1.6309412861136998,
      "grad_norm": 8.005794525146484,
      "learning_rate": 2.2818616029822928e-05,
      "loss": 0.3471,
      "step": 28000
    },
    {
      "epoch": 1.6600652376514446,
      "grad_norm": 3.5494773387908936,
      "learning_rate": 2.2333216837527183e-05,
      "loss": 0.3422,
      "step": 28500
    },
    {
      "epoch": 1.689189189189189,
      "grad_norm": 6.0129780769348145,
      "learning_rate": 2.1847817645231438e-05,
      "loss": 0.352,
      "step": 29000
    },
    {
      "epoch": 1.7183131407269339,
      "grad_norm": 5.822954177856445,
      "learning_rate": 2.1362418452935696e-05,
      "loss": 0.3439,
      "step": 29500
    },
    {
      "epoch": 1.7474370922646785,
      "grad_norm": 6.08833646774292,
      "learning_rate": 2.087701926063995e-05,
      "loss": 0.3458,
      "step": 30000
    },
    {
      "epoch": 1.776561043802423,
      "grad_norm": 4.251603603363037,
      "learning_rate": 2.039162006834421e-05,
      "loss": 0.3445,
      "step": 30500
    },
    {
      "epoch": 1.805684995340168,
      "grad_norm": 3.8352158069610596,
      "learning_rate": 1.990622087604846e-05,
      "loss": 0.3367,
      "step": 31000
    },
    {
      "epoch": 1.8348089468779123,
      "grad_norm": 3.8754615783691406,
      "learning_rate": 1.942082168375272e-05,
      "loss": 0.3318,
      "step": 31500
    },
    {
      "epoch": 1.8639328984156571,
      "grad_norm": 6.027359485626221,
      "learning_rate": 1.8935422491456975e-05,
      "loss": 0.3392,
      "step": 32000
    },
    {
      "epoch": 1.8930568499534017,
      "grad_norm": 3.559069871902466,
      "learning_rate": 1.8450023299161233e-05,
      "loss": 0.3364,
      "step": 32500
    },
    {
      "epoch": 1.9221808014911463,
      "grad_norm": 4.697865962982178,
      "learning_rate": 1.7964624106865488e-05,
      "loss": 0.3403,
      "step": 33000
    },
    {
      "epoch": 1.951304753028891,
      "grad_norm": 9.035033226013184,
      "learning_rate": 1.7479224914569743e-05,
      "loss": 0.3414,
      "step": 33500
    },
    {
      "epoch": 1.9804287045666356,
      "grad_norm": 6.350938320159912,
      "learning_rate": 1.6993825722273998e-05,
      "loss": 0.3359,
      "step": 34000
    },
    {
      "epoch": 2.0095526561043804,
      "grad_norm": 3.888198137283325,
      "learning_rate": 1.6508426529978256e-05,
      "loss": 0.3263,
      "step": 34500
    },
    {
      "epoch": 2.0386766076421248,
      "grad_norm": 11.91583251953125,
      "learning_rate": 1.602302733768251e-05,
      "loss": 0.2927,
      "step": 35000
    },
    {
      "epoch": 2.0678005591798696,
      "grad_norm": 5.267765045166016,
      "learning_rate": 1.5537628145386766e-05,
      "loss": 0.3013,
      "step": 35500
    },
    {
      "epoch": 2.096924510717614,
      "grad_norm": 5.365530967712402,
      "learning_rate": 1.5052228953091021e-05,
      "loss": 0.299,
      "step": 36000
    },
    {
      "epoch": 2.126048462255359,
      "grad_norm": 3.0507187843322754,
      "learning_rate": 1.4566829760795278e-05,
      "loss": 0.3068,
      "step": 36500
    },
    {
      "epoch": 2.1551724137931036,
      "grad_norm": 3.4967715740203857,
      "learning_rate": 1.4081430568499535e-05,
      "loss": 0.3012,
      "step": 37000
    },
    {
      "epoch": 2.184296365330848,
      "grad_norm": 7.34511137008667,
      "learning_rate": 1.3596031376203791e-05,
      "loss": 0.2981,
      "step": 37500
    },
    {
      "epoch": 2.213420316868593,
      "grad_norm": 4.908951759338379,
      "learning_rate": 1.3110632183908048e-05,
      "loss": 0.3,
      "step": 38000
    },
    {
      "epoch": 2.2425442684063372,
      "grad_norm": 4.048547744750977,
      "learning_rate": 1.2625232991612301e-05,
      "loss": 0.2918,
      "step": 38500
    },
    {
      "epoch": 2.271668219944082,
      "grad_norm": 6.06612491607666,
      "learning_rate": 1.2139833799316558e-05,
      "loss": 0.2901,
      "step": 39000
    },
    {
      "epoch": 2.3007921714818265,
      "grad_norm": 3.469628095626831,
      "learning_rate": 1.1654434607020815e-05,
      "loss": 0.304,
      "step": 39500
    },
    {
      "epoch": 2.3299161230195713,
      "grad_norm": 8.2776460647583,
      "learning_rate": 1.1169035414725071e-05,
      "loss": 0.2986,
      "step": 40000
    },
    {
      "epoch": 2.359040074557316,
      "grad_norm": 6.029278755187988,
      "learning_rate": 1.0683636222429326e-05,
      "loss": 0.2981,
      "step": 40500
    },
    {
      "epoch": 2.3881640260950605,
      "grad_norm": 5.872582912445068,
      "learning_rate": 1.0198237030133583e-05,
      "loss": 0.298,
      "step": 41000
    },
    {
      "epoch": 2.4172879776328053,
      "grad_norm": 3.5587821006774902,
      "learning_rate": 9.712837837837838e-06,
      "loss": 0.2836,
      "step": 41500
    },
    {
      "epoch": 2.4464119291705497,
      "grad_norm": 3.2673676013946533,
      "learning_rate": 9.227438645542095e-06,
      "loss": 0.293,
      "step": 42000
    },
    {
      "epoch": 2.4755358807082946,
      "grad_norm": 2.4192936420440674,
      "learning_rate": 8.742039453246351e-06,
      "loss": 0.3051,
      "step": 42500
    },
    {
      "epoch": 2.5046598322460394,
      "grad_norm": 2.684082269668579,
      "learning_rate": 8.256640260950606e-06,
      "loss": 0.2865,
      "step": 43000
    },
    {
      "epoch": 2.5337837837837838,
      "grad_norm": 7.754036903381348,
      "learning_rate": 7.771241068654863e-06,
      "loss": 0.2844,
      "step": 43500
    },
    {
      "epoch": 2.562907735321528,
      "grad_norm": 7.571073532104492,
      "learning_rate": 7.285841876359117e-06,
      "loss": 0.2867,
      "step": 44000
    },
    {
      "epoch": 2.592031686859273,
      "grad_norm": 4.301662921905518,
      "learning_rate": 6.800442684063374e-06,
      "loss": 0.2909,
      "step": 44500
    },
    {
      "epoch": 2.621155638397018,
      "grad_norm": 9.151103019714355,
      "learning_rate": 6.31504349176763e-06,
      "loss": 0.2953,
      "step": 45000
    },
    {
      "epoch": 2.650279589934762,
      "grad_norm": 2.850679636001587,
      "learning_rate": 5.829644299471886e-06,
      "loss": 0.2953,
      "step": 45500
    },
    {
      "epoch": 2.679403541472507,
      "grad_norm": 6.948458194732666,
      "learning_rate": 5.344245107176142e-06,
      "loss": 0.2959,
      "step": 46000
    },
    {
      "epoch": 2.7085274930102514,
      "grad_norm": 2.6653831005096436,
      "learning_rate": 4.858845914880398e-06,
      "loss": 0.303,
      "step": 46500
    },
    {
      "epoch": 2.7376514445479962,
      "grad_norm": 6.758817195892334,
      "learning_rate": 4.373446722584654e-06,
      "loss": 0.2956,
      "step": 47000
    },
    {
      "epoch": 2.766775396085741,
      "grad_norm": 4.432290077209473,
      "learning_rate": 3.8880475302889095e-06,
      "loss": 0.2906,
      "step": 47500
    },
    {
      "epoch": 2.7958993476234855,
      "grad_norm": 2.109107255935669,
      "learning_rate": 3.402648337993166e-06,
      "loss": 0.2934,
      "step": 48000
    },
    {
      "epoch": 2.8250232991612303,
      "grad_norm": 7.6626715660095215,
      "learning_rate": 2.917249145697422e-06,
      "loss": 0.2979,
      "step": 48500
    },
    {
      "epoch": 2.8541472506989747,
      "grad_norm": 4.676031112670898,
      "learning_rate": 2.431849953401678e-06,
      "loss": 0.2898,
      "step": 49000
    },
    {
      "epoch": 2.8832712022367195,
      "grad_norm": 7.312727928161621,
      "learning_rate": 1.9464507611059337e-06,
      "loss": 0.2887,
      "step": 49500
    },
    {
      "epoch": 2.9123951537744643,
      "grad_norm": 7.303893089294434,
      "learning_rate": 1.4610515688101895e-06,
      "loss": 0.2888,
      "step": 50000
    },
    {
      "epoch": 2.9415191053122087,
      "grad_norm": 7.751052379608154,
      "learning_rate": 9.756523765144455e-07,
      "loss": 0.2909,
      "step": 50500
    },
    {
      "epoch": 2.9706430568499536,
      "grad_norm": 6.1867756843566895,
      "learning_rate": 4.902531842187015e-07,
      "loss": 0.2964,
      "step": 51000
    },
    {
      "epoch": 2.999767008387698,
      "grad_norm": 6.950267314910889,
      "learning_rate": 4.85399192295744e-09,
      "loss": 0.2921,
      "step": 51500
    }
  ],
  "logging_steps": 500,
  "max_steps": 51504,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2121938098975232e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}