{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.996011396011396,
  "eval_steps": 500,
  "global_step": 1314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.022792022792022793,
      "grad_norm": 13.82025069118623,
      "learning_rate": 3.787878787878788e-07,
      "loss": 0.8308,
      "step": 10
    },
    {
      "epoch": 0.045584045584045586,
      "grad_norm": 5.1319737789095115,
      "learning_rate": 7.575757575757576e-07,
      "loss": 0.8105,
      "step": 20
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 1.5861641573263008,
      "learning_rate": 1.1363636363636364e-06,
      "loss": 0.755,
      "step": 30
    },
    {
      "epoch": 0.09116809116809117,
      "grad_norm": 1.0106444704636741,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.7305,
      "step": 40
    },
    {
      "epoch": 0.11396011396011396,
      "grad_norm": 0.9175747364460073,
      "learning_rate": 1.8939393939393941e-06,
      "loss": 0.7157,
      "step": 50
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 0.858266416727724,
      "learning_rate": 2.2727272727272728e-06,
      "loss": 0.6902,
      "step": 60
    },
    {
      "epoch": 0.15954415954415954,
      "grad_norm": 0.731139318802147,
      "learning_rate": 2.6515151515151514e-06,
      "loss": 0.6845,
      "step": 70
    },
    {
      "epoch": 0.18233618233618235,
      "grad_norm": 0.8705067202079199,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6844,
      "step": 80
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 0.7992611765746963,
      "learning_rate": 3.409090909090909e-06,
      "loss": 0.6652,
      "step": 90
    },
    {
      "epoch": 0.22792022792022792,
      "grad_norm": 0.8520679580860977,
      "learning_rate": 3.7878787878787882e-06,
      "loss": 0.6616,
      "step": 100
    },
    {
      "epoch": 0.25071225071225073,
      "grad_norm": 0.6676395021182464,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.6479,
      "step": 110
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 0.5238136155306522,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.6519,
      "step": 120
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 0.3749174945670082,
      "learning_rate": 4.924242424242425e-06,
      "loss": 0.6501,
      "step": 130
    },
    {
      "epoch": 0.3190883190883191,
      "grad_norm": 0.34219492738596635,
      "learning_rate": 4.966159052453469e-06,
      "loss": 0.6506,
      "step": 140
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.3221830208753367,
      "learning_rate": 4.923857868020305e-06,
      "loss": 0.6424,
      "step": 150
    },
    {
      "epoch": 0.3646723646723647,
      "grad_norm": 0.3211404670331441,
      "learning_rate": 4.881556683587141e-06,
      "loss": 0.6469,
      "step": 160
    },
    {
      "epoch": 0.38746438746438744,
      "grad_norm": 0.36044993850226115,
      "learning_rate": 4.839255499153977e-06,
      "loss": 0.6384,
      "step": 170
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 0.30662481311131046,
      "learning_rate": 4.796954314720812e-06,
      "loss": 0.6436,
      "step": 180
    },
    {
      "epoch": 0.43304843304843305,
      "grad_norm": 0.30073652663910616,
      "learning_rate": 4.754653130287649e-06,
      "loss": 0.6357,
      "step": 190
    },
    {
      "epoch": 0.45584045584045585,
      "grad_norm": 0.3159734259222124,
      "learning_rate": 4.712351945854484e-06,
      "loss": 0.6436,
      "step": 200
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 0.30021799995563564,
      "learning_rate": 4.67005076142132e-06,
      "loss": 0.6397,
      "step": 210
    },
    {
      "epoch": 0.5014245014245015,
      "grad_norm": 0.31027086609771876,
      "learning_rate": 4.6277495769881554e-06,
      "loss": 0.6433,
      "step": 220
    },
    {
      "epoch": 0.5242165242165242,
      "grad_norm": 0.325996231603359,
      "learning_rate": 4.585448392554992e-06,
      "loss": 0.6386,
      "step": 230
    },
    {
      "epoch": 0.5470085470085471,
      "grad_norm": 0.30447517334932556,
      "learning_rate": 4.543147208121828e-06,
      "loss": 0.6344,
      "step": 240
    },
    {
      "epoch": 0.5698005698005698,
      "grad_norm": 0.31759320117316103,
      "learning_rate": 4.500846023688664e-06,
      "loss": 0.6392,
      "step": 250
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 0.300042168313145,
      "learning_rate": 4.4585448392555e-06,
      "loss": 0.6373,
      "step": 260
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.3103670553421643,
      "learning_rate": 4.4162436548223355e-06,
      "loss": 0.6343,
      "step": 270
    },
    {
      "epoch": 0.6381766381766382,
      "grad_norm": 0.31371459349820763,
      "learning_rate": 4.373942470389172e-06,
      "loss": 0.645,
      "step": 280
    },
    {
      "epoch": 0.6609686609686609,
      "grad_norm": 0.3177909350422278,
      "learning_rate": 4.331641285956007e-06,
      "loss": 0.6336,
      "step": 290
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 0.2863080125345834,
      "learning_rate": 4.289340101522843e-06,
      "loss": 0.6316,
      "step": 300
    },
    {
      "epoch": 0.7065527065527065,
      "grad_norm": 0.2967357656574432,
      "learning_rate": 4.2470389170896785e-06,
      "loss": 0.6314,
      "step": 310
    },
    {
      "epoch": 0.7293447293447294,
      "grad_norm": 0.30731521673513923,
      "learning_rate": 4.204737732656515e-06,
      "loss": 0.6319,
      "step": 320
    },
    {
      "epoch": 0.7521367521367521,
      "grad_norm": 0.2853284520723167,
      "learning_rate": 4.162436548223351e-06,
      "loss": 0.6285,
      "step": 330
    },
    {
      "epoch": 0.7749287749287749,
      "grad_norm": 0.29173766094469944,
      "learning_rate": 4.120135363790186e-06,
      "loss": 0.6342,
      "step": 340
    },
    {
      "epoch": 0.7977207977207977,
      "grad_norm": 0.31186096126009083,
      "learning_rate": 4.0778341793570224e-06,
      "loss": 0.6413,
      "step": 350
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 0.31433308913944874,
      "learning_rate": 4.035532994923858e-06,
      "loss": 0.633,
      "step": 360
    },
    {
      "epoch": 0.8433048433048433,
      "grad_norm": 0.30255898197587633,
      "learning_rate": 3.993231810490694e-06,
      "loss": 0.6267,
      "step": 370
    },
    {
      "epoch": 0.8660968660968661,
      "grad_norm": 0.31742329834518057,
      "learning_rate": 3.95093062605753e-06,
      "loss": 0.639,
      "step": 380
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.29347055532062,
      "learning_rate": 3.9086294416243655e-06,
      "loss": 0.6375,
      "step": 390
    },
    {
      "epoch": 0.9116809116809117,
      "grad_norm": 0.304756355630385,
      "learning_rate": 3.866328257191202e-06,
      "loss": 0.6407,
      "step": 400
    },
    {
      "epoch": 0.9344729344729344,
      "grad_norm": 0.27202304716855863,
      "learning_rate": 3.824027072758038e-06,
      "loss": 0.6369,
      "step": 410
    },
    {
      "epoch": 0.9572649572649573,
      "grad_norm": 0.29824485775957277,
      "learning_rate": 3.7817258883248736e-06,
      "loss": 0.6238,
      "step": 420
    },
    {
      "epoch": 0.98005698005698,
      "grad_norm": 0.316389910270352,
      "learning_rate": 3.7394247038917094e-06,
      "loss": 0.6369,
      "step": 430
    },
    {
      "epoch": 0.9982905982905983,
      "eval_loss": 0.6274411082267761,
      "eval_runtime": 441.3291,
      "eval_samples_per_second": 26.79,
      "eval_steps_per_second": 0.419,
      "step": 438
    },
    {
      "epoch": 1.0034188034188034,
      "grad_norm": 0.2974179402853732,
      "learning_rate": 3.697123519458545e-06,
      "loss": 0.6504,
      "step": 440
    },
    {
      "epoch": 1.0262108262108263,
      "grad_norm": 0.3328108426705853,
      "learning_rate": 3.654822335025381e-06,
      "loss": 0.6094,
      "step": 450
    },
    {
      "epoch": 1.049002849002849,
      "grad_norm": 0.3468657832861117,
      "learning_rate": 3.612521150592217e-06,
      "loss": 0.603,
      "step": 460
    },
    {
      "epoch": 1.0717948717948718,
      "grad_norm": 0.28130223618914374,
      "learning_rate": 3.5702199661590524e-06,
      "loss": 0.604,
      "step": 470
    },
    {
      "epoch": 1.0945868945868946,
      "grad_norm": 0.2865481944718312,
      "learning_rate": 3.5279187817258886e-06,
      "loss": 0.6114,
      "step": 480
    },
    {
      "epoch": 1.1173789173789175,
      "grad_norm": 0.3093784116984744,
      "learning_rate": 3.4856175972927248e-06,
      "loss": 0.6165,
      "step": 490
    },
    {
      "epoch": 1.1401709401709401,
      "grad_norm": 0.27699299838421976,
      "learning_rate": 3.4433164128595605e-06,
      "loss": 0.6062,
      "step": 500
    },
    {
      "epoch": 1.162962962962963,
      "grad_norm": 0.3008059331559106,
      "learning_rate": 3.4010152284263963e-06,
      "loss": 0.6039,
      "step": 510
    },
    {
      "epoch": 1.1857549857549858,
      "grad_norm": 0.2944027438621198,
      "learning_rate": 3.358714043993232e-06,
      "loss": 0.6095,
      "step": 520
    },
    {
      "epoch": 1.2085470085470085,
      "grad_norm": 0.28077870584088543,
      "learning_rate": 3.3164128595600682e-06,
      "loss": 0.6025,
      "step": 530
    },
    {
      "epoch": 1.2313390313390313,
      "grad_norm": 0.30300062147380974,
      "learning_rate": 3.2741116751269036e-06,
      "loss": 0.6127,
      "step": 540
    },
    {
      "epoch": 1.2541310541310542,
      "grad_norm": 0.2884948651021871,
      "learning_rate": 3.2318104906937397e-06,
      "loss": 0.6091,
      "step": 550
    },
    {
      "epoch": 1.2769230769230768,
      "grad_norm": 0.29423227376682365,
      "learning_rate": 3.1895093062605755e-06,
      "loss": 0.6107,
      "step": 560
    },
    {
      "epoch": 1.2997150997150997,
      "grad_norm": 0.2883105301897019,
      "learning_rate": 3.1472081218274113e-06,
      "loss": 0.6046,
      "step": 570
    },
    {
      "epoch": 1.3225071225071225,
      "grad_norm": 0.2669594766065209,
      "learning_rate": 3.1049069373942474e-06,
      "loss": 0.6029,
      "step": 580
    },
    {
      "epoch": 1.3452991452991454,
      "grad_norm": 0.2705243942286286,
      "learning_rate": 3.062605752961083e-06,
      "loss": 0.6028,
      "step": 590
    },
    {
      "epoch": 1.368091168091168,
      "grad_norm": 0.3071464216067808,
      "learning_rate": 3.0203045685279194e-06,
      "loss": 0.6083,
      "step": 600
    },
    {
      "epoch": 1.390883190883191,
      "grad_norm": 0.28705868161520925,
      "learning_rate": 2.9780033840947547e-06,
      "loss": 0.6084,
      "step": 610
    },
    {
      "epoch": 1.4136752136752135,
      "grad_norm": 0.28840712136018964,
      "learning_rate": 2.935702199661591e-06,
      "loss": 0.611,
      "step": 620
    },
    {
      "epoch": 1.4364672364672364,
      "grad_norm": 0.27939993462491286,
      "learning_rate": 2.8934010152284262e-06,
      "loss": 0.6088,
      "step": 630
    },
    {
      "epoch": 1.4592592592592593,
      "grad_norm": 0.29201487779439644,
      "learning_rate": 2.8510998307952624e-06,
      "loss": 0.6036,
      "step": 640
    },
    {
      "epoch": 1.4820512820512821,
      "grad_norm": 0.2962841211526053,
      "learning_rate": 2.808798646362098e-06,
      "loss": 0.6013,
      "step": 650
    },
    {
      "epoch": 1.504843304843305,
      "grad_norm": 0.275474422497229,
      "learning_rate": 2.7664974619289344e-06,
      "loss": 0.6012,
      "step": 660
    },
    {
      "epoch": 1.5276353276353276,
      "grad_norm": 0.2951169219112843,
      "learning_rate": 2.72419627749577e-06,
      "loss": 0.6088,
      "step": 670
    },
    {
      "epoch": 1.5504273504273505,
      "grad_norm": 0.2664647901919578,
      "learning_rate": 2.681895093062606e-06,
      "loss": 0.5973,
      "step": 680
    },
    {
      "epoch": 1.573219373219373,
      "grad_norm": 0.3278845948919461,
      "learning_rate": 2.639593908629442e-06,
      "loss": 0.6012,
      "step": 690
    },
    {
      "epoch": 1.596011396011396,
      "grad_norm": 0.2834806718036075,
      "learning_rate": 2.5972927241962774e-06,
      "loss": 0.617,
      "step": 700
    },
    {
      "epoch": 1.6188034188034188,
      "grad_norm": 0.2834945453709173,
      "learning_rate": 2.5549915397631136e-06,
      "loss": 0.6039,
      "step": 710
    },
    {
      "epoch": 1.6415954415954417,
      "grad_norm": 0.2873120009439402,
      "learning_rate": 2.5126903553299493e-06,
      "loss": 0.6083,
      "step": 720
    },
    {
      "epoch": 1.6643874643874645,
      "grad_norm": 0.29239006173806986,
      "learning_rate": 2.470389170896785e-06,
      "loss": 0.6014,
      "step": 730
    },
    {
      "epoch": 1.6871794871794872,
      "grad_norm": 0.28777240203969917,
      "learning_rate": 2.4280879864636213e-06,
      "loss": 0.6023,
      "step": 740
    },
    {
      "epoch": 1.7099715099715098,
      "grad_norm": 0.287251015630762,
      "learning_rate": 2.385786802030457e-06,
      "loss": 0.5995,
      "step": 750
    },
    {
      "epoch": 1.7327635327635327,
      "grad_norm": 0.2833754954830982,
      "learning_rate": 2.343485617597293e-06,
      "loss": 0.6038,
      "step": 760
    },
    {
      "epoch": 1.7555555555555555,
      "grad_norm": 0.29523465034573726,
      "learning_rate": 2.3011844331641286e-06,
      "loss": 0.6003,
      "step": 770
    },
    {
      "epoch": 1.7783475783475784,
      "grad_norm": 0.2835164587155957,
      "learning_rate": 2.2588832487309648e-06,
      "loss": 0.6074,
      "step": 780
    },
    {
      "epoch": 1.8011396011396013,
      "grad_norm": 0.2756554371766609,
      "learning_rate": 2.2165820642978005e-06,
      "loss": 0.6048,
      "step": 790
    },
    {
      "epoch": 1.823931623931624,
      "grad_norm": 0.28250442965630257,
      "learning_rate": 2.1742808798646363e-06,
      "loss": 0.5916,
      "step": 800
    },
    {
      "epoch": 1.8467236467236468,
      "grad_norm": 0.2972957948191103,
      "learning_rate": 2.1319796954314725e-06,
      "loss": 0.5995,
      "step": 810
    },
    {
      "epoch": 1.8695156695156694,
      "grad_norm": 0.28171695683839987,
      "learning_rate": 2.0896785109983082e-06,
      "loss": 0.5972,
      "step": 820
    },
    {
      "epoch": 1.8923076923076922,
      "grad_norm": 0.27265878199095367,
      "learning_rate": 2.047377326565144e-06,
      "loss": 0.601,
      "step": 830
    },
    {
      "epoch": 1.915099715099715,
      "grad_norm": 0.2843738370580399,
      "learning_rate": 2.0050761421319797e-06,
      "loss": 0.6057,
      "step": 840
    },
    {
      "epoch": 1.937891737891738,
      "grad_norm": 0.2867696310340715,
      "learning_rate": 1.9627749576988155e-06,
      "loss": 0.6036,
      "step": 850
    },
    {
      "epoch": 1.9606837606837608,
      "grad_norm": 0.2783755363399772,
      "learning_rate": 1.9204737732656517e-06,
      "loss": 0.6048,
      "step": 860
    },
    {
      "epoch": 1.9834757834757835,
      "grad_norm": 0.30245355783375416,
      "learning_rate": 1.8781725888324874e-06,
      "loss": 0.6024,
      "step": 870
    },
    {
      "epoch": 1.9994301994301993,
      "eval_loss": 0.6210305690765381,
      "eval_runtime": 444.0845,
      "eval_samples_per_second": 26.623,
      "eval_steps_per_second": 0.417,
      "step": 877
    },
    {
      "epoch": 2.006837606837607,
      "grad_norm": 0.2701897779005862,
      "learning_rate": 1.8358714043993234e-06,
      "loss": 0.6254,
      "step": 880
    },
    {
      "epoch": 2.0296296296296297,
      "grad_norm": 0.2822641812974091,
      "learning_rate": 1.7935702199661592e-06,
      "loss": 0.5721,
      "step": 890
    },
    {
      "epoch": 2.0524216524216525,
      "grad_norm": 0.28903196675368076,
      "learning_rate": 1.7512690355329951e-06,
      "loss": 0.5881,
      "step": 900
    },
    {
      "epoch": 2.0752136752136754,
      "grad_norm": 0.3078673635479466,
      "learning_rate": 1.708967851099831e-06,
      "loss": 0.586,
      "step": 910
    },
    {
      "epoch": 2.098005698005698,
      "grad_norm": 0.2785177807663215,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.589,
      "step": 920
    },
    {
      "epoch": 2.1207977207977207,
      "grad_norm": 0.2774331957705802,
      "learning_rate": 1.6243654822335026e-06,
      "loss": 0.5866,
      "step": 930
    },
    {
      "epoch": 2.1435897435897435,
      "grad_norm": 0.2798671208355392,
      "learning_rate": 1.5820642978003386e-06,
      "loss": 0.5812,
      "step": 940
    },
    {
      "epoch": 2.1663817663817664,
      "grad_norm": 0.27132799501645843,
      "learning_rate": 1.5397631133671746e-06,
      "loss": 0.5822,
      "step": 950
    },
    {
      "epoch": 2.1891737891737892,
      "grad_norm": 0.277820696734235,
      "learning_rate": 1.4974619289340103e-06,
      "loss": 0.5871,
      "step": 960
    },
    {
      "epoch": 2.211965811965812,
      "grad_norm": 0.27608147560234303,
      "learning_rate": 1.455160744500846e-06,
      "loss": 0.5859,
      "step": 970
    },
    {
      "epoch": 2.234757834757835,
      "grad_norm": 0.285099746375909,
      "learning_rate": 1.412859560067682e-06,
      "loss": 0.5755,
      "step": 980
    },
    {
      "epoch": 2.2575498575498574,
      "grad_norm": 0.2834009068864172,
      "learning_rate": 1.3705583756345178e-06,
      "loss": 0.5845,
      "step": 990
    },
    {
      "epoch": 2.2803418803418802,
      "grad_norm": 0.2659315972803874,
      "learning_rate": 1.3282571912013536e-06,
      "loss": 0.5847,
      "step": 1000
    },
    {
      "epoch": 2.303133903133903,
      "grad_norm": 0.2842998910857186,
      "learning_rate": 1.2859560067681896e-06,
      "loss": 0.5822,
      "step": 1010
    },
    {
      "epoch": 2.325925925925926,
      "grad_norm": 0.2925272042167459,
      "learning_rate": 1.2436548223350255e-06,
      "loss": 0.5851,
      "step": 1020
    },
    {
      "epoch": 2.348717948717949,
      "grad_norm": 0.2730212322505555,
      "learning_rate": 1.2013536379018613e-06,
      "loss": 0.5866,
      "step": 1030
    },
    {
      "epoch": 2.3715099715099717,
      "grad_norm": 0.2659809610224369,
      "learning_rate": 1.1590524534686973e-06,
      "loss": 0.5841,
      "step": 1040
    },
    {
      "epoch": 2.394301994301994,
      "grad_norm": 0.2887950743404575,
      "learning_rate": 1.116751269035533e-06,
      "loss": 0.5834,
      "step": 1050
    },
    {
      "epoch": 2.417094017094017,
      "grad_norm": 0.2748521691064405,
      "learning_rate": 1.074450084602369e-06,
      "loss": 0.5917,
      "step": 1060
    },
    {
      "epoch": 2.43988603988604,
      "grad_norm": 0.2647499378345614,
      "learning_rate": 1.0321489001692048e-06,
      "loss": 0.5948,
      "step": 1070
    },
    {
      "epoch": 2.4626780626780627,
      "grad_norm": 0.27739116649461487,
      "learning_rate": 9.898477157360407e-07,
      "loss": 0.59,
      "step": 1080
    },
    {
      "epoch": 2.4854700854700855,
      "grad_norm": 0.264670636993555,
      "learning_rate": 9.475465313028766e-07,
      "loss": 0.5873,
      "step": 1090
    },
    {
      "epoch": 2.5082621082621084,
      "grad_norm": 0.3050069220099809,
      "learning_rate": 9.052453468697125e-07,
      "loss": 0.5941,
      "step": 1100
    },
    {
      "epoch": 2.5310541310541312,
      "grad_norm": 0.2673821103230609,
      "learning_rate": 8.629441624365482e-07,
      "loss": 0.587,
      "step": 1110
    },
    {
      "epoch": 2.5538461538461537,
      "grad_norm": 0.26862153961949736,
      "learning_rate": 8.206429780033842e-07,
      "loss": 0.5852,
      "step": 1120
    },
    {
      "epoch": 2.5766381766381765,
      "grad_norm": 0.27402673827658036,
      "learning_rate": 7.7834179357022e-07,
      "loss": 0.5845,
      "step": 1130
    },
    {
      "epoch": 2.5994301994301994,
      "grad_norm": 0.27126620560352377,
      "learning_rate": 7.360406091370559e-07,
      "loss": 0.5842,
      "step": 1140
    },
    {
      "epoch": 2.6222222222222222,
      "grad_norm": 0.2795326890639239,
      "learning_rate": 6.937394247038917e-07,
      "loss": 0.5843,
      "step": 1150
    },
    {
      "epoch": 2.645014245014245,
      "grad_norm": 0.2755605775212532,
      "learning_rate": 6.514382402707276e-07,
      "loss": 0.585,
      "step": 1160
    },
    {
      "epoch": 2.667806267806268,
      "grad_norm": 0.2693489818532254,
      "learning_rate": 6.091370558375635e-07,
      "loss": 0.5883,
      "step": 1170
    },
    {
      "epoch": 2.690598290598291,
      "grad_norm": 0.27046692244308457,
      "learning_rate": 5.668358714043994e-07,
      "loss": 0.5833,
      "step": 1180
    },
    {
      "epoch": 2.7133903133903132,
      "grad_norm": 0.26289588127700764,
      "learning_rate": 5.245346869712352e-07,
      "loss": 0.5842,
      "step": 1190
    },
    {
      "epoch": 2.736182336182336,
      "grad_norm": 0.26779284153011157,
      "learning_rate": 4.822335025380711e-07,
      "loss": 0.5874,
      "step": 1200
    },
    {
      "epoch": 2.758974358974359,
      "grad_norm": 0.2635717002104139,
      "learning_rate": 4.39932318104907e-07,
      "loss": 0.5906,
      "step": 1210
    },
    {
      "epoch": 2.781766381766382,
      "grad_norm": 0.2619116219484112,
      "learning_rate": 3.9763113367174284e-07,
      "loss": 0.5803,
      "step": 1220
    },
    {
      "epoch": 2.8045584045584047,
      "grad_norm": 0.255714340845448,
      "learning_rate": 3.553299492385787e-07,
      "loss": 0.5788,
      "step": 1230
    },
    {
      "epoch": 2.827350427350427,
      "grad_norm": 0.2678196093330931,
      "learning_rate": 3.1302876480541457e-07,
      "loss": 0.5938,
      "step": 1240
    },
    {
      "epoch": 2.8501424501424504,
      "grad_norm": 0.2603561895360124,
      "learning_rate": 2.7072758037225044e-07,
      "loss": 0.5907,
      "step": 1250
    },
    {
      "epoch": 2.872934472934473,
      "grad_norm": 0.2659043115994451,
      "learning_rate": 2.284263959390863e-07,
      "loss": 0.5877,
      "step": 1260
    },
    {
      "epoch": 2.8957264957264957,
      "grad_norm": 0.2668875408756563,
      "learning_rate": 1.8612521150592217e-07,
      "loss": 0.5886,
      "step": 1270
    },
    {
      "epoch": 2.9185185185185185,
      "grad_norm": 0.2634976772367436,
      "learning_rate": 1.4382402707275804e-07,
      "loss": 0.5787,
      "step": 1280
    },
    {
      "epoch": 2.9413105413105414,
      "grad_norm": 0.2654663436073362,
      "learning_rate": 1.0152284263959391e-07,
      "loss": 0.5786,
      "step": 1290
    },
    {
      "epoch": 2.9641025641025642,
      "grad_norm": 0.27081875228470587,
      "learning_rate": 5.922165820642979e-08,
      "loss": 0.58,
      "step": 1300
    },
    {
      "epoch": 2.9868945868945866,
      "grad_norm": 0.2708769490432497,
      "learning_rate": 1.6920473773265652e-08,
      "loss": 0.5901,
      "step": 1310
    },
    {
      "epoch": 2.996011396011396,
      "eval_loss": 0.6205938458442688,
      "eval_runtime": 445.6085,
      "eval_samples_per_second": 26.532,
      "eval_steps_per_second": 0.415,
      "step": 1314
    },
    {
      "epoch": 2.996011396011396,
      "step": 1314,
      "total_flos": 2755219238682624.0,
      "train_loss": 0.6164092647430559,
      "train_runtime": 70895.1751,
      "train_samples_per_second": 9.505,
      "train_steps_per_second": 0.019
    }
  ],
  "logging_steps": 10,
  "max_steps": 1314,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2755219238682624.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}