|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1478,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0013531799729364006, "grad_norm": 22.625, "learning_rate": 1.3513513513513515e-07, "loss": 2.1988, "step": 1},
    {"epoch": 0.006765899864682003, "grad_norm": 21.875, "learning_rate": 6.756756756756758e-07, "loss": 2.2059, "step": 5},
    {"epoch": 0.013531799729364006, "grad_norm": 22.5, "learning_rate": 1.3513513513513515e-06, "loss": 2.2336, "step": 10},
    {"epoch": 0.02029769959404601, "grad_norm": 16.25, "learning_rate": 2.0270270270270273e-06, "loss": 2.244, "step": 15},
    {"epoch": 0.02706359945872801, "grad_norm": 8.6875, "learning_rate": 2.702702702702703e-06, "loss": 2.181, "step": 20},
    {"epoch": 0.03382949932341001, "grad_norm": 6.03125, "learning_rate": 3.3783783783783788e-06, "loss": 2.1568, "step": 25},
    {"epoch": 0.04059539918809202, "grad_norm": 5.15625, "learning_rate": 4.0540540540540545e-06, "loss": 2.1029, "step": 30},
    {"epoch": 0.04736129905277402, "grad_norm": 4.4375, "learning_rate": 4.72972972972973e-06, "loss": 2.0545, "step": 35},
    {"epoch": 0.05412719891745602, "grad_norm": 3.734375, "learning_rate": 5.405405405405406e-06, "loss": 2.0671, "step": 40},
    {"epoch": 0.06089309878213803, "grad_norm": 3.546875, "learning_rate": 6.081081081081082e-06, "loss": 2.0335, "step": 45},
    {"epoch": 0.06765899864682003, "grad_norm": 3.671875, "learning_rate": 6.7567567567567575e-06, "loss": 1.9884, "step": 50},
    {"epoch": 0.07442489851150202, "grad_norm": 4.84375, "learning_rate": 7.4324324324324324e-06, "loss": 1.9429, "step": 55},
    {"epoch": 0.08119079837618404, "grad_norm": 3.609375, "learning_rate": 8.108108108108109e-06, "loss": 1.8937, "step": 60},
    {"epoch": 0.08795669824086604, "grad_norm": 3.21875, "learning_rate": 8.783783783783785e-06, "loss": 1.8555, "step": 65},
    {"epoch": 0.09472259810554803, "grad_norm": 3.015625, "learning_rate": 9.45945945945946e-06, "loss": 1.8032, "step": 70},
    {"epoch": 0.10148849797023005, "grad_norm": 2.921875, "learning_rate": 1.0135135135135136e-05, "loss": 1.8064, "step": 75},
    {"epoch": 0.10825439783491204, "grad_norm": 2.90625, "learning_rate": 1.0810810810810812e-05, "loss": 1.8009, "step": 80},
    {"epoch": 0.11502029769959404, "grad_norm": 2.859375, "learning_rate": 1.1486486486486488e-05, "loss": 1.7624, "step": 85},
    {"epoch": 0.12178619756427606, "grad_norm": 2.796875, "learning_rate": 1.2162162162162164e-05, "loss": 1.7737, "step": 90},
    {"epoch": 0.12855209742895804, "grad_norm": 2.8125, "learning_rate": 1.283783783783784e-05, "loss": 1.7472, "step": 95},
    {"epoch": 0.13531799729364005, "grad_norm": 2.609375, "learning_rate": 1.3513513513513515e-05, "loss": 1.7465, "step": 100},
    {"epoch": 0.14208389715832206, "grad_norm": 2.75, "learning_rate": 1.4189189189189189e-05, "loss": 1.7326, "step": 105},
    {"epoch": 0.14884979702300405, "grad_norm": 2.5625, "learning_rate": 1.4864864864864865e-05, "loss": 1.7463, "step": 110},
    {"epoch": 0.15561569688768606, "grad_norm": 2.609375, "learning_rate": 1.554054054054054e-05, "loss": 1.7175, "step": 115},
    {"epoch": 0.16238159675236807, "grad_norm": 2.671875, "learning_rate": 1.6216216216216218e-05, "loss": 1.6941, "step": 120},
    {"epoch": 0.16914749661705006, "grad_norm": 2.609375, "learning_rate": 1.6891891891891896e-05, "loss": 1.6953, "step": 125},
    {"epoch": 0.17591339648173207, "grad_norm": 2.578125, "learning_rate": 1.756756756756757e-05, "loss": 1.7159, "step": 130},
    {"epoch": 0.18267929634641408, "grad_norm": 2.671875, "learning_rate": 1.8243243243243244e-05, "loss": 1.7148, "step": 135},
    {"epoch": 0.18944519621109607, "grad_norm": 2.6875, "learning_rate": 1.891891891891892e-05, "loss": 1.6967, "step": 140},
    {"epoch": 0.19621109607577808, "grad_norm": 2.578125, "learning_rate": 1.9594594594594595e-05, "loss": 1.7066, "step": 145},
    {"epoch": 0.2029769959404601, "grad_norm": 2.578125, "learning_rate": 1.9999888409903948e-05, "loss": 1.7105, "step": 150},
    {"epoch": 0.20974289580514208, "grad_norm": 2.65625, "learning_rate": 1.9998633049924693e-05, "loss": 1.6862, "step": 155},
    {"epoch": 0.2165087956698241, "grad_norm": 2.46875, "learning_rate": 1.999598301803528e-05, "loss": 1.6791, "step": 160},
    {"epoch": 0.2232746955345061, "grad_norm": 2.8125, "learning_rate": 1.9991938683878746e-05, "loss": 1.6925, "step": 165},
    {"epoch": 0.23004059539918809, "grad_norm": 2.515625, "learning_rate": 1.9986500611584133e-05, "loss": 1.6846, "step": 170},
    {"epoch": 0.2368064952638701, "grad_norm": 2.421875, "learning_rate": 1.997966955968779e-05, "loss": 1.6782, "step": 175},
    {"epoch": 0.2435723951285521, "grad_norm": 2.734375, "learning_rate": 1.997144648102759e-05, "loss": 1.6852, "step": 180},
    {"epoch": 0.2503382949932341, "grad_norm": 2.53125, "learning_rate": 1.9961832522610004e-05, "loss": 1.7028, "step": 185},
    {"epoch": 0.2571041948579161, "grad_norm": 2.59375, "learning_rate": 1.9950829025450116e-05, "loss": 1.6483, "step": 190},
    {"epoch": 0.2638700947225981, "grad_norm": 2.453125, "learning_rate": 1.9938437524384572e-05, "loss": 1.6498, "step": 195},
    {"epoch": 0.2706359945872801, "grad_norm": 2.59375, "learning_rate": 1.9924659747857485e-05, "loss": 1.6601, "step": 200},
    {"epoch": 0.2774018944519621, "grad_norm": 2.5, "learning_rate": 1.990949761767935e-05, "loss": 1.6657, "step": 205},
    {"epoch": 0.28416779431664413, "grad_norm": 2.609375, "learning_rate": 1.989295324875897e-05, "loss": 1.676, "step": 210},
    {"epoch": 0.29093369418132614, "grad_norm": 2.515625, "learning_rate": 1.9875028948808457e-05, "loss": 1.65, "step": 215},
    {"epoch": 0.2976995940460081, "grad_norm": 2.484375, "learning_rate": 1.985572721802134e-05, "loss": 1.648, "step": 220},
    {"epoch": 0.3044654939106901, "grad_norm": 2.46875, "learning_rate": 1.9835050748723826e-05, "loss": 1.6537, "step": 225},
    {"epoch": 0.3112313937753721, "grad_norm": 2.453125, "learning_rate": 1.981300242499924e-05, "loss": 1.6459, "step": 230},
    {"epoch": 0.31799729364005414, "grad_norm": 2.5625, "learning_rate": 1.978958532228576e-05, "loss": 1.6383, "step": 235},
    {"epoch": 0.32476319350473615, "grad_norm": 2.578125, "learning_rate": 1.9764802706947423e-05, "loss": 1.6624, "step": 240},
    {"epoch": 0.33152909336941816, "grad_norm": 2.53125, "learning_rate": 1.9738658035818495e-05, "loss": 1.6607, "step": 245},
    {"epoch": 0.3382949932341001, "grad_norm": 2.375, "learning_rate": 1.9711154955721338e-05, "loss": 1.6527, "step": 250},
    {"epoch": 0.34506089309878213, "grad_norm": 2.59375, "learning_rate": 1.9682297302957666e-05, "loss": 1.6423, "step": 255},
    {"epoch": 0.35182679296346414, "grad_norm": 2.5, "learning_rate": 1.9652089102773487e-05, "loss": 1.6288, "step": 260},
    {"epoch": 0.35859269282814615, "grad_norm": 2.4375, "learning_rate": 1.962053456879761e-05, "loss": 1.6436, "step": 265},
    {"epoch": 0.36535859269282817, "grad_norm": 2.46875, "learning_rate": 1.95876381024539e-05, "loss": 1.6378, "step": 270},
    {"epoch": 0.3721244925575101, "grad_norm": 2.59375, "learning_rate": 1.9553404292347356e-05, "loss": 1.6508, "step": 275},
    {"epoch": 0.37889039242219213, "grad_norm": 2.578125, "learning_rate": 1.9517837913624048e-05, "loss": 1.6475, "step": 280},
    {"epoch": 0.38565629228687415, "grad_norm": 2.546875, "learning_rate": 1.948094392730506e-05, "loss": 1.6353, "step": 285},
    {"epoch": 0.39242219215155616, "grad_norm": 2.40625, "learning_rate": 1.9442727479594486e-05, "loss": 1.6464, "step": 290},
    {"epoch": 0.39918809201623817, "grad_norm": 2.328125, "learning_rate": 1.9403193901161614e-05, "loss": 1.6157, "step": 295},
    {"epoch": 0.4059539918809202, "grad_norm": 2.5625, "learning_rate": 1.9362348706397374e-05, "loss": 1.643, "step": 300},
    {"epoch": 0.41271989174560214, "grad_norm": 2.5, "learning_rate": 1.932019759264514e-05, "loss": 1.6225, "step": 305},
    {"epoch": 0.41948579161028415, "grad_norm": 2.625, "learning_rate": 1.9276746439406046e-05, "loss": 1.6139, "step": 310},
    {"epoch": 0.42625169147496617, "grad_norm": 2.59375, "learning_rate": 1.923200130751887e-05, "loss": 1.6127, "step": 315},
    {"epoch": 0.4330175913396482, "grad_norm": 2.53125, "learning_rate": 1.918596843831462e-05, "loss": 1.6245, "step": 320},
    {"epoch": 0.4397834912043302, "grad_norm": 2.84375, "learning_rate": 1.913865425274597e-05, "loss": 1.6153, "step": 325},
    {"epoch": 0.4465493910690122, "grad_norm": 2.484375, "learning_rate": 1.909006535049163e-05, "loss": 1.6101, "step": 330},
    {"epoch": 0.45331529093369416, "grad_norm": 2.453125, "learning_rate": 1.9040208509035745e-05, "loss": 1.5968, "step": 335},
    {"epoch": 0.46008119079837617, "grad_norm": 2.421875, "learning_rate": 1.8989090682722583e-05, "loss": 1.6055, "step": 340},
    {"epoch": 0.4668470906630582, "grad_norm": 2.484375, "learning_rate": 1.8936719001786453e-05, "loss": 1.6253, "step": 345},
    {"epoch": 0.4736129905277402, "grad_norm": 2.625, "learning_rate": 1.888310077135716e-05, "loss": 1.6369, "step": 350},
    {"epoch": 0.4803788903924222, "grad_norm": 2.609375, "learning_rate": 1.8828243470441026e-05, "loss": 1.6359, "step": 355},
    {"epoch": 0.4871447902571042, "grad_norm": 2.453125, "learning_rate": 1.8772154750877696e-05, "loss": 1.613, "step": 360},
    {"epoch": 0.4939106901217862, "grad_norm": 2.421875, "learning_rate": 1.8714842436272774e-05, "loss": 1.603, "step": 365},
    {"epoch": 0.5006765899864682, "grad_norm": 2.484375, "learning_rate": 1.865631452090657e-05, "loss": 1.5804, "step": 370},
    {"epoch": 0.5074424898511503, "grad_norm": 2.953125, "learning_rate": 1.859657916861899e-05, "loss": 1.62, "step": 375},
    {"epoch": 0.5142083897158322, "grad_norm": 2.390625, "learning_rate": 1.8535644711670804e-05, "loss": 1.6201, "step": 380},
    {"epoch": 0.5209742895805142, "grad_norm": 2.421875, "learning_rate": 1.8473519649581396e-05, "loss": 1.6105, "step": 385},
    {"epoch": 0.5277401894451962, "grad_norm": 2.640625, "learning_rate": 1.8410212647943215e-05, "loss": 1.6231, "step": 390},
    {"epoch": 0.5345060893098782, "grad_norm": 2.46875, "learning_rate": 1.834573253721303e-05, "loss": 1.5854, "step": 395},
    {"epoch": 0.5412719891745602, "grad_norm": 2.46875, "learning_rate": 1.8280088311480203e-05, "loss": 1.5792, "step": 400},
    {"epoch": 0.5480378890392422, "grad_norm": 2.546875, "learning_rate": 1.8213289127212152e-05, "loss": 1.6033, "step": 405},
    {"epoch": 0.5548037889039242, "grad_norm": 2.53125, "learning_rate": 1.8145344301977126e-05, "loss": 1.5986, "step": 410},
    {"epoch": 0.5615696887686062, "grad_norm": 2.484375, "learning_rate": 1.8076263313144568e-05, "loss": 1.6238, "step": 415},
    {"epoch": 0.5683355886332883, "grad_norm": 2.53125, "learning_rate": 1.8006055796563103e-05, "loss": 1.5919, "step": 420},
    {"epoch": 0.5751014884979703, "grad_norm": 2.390625, "learning_rate": 1.7934731545216515e-05, "loss": 1.6125, "step": 425},
    {"epoch": 0.5818673883626523, "grad_norm": 2.46875, "learning_rate": 1.7862300507857733e-05, "loss": 1.6184, "step": 430},
    {"epoch": 0.5886332882273342, "grad_norm": 2.4375, "learning_rate": 1.7788772787621126e-05, "loss": 1.6034, "step": 435},
    {"epoch": 0.5953991880920162, "grad_norm": 2.53125, "learning_rate": 1.771415864061326e-05, "loss": 1.5829, "step": 440},
    {"epoch": 0.6021650879566982, "grad_norm": 2.421875, "learning_rate": 1.7638468474482297e-05, "loss": 1.6085, "step": 445},
    {"epoch": 0.6089309878213802, "grad_norm": 2.515625, "learning_rate": 1.756171284696629e-05, "loss": 1.6123, "step": 450},
    {"epoch": 0.6156968876860622, "grad_norm": 2.46875, "learning_rate": 1.7483902464420507e-05, "loss": 1.6034, "step": 455},
    {"epoch": 0.6224627875507442, "grad_norm": 2.46875, "learning_rate": 1.7405048180324046e-05, "loss": 1.5858, "step": 460},
    {"epoch": 0.6292286874154263, "grad_norm": 2.5, "learning_rate": 1.7325160993765934e-05, "loss": 1.6065, "step": 465},
    {"epoch": 0.6359945872801083, "grad_norm": 2.515625, "learning_rate": 1.7244252047910893e-05, "loss": 1.5963, "step": 470},
    {"epoch": 0.6427604871447903, "grad_norm": 2.484375, "learning_rate": 1.7162332628445024e-05, "loss": 1.594, "step": 475},
    {"epoch": 0.6495263870094723, "grad_norm": 2.484375, "learning_rate": 1.7079414162001617e-05, "loss": 1.5899, "step": 480},
    {"epoch": 0.6562922868741543, "grad_norm": 2.484375, "learning_rate": 1.6995508214567275e-05, "loss": 1.5786, "step": 485},
    {"epoch": 0.6630581867388363, "grad_norm": 2.484375, "learning_rate": 1.691062648986865e-05, "loss": 1.5962, "step": 490},
    {"epoch": 0.6698240866035182, "grad_norm": 2.625, "learning_rate": 1.682478082773989e-05, "loss": 1.5961, "step": 495},
    {"epoch": 0.6765899864682002, "grad_norm": 2.46875, "learning_rate": 1.673798320247118e-05, "loss": 1.6023, "step": 500},
    {"epoch": 0.6833558863328822, "grad_norm": 2.40625, "learning_rate": 1.6650245721138483e-05, "loss": 1.6074, "step": 505},
    {"epoch": 0.6901217861975643, "grad_norm": 2.421875, "learning_rate": 1.6561580621914764e-05, "loss": 1.5767, "step": 510},
    {"epoch": 0.6968876860622463, "grad_norm": 2.46875, "learning_rate": 1.6472000272362937e-05, "loss": 1.6047, "step": 515},
    {"epoch": 0.7036535859269283, "grad_norm": 2.53125, "learning_rate": 1.6381517167710757e-05, "loss": 1.5846, "step": 520},
    {"epoch": 0.7104194857916103, "grad_norm": 2.5625, "learning_rate": 1.629014392910791e-05, "loss": 1.5756, "step": 525},
    {"epoch": 0.7171853856562923, "grad_norm": 2.453125, "learning_rate": 1.619789330186555e-05, "loss": 1.5966, "step": 530},
    {"epoch": 0.7239512855209743, "grad_norm": 2.53125, "learning_rate": 1.6104778153678467e-05, "loss": 1.5901, "step": 535},
    {"epoch": 0.7307171853856563, "grad_norm": 2.515625, "learning_rate": 1.6010811472830253e-05, "loss": 1.5712, "step": 540},
    {"epoch": 0.7374830852503383, "grad_norm": 2.5, "learning_rate": 1.591600636638161e-05, "loss": 1.5877, "step": 545},
    {"epoch": 0.7442489851150202, "grad_norm": 2.453125, "learning_rate": 1.5820376058342077e-05, "loss": 1.591, "step": 550},
    {"epoch": 0.7510148849797023, "grad_norm": 2.453125, "learning_rate": 1.5723933887825492e-05, "loss": 1.5814, "step": 555},
    {"epoch": 0.7577807848443843, "grad_norm": 2.609375, "learning_rate": 1.5626693307189334e-05, "loss": 1.5764, "step": 560},
    {"epoch": 0.7645466847090663, "grad_norm": 2.484375, "learning_rate": 1.5528667880158338e-05, "loss": 1.5726, "step": 565},
    {"epoch": 0.7713125845737483, "grad_norm": 2.484375, "learning_rate": 1.5429871279932514e-05, "loss": 1.5913, "step": 570},
    {"epoch": 0.7780784844384303, "grad_norm": 2.5, "learning_rate": 1.533031728727994e-05, "loss": 1.5674, "step": 575},
    {"epoch": 0.7848443843031123, "grad_norm": 2.421875, "learning_rate": 1.5230019788614527e-05, "loss": 1.5727, "step": 580},
    {"epoch": 0.7916102841677943, "grad_norm": 2.4375, "learning_rate": 1.5128992774059063e-05, "loss": 1.5762, "step": 585},
    {"epoch": 0.7983761840324763, "grad_norm": 2.484375, "learning_rate": 1.5027250335493771e-05, "loss": 1.5678, "step": 590},
    {"epoch": 0.8051420838971584, "grad_norm": 2.5, "learning_rate": 1.4924806664590702e-05, "loss": 1.5602, "step": 595},
    {"epoch": 0.8119079837618404, "grad_norm": 2.5625, "learning_rate": 1.4821676050834166e-05, "loss": 1.5761, "step": 600},
    {"epoch": 0.8186738836265224, "grad_norm": 2.4375, "learning_rate": 1.4717872879527578e-05, "loss": 1.5751, "step": 605},
    {"epoch": 0.8254397834912043, "grad_norm": 3.96875, "learning_rate": 1.461341162978688e-05, "loss": 1.5783, "step": 610},
    {"epoch": 0.8322056833558863, "grad_norm": 2.46875, "learning_rate": 1.4508306872520912e-05, "loss": 1.5859, "step": 615},
    {"epoch": 0.8389715832205683, "grad_norm": 2.515625, "learning_rate": 1.4402573268398969e-05, "loss": 1.5898, "step": 620},
    {"epoch": 0.8457374830852503, "grad_norm": 2.421875, "learning_rate": 1.4296225565805854e-05, "loss": 1.5665, "step": 625},
    {"epoch": 0.8525033829499323, "grad_norm": 2.421875, "learning_rate": 1.4189278598784648e-05, "loss": 1.5729, "step": 630},
    {"epoch": 0.8592692828146143, "grad_norm": 2.421875, "learning_rate": 1.4081747284967602e-05, "loss": 1.5769, "step": 635},
    {"epoch": 0.8660351826792964, "grad_norm": 2.34375, "learning_rate": 1.3973646623495305e-05, "loss": 1.5618, "step": 640},
    {"epoch": 0.8728010825439784, "grad_norm": 2.4375, "learning_rate": 1.3864991692924524e-05, "loss": 1.589, "step": 645},
    {"epoch": 0.8795669824086604, "grad_norm": 2.484375, "learning_rate": 1.3755797649124944e-05, "loss": 1.5733, "step": 650},
    {"epoch": 0.8863328822733424, "grad_norm": 2.453125, "learning_rate": 1.3646079723165148e-05, "loss": 1.5596, "step": 655},
    {"epoch": 0.8930987821380244, "grad_norm": 2.5, "learning_rate": 1.3535853219188064e-05, "loss": 1.5722, "step": 660},
    {"epoch": 0.8998646820027063, "grad_norm": 2.484375, "learning_rate": 1.3425133512276284e-05, "loss": 1.5554, "step": 665},
    {"epoch": 0.9066305818673883, "grad_norm": 2.453125, "learning_rate": 1.3313936046307411e-05, "loss": 1.576, "step": 670},
    {"epoch": 0.9133964817320703, "grad_norm": 2.453125, "learning_rate": 1.320227633179989e-05, "loss": 1.5668, "step": 675},
    {"epoch": 0.9201623815967523, "grad_norm": 2.25, "learning_rate": 1.3090169943749475e-05, "loss": 1.5757, "step": 680},
    {"epoch": 0.9269282814614344, "grad_norm": 2.515625, "learning_rate": 1.2977632519456745e-05, "loss": 1.5817, "step": 685},
    {"epoch": 0.9336941813261164, "grad_norm": 2.359375, "learning_rate": 1.2864679756345905e-05, "loss": 1.5847, "step": 690},
    {"epoch": 0.9404600811907984, "grad_norm": 2.484375, "learning_rate": 1.2751327409775227e-05, "loss": 1.5847, "step": 695},
    {"epoch": 0.9472259810554804, "grad_norm": 2.625, "learning_rate": 1.2637591290839377e-05, "loss": 1.5695, "step": 700},
    {"epoch": 0.9539918809201624, "grad_norm": 2.4375, "learning_rate": 1.2523487264163997e-05, "loss": 1.566, "step": 705},
    {"epoch": 0.9607577807848444, "grad_norm": 2.34375, "learning_rate": 1.2409031245692798e-05, "loss": 1.5487, "step": 710},
    {"epoch": 0.9675236806495264, "grad_norm": 2.453125, "learning_rate": 1.2294239200467516e-05, "loss": 1.5825, "step": 715},
    {"epoch": 0.9742895805142084, "grad_norm": 2.546875, "learning_rate": 1.2179127140400997e-05, "loss": 1.5651, "step": 720},
    {"epoch": 0.9810554803788903, "grad_norm": 2.40625, "learning_rate": 1.2063711122043759e-05, "loss": 1.5487, "step": 725},
    {"epoch": 0.9878213802435724, "grad_norm": 2.46875, "learning_rate": 1.1948007244344334e-05, "loss": 1.5604, "step": 730},
    {"epoch": 0.9945872801082544, "grad_norm": 2.375, "learning_rate": 1.1832031646403654e-05, "loss": 1.5838, "step": 735},
    {"epoch": 1.0, "eval_loss": 1.6891789436340332, "eval_runtime": 28.8376, "eval_samples_per_second": 15.084, "eval_steps_per_second": 1.907, "step": 739},
    {"epoch": 1.0013531799729365, "grad_norm": 2.265625, "learning_rate": 1.1715800505223918e-05, "loss": 1.5323, "step": 740},
    {"epoch": 1.0081190798376185, "grad_norm": 2.265625, "learning_rate": 1.1599330033452078e-05, "loss": 1.4855, "step": 745},
    {"epoch": 1.0148849797023005, "grad_norm": 2.4375, "learning_rate": 1.148263647711842e-05, "loss": 1.531, "step": 750},
    {"epoch": 1.0216508795669823, "grad_norm": 2.390625, "learning_rate": 1.1365736113370463e-05, "loss": 1.5063, "step": 755},
    {"epoch": 1.0284167794316643, "grad_norm": 2.515625, "learning_rate": 1.124864524820251e-05, "loss": 1.5219, "step": 760},
    {"epoch": 1.0351826792963463, "grad_norm": 2.515625, "learning_rate": 1.1131380214181205e-05, "loss": 1.4972, "step": 765},
    {"epoch": 1.0419485791610283, "grad_norm": 2.328125, "learning_rate": 1.1013957368167343e-05, "loss": 1.5092, "step": 770},
    {"epoch": 1.0487144790257104, "grad_norm": 2.4375, "learning_rate": 1.0896393089034336e-05, "loss": 1.4743, "step": 775},
    {"epoch": 1.0554803788903924, "grad_norm": 2.3125, "learning_rate": 1.0778703775383559e-05, "loss": 1.4991, "step": 780},
    {"epoch": 1.0622462787550744, "grad_norm": 2.328125, "learning_rate": 1.0660905843256995e-05, "loss": 1.4727, "step": 785},
    {"epoch": 1.0690121786197564, "grad_norm": 2.390625, "learning_rate": 1.0543015723847402e-05, "loss": 1.4992, "step": 790},
    {"epoch": 1.0757780784844384, "grad_norm": 2.375, "learning_rate": 1.042504986120641e-05, "loss": 1.5143, "step": 795},
    {"epoch": 1.0825439783491204, "grad_norm": 2.453125, "learning_rate": 1.0307024709950775e-05, "loss": 1.5027, "step": 800},
    {"epoch": 1.0893098782138024, "grad_norm": 2.25, "learning_rate": 1.0188956732967208e-05, "loss": 1.4863, "step": 805},
    {"epoch": 1.0960757780784844, "grad_norm": 2.34375, "learning_rate": 1.0070862399116016e-05, "loss": 1.5161, "step": 810},
    {"epoch": 1.1028416779431665, "grad_norm": 2.390625, "learning_rate": 9.952758180933933e-06, "loss": 1.5031, "step": 815},
    {"epoch": 1.1096075778078485, "grad_norm": 2.40625, "learning_rate": 9.834660552336415e-06, "loss": 1.5017, "step": 820},
    {"epoch": 1.1163734776725305, "grad_norm": 2.359375, "learning_rate": 9.716585986319769e-06, "loss": 1.4958, "step": 825},
    {"epoch": 1.1231393775372125, "grad_norm": 2.453125, "learning_rate": 9.598550952663383e-06, "loss": 1.5104, "step": 830},
    {"epoch": 1.1299052774018945, "grad_norm": 2.46875, "learning_rate": 9.480571915632422e-06, "loss": 1.5208, "step": 835},
    {"epoch": 1.1366711772665765, "grad_norm": 2.40625, "learning_rate": 9.362665331681294e-06, "loss": 1.5244, "step": 840},
    {"epoch": 1.1434370771312585, "grad_norm": 2.359375, "learning_rate": 9.244847647158203e-06, "loss": 1.5114, "step": 845},
    {"epoch": 1.1502029769959405, "grad_norm": 2.3125, "learning_rate": 9.127135296011102e-06, "loss": 1.5153, "step": 850},
    {"epoch": 1.1569688768606226, "grad_norm": 2.359375, "learning_rate": 9.009544697495373e-06, "loss": 1.4826, "step": 855},
    {"epoch": 1.1637347767253043, "grad_norm": 2.3125, "learning_rate": 8.892092253883602e-06, "loss": 1.5164, "step": 860},
    {"epoch": 1.1705006765899864, "grad_norm": 2.59375, "learning_rate": 8.774794348177641e-06, "loss": 1.4995, "step": 865},
    {"epoch": 1.1772665764546684, "grad_norm": 2.4375, "learning_rate": 8.657667341823449e-06, "loss": 1.5047, "step": 870},
    {"epoch": 1.1840324763193504, "grad_norm": 2.46875, "learning_rate": 8.540727572428854e-06, "loss": 1.5063, "step": 875},
    {"epoch": 1.1907983761840324, "grad_norm": 2.296875, "learning_rate": 8.423991351484715e-06, "loss": 1.4832, "step": 880},
    {"epoch": 1.1975642760487144, "grad_norm": 2.359375, "learning_rate": 8.307474962089676e-06, "loss": 1.4901, "step": 885},
    {"epoch": 1.2043301759133964, "grad_norm": 2.265625, "learning_rate": 8.191194656678905e-06, "loss": 1.5061, "step": 890},
    {"epoch": 1.2110960757780784, "grad_norm": 2.46875, "learning_rate": 8.07516665475708e-06, "loss": 1.4967, "step": 895},
    {"epoch": 1.2178619756427604, "grad_norm": 2.46875, "learning_rate": 7.959407140636034e-06, "loss": 1.5082, "step": 900},
    {"epoch": 1.2246278755074425, "grad_norm": 2.390625, "learning_rate": 7.843932261177224e-06, "loss": 1.485, "step": 905},
    {"epoch": 1.2313937753721245, "grad_norm": 2.328125, "learning_rate": 7.728758123539498e-06, "loss": 1.5061, "step": 910},
    {"epoch": 1.2381596752368065, "grad_norm": 2.375, "learning_rate": 7.613900792932331e-06, "loss": 1.4953, "step": 915},
    {"epoch": 1.2449255751014885, "grad_norm": 2.421875, "learning_rate": 7.499376290374994e-06, "loss": 1.493, "step": 920},
    {"epoch": 1.2516914749661705, "grad_norm": 2.53125, "learning_rate": 7.385200590461803e-06, "loss": 1.506, "step": 925},
    {"epoch": 1.2584573748308525, "grad_norm": 2.40625, "learning_rate": 7.271389619133908e-06, "loss": 1.5135, "step": 930},
    {"epoch": 1.2652232746955345, "grad_norm": 2.34375, "learning_rate": 7.1579592514578234e-06, "loss": 1.5076, "step": 935},
    {"epoch": 1.2719891745602165, "grad_norm": 2.359375, "learning_rate": 7.0449253094110925e-06, "loss": 1.486, "step": 940},
    {"epoch": 1.2787550744248986, "grad_norm": 2.4375, "learning_rate": 6.93230355967533e-06, "loss": 1.5115, "step": 945},
    {"epoch": 1.2855209742895806, "grad_norm": 2.484375, "learning_rate": 6.820109711436989e-06, "loss": 1.4928, "step": 950},
    {"epoch": 1.2922868741542626, "grad_norm": 2.3125, "learning_rate": 6.7083594141961326e-06, "loss": 1.5128, "step": 955},
    {"epoch": 1.2990527740189446, "grad_norm": 2.28125, "learning_rate": 6.59706825558357e-06, "loss": 1.5023, "step": 960},
    {"epoch": 1.3058186738836266, "grad_norm": 2.3125, "learning_rate": 6.486251759186573e-06, "loss": 1.489, "step": 965},
    {"epoch": 1.3125845737483086, "grad_norm": 2.328125, "learning_rate": 6.375925382383561e-06, "loss": 1.4976, "step": 970},
    {"epoch": 1.3193504736129906, "grad_norm": 2.546875, "learning_rate": 6.266104514187997e-06, "loss": 1.5049, "step": 975},
    {"epoch": 1.3261163734776726, "grad_norm": 2.3125, "learning_rate": 6.156804473101852e-06, "loss": 1.506, "step": 980},
    {"epoch": 1.3328822733423547, "grad_norm": 2.4375, "learning_rate": 6.048040504978861e-06, "loss": 1.5012, "step": 985},
    {"epoch": 1.3396481732070367, "grad_norm": 2.375, "learning_rate": 5.93982778089796e-06, "loss": 1.5174, "step": 990},
    {"epoch": 1.3464140730717187, "grad_norm": 2.328125, "learning_rate": 5.832181395047099e-06, "loss": 1.5131, "step": 995},
    {"epoch": 1.3531799729364005, "grad_norm": 2.515625, "learning_rate": 5.72511636261784e-06, "loss": 1.4879, "step": 1000},
    {"epoch": 1.3599458728010825, "grad_norm": 2.390625, "learning_rate": 5.618647617710935e-06, "loss": 1.4899, "step": 1005},
    {"epoch": 1.3667117726657645, "grad_norm": 2.453125, "learning_rate": 5.512790011253211e-06, "loss": 1.4927, "step": 1010},
    {"epoch": 1.3734776725304465, "grad_norm": 2.375, "learning_rate": 5.407558308926083e-06, "loss": 1.5084, "step": 1015},
    {"epoch": 1.3802435723951285, "grad_norm": 2.53125, "learning_rate": 5.302967189105941e-06, "loss": 1.4999, "step": 1020},
    {"epoch": 1.3870094722598105, "grad_norm": 2.3125, "learning_rate": 5.199031240816715e-06, "loss": 1.4828, "step": 1025},
    {"epoch": 1.3937753721244925, "grad_norm": 2.4375, "learning_rate": 5.095764961694923e-06, "loss": 1.4967, "step": 1030},
    {"epoch": 1.4005412719891746, "grad_norm": 2.34375, "learning_rate": 4.993182755967412e-06, "loss": 1.4959, "step": 1035},
    {"epoch": 1.4073071718538566, "grad_norm": 2.421875, "learning_rate": 4.8912989324422164e-06, "loss": 1.4905, "step": 1040},
    {"epoch": 1.4140730717185386, "grad_norm": 2.359375, "learning_rate": 4.7901277025126345e-06, "loss": 1.5105, "step": 1045},
    {"epoch": 1.4208389715832206, "grad_norm": 2.390625, "learning_rate": 4.689683178174964e-06, "loss": 1.5236, "step": 1050},
    {"epoch": 1.4276048714479026, "grad_norm": 2.328125, "learning_rate": 4.589979370060037e-06, "loss": 1.4801, "step": 1055},
    {"epoch": 1.4343707713125846, "grad_norm": 2.28125, "learning_rate": 4.491030185478976e-06, "loss": 1.5219, "step": 1060},
    {"epoch": 1.4411366711772666, "grad_norm": 2.375, "learning_rate": 4.392849426483275e-06, "loss": 1.5072, "step": 1065},
    {"epoch": 1.4479025710419486, "grad_norm": 2.3125, "learning_rate": 4.295450787939622e-06, "loss": 1.4897, "step": 1070},
    {"epoch": 1.4546684709066307, "grad_norm": 2.46875, "learning_rate": 4.198847855619652e-06, "loss": 1.5098, "step": 1075},
    {"epoch": 1.4614343707713127, "grad_norm": 2.515625, "learning_rate": 4.1030541043049125e-06, "loss": 1.5226, "step": 1080},
    {"epoch": 1.4682002706359945, "grad_norm": 2.296875, "learning_rate": 4.008082895907326e-06, "loss": 1.4999, "step": 1085},
    {"epoch": 1.4749661705006765, "grad_norm": 2.453125, "learning_rate": 3.913947477605378e-06, "loss": 1.5023, "step": 1090},
    {"epoch": 1.4817320703653585, "grad_norm": 2.5, "learning_rate": 3.8206609799963205e-06, "loss": 1.5144, "step": 1095},
    {"epoch": 1.4884979702300405, "grad_norm": 2.375, "learning_rate": 3.72823641526463e-06, "loss": 1.5185, "step": 1100},
    {"epoch": 1.4952638700947225, "grad_norm": 2.390625, "learning_rate": 3.636686675367006e-06, "loss": 1.4987, "step": 1105},
    {"epoch": 1.5020297699594045, "grad_norm": 2.484375, "learning_rate": 3.5460245302340914e-06, "loss": 1.5168, "step": 1110},
    {"epoch": 1.5087956698240865, "grad_norm": 2.34375, "learning_rate": 3.4562626259892605e-06, "loss": 1.4996, "step": 1115},
    {"epoch": 1.5155615696887685, "grad_norm": 2.328125, "learning_rate": 3.367413483184654e-06, "loss": 1.5008, "step": 1120},
    {"epoch": 1.5223274695534506, "grad_norm": 2.359375, "learning_rate": 3.279489495054742e-06, "loss": 1.4882, "step": 1125},
    {"epoch": 1.5290933694181326, "grad_norm": 2.296875, "learning_rate": 3.1925029257876206e-06, "loss": 1.4648, "step": 1130},
    {"epoch": 1.5358592692828146, "grad_norm": 2.5, "learning_rate": 3.1064659088143424e-06, "loss": 1.5048, "step": 1135},
    {"epoch": 1.5426251691474966, "grad_norm": 2.359375, "learning_rate": 3.0213904451164623e-06, "loss": 1.5161, "step": 1140},
    {"epoch": 1.5493910690121786, "grad_norm": 2.421875, "learning_rate": 2.937288401552063e-06, "loss": 1.5037, "step": 1145},
    {"epoch": 1.5561569688768606, "grad_norm": 2.296875, "learning_rate": 2.8541715092005097e-06, "loss": 1.5058, "step": 1150},
    {"epoch": 1.5629228687415426, "grad_norm": 2.421875, "learning_rate": 2.7720513617260857e-06, "loss": 1.5019, "step": 1155},
    {"epoch": 1.5696887686062246, "grad_norm": 2.859375, "learning_rate": 2.690939413760887e-06, "loss": 1.5006, "step": 1160},
    {"epoch": 1.5764546684709067, "grad_norm": 2.40625, "learning_rate": 2.610846979307016e-06, "loss": 1.5078, "step": 1165},
    {"epoch": 1.5832205683355887, "grad_norm": 2.546875, "learning_rate": 2.5317852301584642e-06, "loss": 1.4985, "step": 1170},
    {"epoch": 1.5899864682002707, "grad_norm": 2.34375, "learning_rate": 2.4537651943427666e-06, "loss": 1.4675, "step": 1175},
    {"epoch": 1.5967523680649527, "grad_norm": 2.4375, "learning_rate": 2.376797754582785e-06, "loss": 1.4989, "step": 1180},
    {"epoch": 1.6035182679296347, "grad_norm": 2.359375, "learning_rate": 2.3008936467786815e-06, "loss": 1.4755, "step": 1185},
    {"epoch": 1.6102841677943167, "grad_norm": 2.359375, "learning_rate": 2.226063458510428e-06, "loss": 1.4977, "step": 1190},
    {"epoch": 1.6170500676589987, "grad_norm": 2.3125, "learning_rate": 2.152317627560979e-06, "loss": 1.4965, "step": 1195},
    {"epoch": 1.6238159675236807, "grad_norm": 2.34375, "learning_rate": 2.0796664404603416e-06, "loss": 1.4801, "step": 1200},
    {"epoch": 1.6305818673883627, "grad_norm": 2.421875, "learning_rate": 2.008120031050753e-06, "loss": 1.5145, "step": 1205},
    {"epoch": 1.6373477672530448, "grad_norm": 2.46875, "learning_rate": 1.9376883790731417e-06, "loss": 1.5032, "step": 1210},
    {"epoch": 1.6441136671177268, "grad_norm": 2.484375, "learning_rate": 1.8683813087750901e-06, "loss": 1.4968, "step": 1215},
    {"epoch": 1.6508795669824088, "grad_norm": 2.375, "learning_rate": 1.8002084875404935e-06, "loss": 1.512, "step": 1220},
    {"epoch": 1.6576454668470908, "grad_norm": 2.40625, "learning_rate": 1.7331794245410926e-06, "loss": 1.5059, "step": 1225},
    {"epoch": 1.6644113667117728, "grad_norm": 2.484375, "learning_rate": 1.6673034694100655e-06, "loss": 1.5124, "step": 1230},
    {"epoch": 1.6711772665764548, "grad_norm": 2.296875, "learning_rate": 1.6025898109378967e-06, "loss": 1.5248, "step": 1235},
    {"epoch": 1.6779431664411368, "grad_norm": 2.265625, "learning_rate": 1.5390474757906449e-06, "loss": 1.5044, "step": 1240},
    {"epoch": 1.6847090663058186, "grad_norm": 2.40625, "learning_rate": 1.4766853272508785e-06, "loss": 1.5081, "step": 1245},
    {"epoch": 1.6914749661705006, "grad_norm": 2.375, "learning_rate": 1.4155120639813392e-06, "loss": 1.4982, "step": 1250},
    {"epoch": 1.6982408660351827, "grad_norm": 2.375, "learning_rate": 1.3555362188116173e-06, "loss": 1.4716, "step": 1255},
    {"epoch": 1.7050067658998647, "grad_norm": 2.484375, "learning_rate": 1.2967661575479318e-06, "loss": 1.5055, "step": 1260},
    {"epoch": 1.7117726657645467, "grad_norm": 2.328125, "learning_rate": 1.2392100778062123e-06, "loss": 1.5066, "step": 1265},
    {"epoch": 1.7185385656292287, "grad_norm": 2.359375, "learning_rate": 1.1828760078686563e-06, "loss": 1.5146, "step": 1270},
    {"epoch": 1.7253044654939107, "grad_norm": 2.34375, "learning_rate": 1.127771805563882e-06, "loss": 1.5009, "step": 1275},
    {"epoch": 1.7320703653585927, "grad_norm": 2.4375, "learning_rate": 1.0739051571708736e-06, "loss": 1.5149, "step": 1280},
    {"epoch": 1.7388362652232747, "grad_norm": 2.359375, "learning_rate": 1.0212835763468488e-06, "loss": 1.5055, "step": 1285},
    {"epoch": 1.7456021650879567, "grad_norm": 2.328125, "learning_rate": 9.699144030792163e-07, "loss": 1.5028, "step": 1290},
    {"epoch": 1.7523680649526387, "grad_norm": 2.34375, "learning_rate": 9.198048026617323e-07, "loss": 1.4877, "step": 1295},
    {"epoch": 1.7591339648173205, "grad_norm": 2.359375, "learning_rate": 8.709617646950563e-07, "loss": 1.5085, "step": 1300},
    {"epoch": 1.7658998646820026, "grad_norm": 2.203125, "learning_rate": 8.233921021117863e-07, "loss": 1.4859, "step": 1305},
    {"epoch": 1.7726657645466846, "grad_norm": 2.40625, "learning_rate": 7.771024502261526e-07, "loss": 1.5006, "step": 1310},
    {"epoch": 1.7794316644113666, "grad_norm": 2.359375, "learning_rate": 7.320992658084891e-07, "loss": 1.5159, "step": 1315},
    {"epoch": 1.7861975642760486, "grad_norm": 2.421875, "learning_rate": 6.883888261845917e-07, "loss": 1.5117, "step": 1320},
    {"epoch": 1.7929634641407306, "grad_norm": 2.4375, "learning_rate": 6.459772283601218e-07, "loss": 1.5026, "step": 1325},
    {"epoch": 1.7997293640054126, "grad_norm": 2.46875, "learning_rate": 6.048703881701579e-07, "loss": 1.4971, "step": 1330},
    {"epoch": 1.8064952638700946, "grad_norm": 2.40625, "learning_rate": 5.650740394540255e-07, "loss": 1.4969, "step": 1335},
    {"epoch": 1.8132611637347766, "grad_norm": 2.359375, "learning_rate": 5.265937332554849e-07, "loss": 1.4996, "step": 1340},
    {"epoch": 1.8200270635994586, "grad_norm": 2.375, "learning_rate": 4.894348370484648e-07, "loss": 1.5124, "step": 1345},
    {"epoch": 1.8267929634641407, "grad_norm": 2.5, "learning_rate": 4.5360253398834765e-07, "loss": 1.5047, "step": 1350},
    {"epoch": 1.8335588633288227, "grad_norm": 2.359375, "learning_rate": 4.1910182218900977e-07, "loss": 1.5211, "step": 1355},
    {"epoch": 1.8403247631935047, "grad_norm": 2.34375, "learning_rate": 3.8593751402563715e-07, "loss": 1.5189, "step": 1360},
    {"epoch": 1.8470906630581867, "grad_norm": 2.40625, "learning_rate": 3.541142354634808e-07, "loss": 1.4941, "step": 1365},
    {"epoch": 1.8538565629228687, "grad_norm": 2.5, "learning_rate": 3.236364254125868e-07, "loss": 1.47, "step": 1370},
    {"epoch": 1.8606224627875507, "grad_norm": 2.421875, "learning_rate": 2.945083351086331e-07, "loss": 1.5138, "step": 1375},
    {"epoch": 1.8673883626522327, "grad_norm": 2.328125, "learning_rate": 2.667340275199426e-07, "loss": 1.5025, "step": 1380},
    {"epoch": 1.8741542625169147, "grad_norm": 2.5, "learning_rate": 2.4031737678074985e-07, "loss": 1.5216, "step": 1385},
    {"epoch": 1.8809201623815968, "grad_norm": 2.296875, "learning_rate": 2.1526206765082104e-07, "loss": 1.5059, "step": 1390},
    {"epoch": 1.8876860622462788, "grad_norm": 2.390625, "learning_rate": 1.915715950014696e-07, "loss": 1.4874, "step": 1395},
    {"epoch": 1.8944519621109608, "grad_norm": 2.515625, "learning_rate": 1.6924926332807956e-07, "loss": 1.5109, "step": 1400},
    {"epoch": 1.9012178619756428, "grad_norm": 2.3125, "learning_rate": 1.4829818628916525e-07, "loss": 1.4955, "step": 1405},
    {"epoch": 1.9079837618403248, "grad_norm": 2.25, "learning_rate": 1.287212862720677e-07, "loss": 1.5023, "step": 1410},
    {"epoch": 1.9147496617050068, "grad_norm": 2.390625, "learning_rate": 1.1052129398531508e-07, "loss": 1.4818, "step": 1415},
    {"epoch": 1.9215155615696888, "grad_norm": 2.25, "learning_rate": 9.370074807772966e-08, "loss": 1.4966, "step": 1420},
    {"epoch": 1.9282814614343708, "grad_norm": 2.375, "learning_rate": 7.826199478431551e-08, "loss": 1.4801, "step": 1425},
    {"epoch": 1.9350473612990529, "grad_norm": 2.390625, "learning_rate": 6.420718759900357e-08, "loss": 1.5037, "step": 1430},
    {"epoch": 1.9418132611637349, "grad_norm": 2.28125, "learning_rate": 5.1538286974254225e-08, "loss": 1.4796, "step": 1435},
    {"epoch": 1.9485791610284169, "grad_norm": 2.421875, "learning_rate": 4.025706004760932e-08, "loss": 1.5041, "step": 1440},
    {"epoch": 1.955345060893099, "grad_norm": 2.359375, "learning_rate": 3.036508039520047e-08, "loss": 1.4933, "step": 1445},
    {"epoch": 1.962110960757781, "grad_norm": 2.375, "learning_rate": 2.1863727812254653e-08, "loss": 1.4872, "step": 1450},
    {"epoch": 1.968876860622463, "grad_norm": 2.4375, "learning_rate": 1.4754188120631453e-08, "loss": 1.4953, "step": 1455},
    {"epoch": 1.975642760487145, "grad_norm": 2.296875, "learning_rate": 9.03745300341874e-09, "loss": 1.5199, "step": 1460},
    {"epoch": 1.982408660351827, "grad_norm": 2.328125, "learning_rate": 4.714319866607753e-09, "loss": 1.519, "step": 1465},
    {"epoch": 1.989174560216509, "grad_norm": 2.375, "learning_rate": 1.7853917278631838e-09, "loss": 1.5044, "step": 1470},
    {"epoch": 1.9959404600811907, "grad_norm": 2.4375, "learning_rate": 2.5107713241045994e-10, "loss": 1.5051, "step": 1475},
    {"epoch": 2.0, "eval_loss": 1.691962480545044, "eval_runtime": 28.7807, "eval_samples_per_second": 15.114, "eval_steps_per_second": 1.911, "step": 1478},
    {"epoch": 2.0, "step": 1478, "total_flos": 1.502823063832494e+17, "train_loss": 1.5822677802652403, "train_runtime": 3974.9532, "train_samples_per_second": 2.974, "train_steps_per_second": 0.372}
  ],
  "logging_steps": 5,
  "max_steps": 1478,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.502823063832494e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}