{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.994991652754591,
  "eval_steps": 500,
  "global_step": 897,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 1611.325982263448,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 14.3706,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 1203.709045070288,
      "learning_rate": 1.111111111111111e-06,
      "loss": 14.0095,
      "step": 5
    },
    {
      "epoch": 0.03,
      "grad_norm": 317.62260508814944,
      "learning_rate": 2.222222222222222e-06,
      "loss": 7.0245,
      "step": 10
    },
    {
      "epoch": 0.05,
      "grad_norm": 140.7698798601618,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 3.2406,
      "step": 15
    },
    {
      "epoch": 0.07,
      "grad_norm": 53.214014098542116,
      "learning_rate": 4.444444444444444e-06,
      "loss": 2.3518,
      "step": 20
    },
    {
      "epoch": 0.08,
      "grad_norm": 32.80877069708861,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.9155,
      "step": 25
    },
    {
      "epoch": 0.1,
      "grad_norm": 18.406728342109837,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.7744,
      "step": 30
    },
    {
      "epoch": 0.12,
      "grad_norm": 15.70206361537138,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.6662,
      "step": 35
    },
    {
      "epoch": 0.13,
      "grad_norm": 9.680796676938787,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.5805,
      "step": 40
    },
    {
      "epoch": 0.15,
      "grad_norm": 9.949620133147931,
      "learning_rate": 1e-05,
      "loss": 1.5108,
      "step": 45
    },
    {
      "epoch": 0.17,
      "grad_norm": 17.89818156462526,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.4516,
      "step": 50
    },
    {
      "epoch": 0.18,
      "grad_norm": 9.463874703914655,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 1.386,
      "step": 55
    },
    {
      "epoch": 0.2,
      "grad_norm": 16.600372205018896,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.3472,
      "step": 60
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.591121067230071,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 1.3051,
      "step": 65
    },
    {
      "epoch": 0.23,
      "grad_norm": 17.026560958812688,
      "learning_rate": 1.555555555555556e-05,
      "loss": 1.2846,
      "step": 70
    },
    {
      "epoch": 0.25,
      "grad_norm": 11.244332052116006,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.2202,
      "step": 75
    },
    {
      "epoch": 0.27,
      "grad_norm": 21.171092249623154,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.1422,
      "step": 80
    },
    {
      "epoch": 0.28,
      "grad_norm": 79.69525524449374,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.2502,
      "step": 85
    },
    {
      "epoch": 0.3,
      "grad_norm": 48.3592163956308,
      "learning_rate": 2e-05,
      "loss": 1.3437,
      "step": 90
    },
    {
      "epoch": 0.32,
      "grad_norm": 31.60366391829559,
      "learning_rate": 1.9998105699049984e-05,
      "loss": 1.2415,
      "step": 95
    },
    {
      "epoch": 0.33,
      "grad_norm": 14.941685751929072,
      "learning_rate": 1.9992423513875158e-05,
      "loss": 1.1521,
      "step": 100
    },
    {
      "epoch": 0.35,
      "grad_norm": 10.723912828129833,
      "learning_rate": 1.9982955597229275e-05,
      "loss": 1.1428,
      "step": 105
    },
    {
      "epoch": 0.37,
      "grad_norm": 6.661132660781078,
      "learning_rate": 1.9969705536129033e-05,
      "loss": 1.1019,
      "step": 110
    },
    {
      "epoch": 0.38,
      "grad_norm": 11.84763200258169,
      "learning_rate": 1.9952678350495104e-05,
      "loss": 1.0693,
      "step": 115
    },
    {
      "epoch": 0.4,
      "grad_norm": 5.584094081913566,
      "learning_rate": 1.9931880491250263e-05,
      "loss": 1.06,
      "step": 120
    },
    {
      "epoch": 0.42,
      "grad_norm": 4.687346480648606,
      "learning_rate": 1.990731983787542e-05,
      "loss": 1.0754,
      "step": 125
    },
    {
      "epoch": 0.43,
      "grad_norm": 8.02176533763531,
      "learning_rate": 1.987900569542438e-05,
      "loss": 1.0656,
      "step": 130
    },
    {
      "epoch": 0.45,
      "grad_norm": 5.890194579215236,
      "learning_rate": 1.9846948790998532e-05,
      "loss": 1.0494,
      "step": 135
    },
    {
      "epoch": 0.47,
      "grad_norm": 5.573476232049487,
      "learning_rate": 1.9811161269682776e-05,
      "loss": 1.0246,
      "step": 140
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.9288708942568094,
      "learning_rate": 1.9771656689944238e-05,
      "loss": 1.031,
      "step": 145
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.4500277726115267,
      "learning_rate": 1.9728450018495506e-05,
      "loss": 1.0133,
      "step": 150
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.717186694916964,
      "learning_rate": 1.968155762462433e-05,
      "loss": 0.9788,
      "step": 155
    },
    {
      "epoch": 0.53,
      "grad_norm": 5.124236034673133,
      "learning_rate": 1.9630997273991964e-05,
      "loss": 1.0098,
      "step": 160
    },
    {
      "epoch": 0.55,
      "grad_norm": 6.845307185570511,
      "learning_rate": 1.9576788121902457e-05,
      "loss": 1.0105,
      "step": 165
    },
    {
      "epoch": 0.57,
      "grad_norm": 4.010533684726192,
      "learning_rate": 1.951895070604547e-05,
      "loss": 1.0024,
      "step": 170
    },
    {
      "epoch": 0.58,
      "grad_norm": 5.096428764017947,
      "learning_rate": 1.9457506938715357e-05,
      "loss": 0.9934,
      "step": 175
    },
    {
      "epoch": 0.6,
      "grad_norm": 12.29708093689668,
      "learning_rate": 1.9392480098509488e-05,
      "loss": 0.9722,
      "step": 180
    },
    {
      "epoch": 0.62,
      "grad_norm": 5.10527746898357,
      "learning_rate": 1.93238948215089e-05,
      "loss": 1.005,
      "step": 185
    },
    {
      "epoch": 0.63,
      "grad_norm": 8.993111720295193,
      "learning_rate": 1.9251777091944665e-05,
      "loss": 0.9962,
      "step": 190
    },
    {
      "epoch": 0.65,
      "grad_norm": 3.4437270374749556,
      "learning_rate": 1.9176154232353513e-05,
      "loss": 0.9868,
      "step": 195
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.771850755773128,
      "learning_rate": 1.9097054893226395e-05,
      "loss": 0.9851,
      "step": 200
    },
    {
      "epoch": 0.68,
      "grad_norm": 4.049218130758563,
      "learning_rate": 1.9014509042153964e-05,
      "loss": 0.9738,
      "step": 205
    },
    {
      "epoch": 0.7,
      "grad_norm": 7.7715704988491385,
      "learning_rate": 1.8928547952473037e-05,
      "loss": 1.005,
      "step": 210
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.146573303594628,
      "learning_rate": 1.8839204191418386e-05,
      "loss": 0.9586,
      "step": 215
    },
    {
      "epoch": 0.73,
      "grad_norm": 8.160744647991924,
      "learning_rate": 1.8746511607784298e-05,
      "loss": 0.9808,
      "step": 220
    },
    {
      "epoch": 0.75,
      "grad_norm": 7.706895037397381,
      "learning_rate": 1.865050531910062e-05,
      "loss": 1.0017,
      "step": 225
    },
    {
      "epoch": 0.77,
      "grad_norm": 11.758878906059278,
      "learning_rate": 1.855122169832813e-05,
      "loss": 0.9999,
      "step": 230
    },
    {
      "epoch": 0.78,
      "grad_norm": 6.613719964629243,
      "learning_rate": 1.844869836007825e-05,
      "loss": 0.9546,
      "step": 235
    },
    {
      "epoch": 0.8,
      "grad_norm": 5.5628300646011075,
      "learning_rate": 1.8342974146362397e-05,
      "loss": 0.952,
      "step": 240
    },
    {
      "epoch": 0.82,
      "grad_norm": 8.005725339426444,
      "learning_rate": 1.8234089111876256e-05,
      "loss": 0.9721,
      "step": 245
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.8142262373421842,
      "learning_rate": 1.8122084508824692e-05,
      "loss": 0.9466,
      "step": 250
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.9283785698082263,
      "learning_rate": 1.80070027712929e-05,
      "loss": 0.9714,
      "step": 255
    },
    {
      "epoch": 0.87,
      "grad_norm": 4.77851405459744,
      "learning_rate": 1.7888887499169816e-05,
      "loss": 0.9499,
      "step": 260
    },
    {
      "epoch": 0.88,
      "grad_norm": 4.591252723643397,
      "learning_rate": 1.7767783441629883e-05,
      "loss": 0.9912,
      "step": 265
    },
    {
      "epoch": 0.9,
      "grad_norm": 4.016605910673708,
      "learning_rate": 1.7643736480179353e-05,
      "loss": 0.9566,
      "step": 270
    },
    {
      "epoch": 0.92,
      "grad_norm": 6.373102607823453,
      "learning_rate": 1.7516793611273614e-05,
      "loss": 0.9308,
      "step": 275
    },
    {
      "epoch": 0.93,
      "grad_norm": 5.834120929511878,
      "learning_rate": 1.7387002928512093e-05,
      "loss": 0.9524,
      "step": 280
    },
    {
      "epoch": 0.95,
      "grad_norm": 3.634655228348776,
      "learning_rate": 1.725441360441752e-05,
      "loss": 0.9392,
      "step": 285
    },
    {
      "epoch": 0.97,
      "grad_norm": 6.620105860028147,
      "learning_rate": 1.711907587180642e-05,
      "loss": 0.9351,
      "step": 290
    },
    {
      "epoch": 0.98,
      "grad_norm": 3.670289326633902,
      "learning_rate": 1.698104100475788e-05,
      "loss": 0.9693,
      "step": 295
    },
    {
      "epoch": 1.0,
      "grad_norm": 5.39460086399978,
      "learning_rate": 1.684036129918786e-05,
      "loss": 0.951,
      "step": 300
    },
    {
      "epoch": 1.02,
      "grad_norm": 4.585629392068035,
      "learning_rate": 1.6697090053036344e-05,
      "loss": 0.8574,
      "step": 305
    },
    {
      "epoch": 1.04,
      "grad_norm": 3.7164245648143464,
      "learning_rate": 1.6551281546074863e-05,
      "loss": 0.8819,
      "step": 310
    },
    {
      "epoch": 1.05,
      "grad_norm": 5.697457454364852,
      "learning_rate": 1.6402991019342073e-05,
      "loss": 0.8602,
      "step": 315
    },
    {
      "epoch": 1.07,
      "grad_norm": 4.644169677070983,
      "learning_rate": 1.625227465421511e-05,
      "loss": 0.8301,
      "step": 320
    },
    {
      "epoch": 1.09,
      "grad_norm": 4.037165905809593,
      "learning_rate": 1.60991895511247e-05,
      "loss": 0.8698,
      "step": 325
    },
    {
      "epoch": 1.1,
      "grad_norm": 4.742488771494794,
      "learning_rate": 1.5943793707922086e-05,
      "loss": 0.8465,
      "step": 330
    },
    {
      "epoch": 1.12,
      "grad_norm": 4.112214119398294,
      "learning_rate": 1.5786145997905952e-05,
      "loss": 0.8612,
      "step": 335
    },
    {
      "epoch": 1.14,
      "grad_norm": 9.430841344870588,
      "learning_rate": 1.5626306147517665e-05,
      "loss": 0.8798,
      "step": 340
    },
    {
      "epoch": 1.15,
      "grad_norm": 10.376642310954942,
      "learning_rate": 1.5464334713713312e-05,
      "loss": 0.8482,
      "step": 345
    },
    {
      "epoch": 1.17,
      "grad_norm": 4.046603696090184,
      "learning_rate": 1.5300293061021084e-05,
      "loss": 0.8383,
      "step": 350
    },
    {
      "epoch": 1.19,
      "grad_norm": 5.823206738990728,
      "learning_rate": 1.5134243338292686e-05,
      "loss": 0.8734,
      "step": 355
    },
    {
      "epoch": 1.2,
      "grad_norm": 4.736067876795748,
      "learning_rate": 1.4966248455157622e-05,
      "loss": 0.8449,
      "step": 360
    },
    {
      "epoch": 1.22,
      "grad_norm": 3.810954674146249,
      "learning_rate": 1.4796372058189235e-05,
      "loss": 0.8302,
      "step": 365
    },
    {
      "epoch": 1.24,
      "grad_norm": 4.844425691427299,
      "learning_rate": 1.4624678506791556e-05,
      "loss": 0.8437,
      "step": 370
    },
    {
      "epoch": 1.25,
      "grad_norm": 3.1753997442211954,
      "learning_rate": 1.445123284881609e-05,
      "loss": 0.8671,
      "step": 375
    },
    {
      "epoch": 1.27,
      "grad_norm": 4.573612034944129,
      "learning_rate": 1.4276100795917777e-05,
      "loss": 0.8126,
      "step": 380
    },
    {
      "epoch": 1.29,
      "grad_norm": 3.6271701232281752,
      "learning_rate": 1.409934869865945e-05,
      "loss": 0.8423,
      "step": 385
    },
    {
      "epoch": 1.3,
      "grad_norm": 4.384171797308968,
      "learning_rate": 1.392104352137426e-05,
      "loss": 0.8348,
      "step": 390
    },
    {
      "epoch": 1.32,
      "grad_norm": 3.572132246822777,
      "learning_rate": 1.3741252816795552e-05,
      "loss": 0.8685,
      "step": 395
    },
    {
      "epoch": 1.34,
      "grad_norm": 7.154463900691466,
      "learning_rate": 1.3560044700463824e-05,
      "loss": 0.8234,
      "step": 400
    },
    {
      "epoch": 1.35,
      "grad_norm": 5.1445903649483355,
      "learning_rate": 1.3377487824920459e-05,
      "loss": 0.8603,
      "step": 405
    },
    {
      "epoch": 1.37,
      "grad_norm": 3.417578210350955,
      "learning_rate": 1.3193651353698012e-05,
      "loss": 0.846,
      "step": 410
    },
    {
      "epoch": 1.39,
      "grad_norm": 2.89568583566464,
      "learning_rate": 1.30086049351169e-05,
      "loss": 0.8396,
      "step": 415
    },
    {
      "epoch": 1.4,
      "grad_norm": 3.7362473425942007,
      "learning_rate": 1.2822418675898428e-05,
      "loss": 0.8758,
      "step": 420
    },
    {
      "epoch": 1.42,
      "grad_norm": 4.330928362220086,
      "learning_rate": 1.2635163114604131e-05,
      "loss": 0.8616,
      "step": 425
    },
    {
      "epoch": 1.44,
      "grad_norm": 3.7106724475482826,
      "learning_rate": 1.2446909194911552e-05,
      "loss": 0.8373,
      "step": 430
    },
    {
      "epoch": 1.45,
      "grad_norm": 3.926542815570075,
      "learning_rate": 1.2257728238736468e-05,
      "loss": 0.8383,
      "step": 435
    },
    {
      "epoch": 1.47,
      "grad_norm": 4.47693007964662,
      "learning_rate": 1.2067691919211879e-05,
      "loss": 0.8547,
      "step": 440
    },
    {
      "epoch": 1.49,
      "grad_norm": 3.990520695042587,
      "learning_rate": 1.1876872233533909e-05,
      "loss": 0.8288,
      "step": 445
    },
    {
      "epoch": 1.5,
      "grad_norm": 3.9073482540862754,
      "learning_rate": 1.1685341475684935e-05,
      "loss": 0.8506,
      "step": 450
    },
    {
      "epoch": 1.52,
      "grad_norm": 4.0981061235737295,
      "learning_rate": 1.1493172209044259e-05,
      "loss": 0.8181,
      "step": 455
    },
    {
      "epoch": 1.54,
      "grad_norm": 3.6983475788184945,
      "learning_rate": 1.1300437238896758e-05,
      "loss": 0.8339,
      "step": 460
    },
    {
      "epoch": 1.55,
      "grad_norm": 3.214152827379248,
      "learning_rate": 1.1107209584849845e-05,
      "loss": 0.851,
      "step": 465
    },
    {
      "epoch": 1.57,
      "grad_norm": 3.350988754839917,
      "learning_rate": 1.0913562453169241e-05,
      "loss": 0.8266,
      "step": 470
    },
    {
      "epoch": 1.59,
      "grad_norm": 7.847759662798763,
      "learning_rate": 1.0719569209044047e-05,
      "loss": 0.8429,
      "step": 475
    },
    {
      "epoch": 1.6,
      "grad_norm": 4.0207776210716215,
      "learning_rate": 1.0525303348791599e-05,
      "loss": 0.8498,
      "step": 480
    },
    {
      "epoch": 1.62,
      "grad_norm": 2.7225943916928963,
      "learning_rate": 1.0330838472012617e-05,
      "loss": 0.8215,
      "step": 485
    },
    {
      "epoch": 1.64,
      "grad_norm": 3.6576366170765597,
      "learning_rate": 1.0136248253707267e-05,
      "loss": 0.8307,
      "step": 490
    },
    {
      "epoch": 1.65,
      "grad_norm": 3.4227860041960665,
      "learning_rate": 9.94160641636263e-06,
      "loss": 0.8148,
      "step": 495
    },
    {
      "epoch": 1.67,
      "grad_norm": 3.111038433143253,
      "learning_rate": 9.74698670202218e-06,
      "loss": 0.8443,
      "step": 500
    },
    {
      "epoch": 1.69,
      "grad_norm": 3.252772592031176,
      "learning_rate": 9.552462844347883e-06,
      "loss": 0.8438,
      "step": 505
    },
    {
      "epoch": 1.7,
      "grad_norm": 4.718741450931272,
      "learning_rate": 9.358108540685406e-06,
      "loss": 0.832,
      "step": 510
    },
    {
      "epoch": 1.72,
      "grad_norm": 3.839793400271427,
      "learning_rate": 9.163997424143167e-06,
      "loss": 0.8159,
      "step": 515
    },
    {
      "epoch": 1.74,
      "grad_norm": 3.0905029907583432,
      "learning_rate": 8.970203035695662e-06,
      "loss": 0.8561,
      "step": 520
    },
    {
      "epoch": 1.75,
      "grad_norm": 2.9454863037823777,
      "learning_rate": 8.776798796321715e-06,
      "loss": 0.8482,
      "step": 525
    },
    {
      "epoch": 1.77,
      "grad_norm": 4.171404088039613,
      "learning_rate": 8.583857979188203e-06,
      "loss": 0.8027,
      "step": 530
    },
    {
      "epoch": 1.79,
      "grad_norm": 2.8741497032915415,
      "learning_rate": 8.391453681889772e-06,
      "loss": 0.8452,
      "step": 535
    },
    {
      "epoch": 1.8,
      "grad_norm": 3.618045180794514,
      "learning_rate": 8.199658798755048e-06,
      "loss": 0.846,
      "step": 540
    },
    {
      "epoch": 1.82,
      "grad_norm": 4.5919678070450445,
      "learning_rate": 8.008545993229897e-06,
      "loss": 0.8259,
      "step": 545
    },
    {
      "epoch": 1.84,
      "grad_norm": 3.286397180719563,
      "learning_rate": 7.818187670348133e-06,
      "loss": 0.8292,
      "step": 550
    },
    {
      "epoch": 1.85,
      "grad_norm": 3.273535359111842,
      "learning_rate": 7.628655949300133e-06,
      "loss": 0.8338,
      "step": 555
    },
    {
      "epoch": 1.87,
      "grad_norm": 3.15565303365465,
      "learning_rate": 7.440022636109742e-06,
      "loss": 0.818,
      "step": 560
    },
    {
      "epoch": 1.89,
      "grad_norm": 3.1548763199568035,
      "learning_rate": 7.2523591964298345e-06,
      "loss": 0.8485,
      "step": 565
    },
    {
      "epoch": 1.9,
      "grad_norm": 3.1540649270721524,
      "learning_rate": 7.065736728466832e-06,
      "loss": 0.819,
      "step": 570
    },
    {
      "epoch": 1.92,
      "grad_norm": 4.373590862563539,
      "learning_rate": 6.880225936044402e-06,
      "loss": 0.8031,
      "step": 575
    },
    {
      "epoch": 1.94,
      "grad_norm": 3.1586070204270467,
      "learning_rate": 6.695897101816606e-06,
      "loss": 0.8406,
      "step": 580
    },
    {
      "epoch": 1.95,
      "grad_norm": 2.9568635260100127,
      "learning_rate": 6.512820060640608e-06,
      "loss": 0.835,
      "step": 585
    },
    {
      "epoch": 1.97,
      "grad_norm": 3.0203617695232663,
      "learning_rate": 6.331064173119008e-06,
      "loss": 0.8494,
      "step": 590
    },
    {
      "epoch": 1.99,
      "grad_norm": 3.1117798744102574,
      "learning_rate": 6.150698299321889e-06,
      "loss": 0.8308,
      "step": 595
    },
    {
      "epoch": 2.0,
      "grad_norm": 4.55872308952704,
      "learning_rate": 5.971790772698467e-06,
      "loss": 0.8035,
      "step": 600
    },
    {
      "epoch": 2.02,
      "grad_norm": 4.2821019574649535,
      "learning_rate": 5.794409374188272e-06,
      "loss": 0.7114,
      "step": 605
    },
    {
      "epoch": 2.04,
      "grad_norm": 3.707456621907476,
      "learning_rate": 5.61862130654165e-06,
      "loss": 0.7153,
      "step": 610
    },
    {
      "epoch": 2.05,
      "grad_norm": 3.2111867758546278,
      "learning_rate": 5.444493168859304e-06,
      "loss": 0.7127,
      "step": 615
    },
    {
      "epoch": 2.07,
      "grad_norm": 2.981834951561055,
      "learning_rate": 5.272090931360564e-06,
      "loss": 0.7152,
      "step": 620
    },
    {
      "epoch": 2.09,
      "grad_norm": 3.261244591092665,
      "learning_rate": 5.101479910389888e-06,
      "loss": 0.7187,
      "step": 625
    },
    {
      "epoch": 2.1,
      "grad_norm": 3.572508474876337,
      "learning_rate": 4.932724743671089e-06,
      "loss": 0.6839,
      "step": 630
    },
    {
      "epoch": 2.12,
      "grad_norm": 4.094077691261136,
      "learning_rate": 4.765889365818708e-06,
      "loss": 0.6999,
      "step": 635
    },
    {
      "epoch": 2.14,
      "grad_norm": 3.1272831710472118,
      "learning_rate": 4.601036984115684e-06,
      "loss": 0.7086,
      "step": 640
    },
    {
      "epoch": 2.15,
      "grad_norm": 2.923329574215811,
      "learning_rate": 4.438230054566678e-06,
      "loss": 0.7085,
      "step": 645
    },
    {
      "epoch": 2.17,
      "grad_norm": 3.0901398496943226,
      "learning_rate": 4.277530258235955e-06,
      "loss": 0.701,
      "step": 650
    },
    {
      "epoch": 2.19,
      "grad_norm": 3.4929443326902465,
      "learning_rate": 4.118998477878879e-06,
      "loss": 0.7049,
      "step": 655
    },
    {
      "epoch": 2.2,
      "grad_norm": 3.6504428413222376,
      "learning_rate": 3.96269477487588e-06,
      "loss": 0.7214,
      "step": 660
    },
    {
      "epoch": 2.22,
      "grad_norm": 3.2341076651772975,
      "learning_rate": 3.8086783664775827e-06,
      "loss": 0.7085,
      "step": 665
    },
    {
      "epoch": 2.24,
      "grad_norm": 3.092474822298113,
      "learning_rate": 3.657007603369728e-06,
      "loss": 0.6767,
      "step": 670
    },
    {
      "epoch": 2.25,
      "grad_norm": 2.947080196219785,
      "learning_rate": 3.5077399475664474e-06,
      "loss": 0.7076,
      "step": 675
    },
    {
      "epoch": 2.27,
      "grad_norm": 3.6087538594951662,
      "learning_rate": 3.360931950640185e-06,
      "loss": 0.6975,
      "step": 680
    },
    {
      "epoch": 2.29,
      "grad_norm": 2.929061678667629,
      "learning_rate": 3.2166392322965423e-06,
      "loss": 0.708,
      "step": 685
    },
    {
      "epoch": 2.3,
      "grad_norm": 2.8841689757419875,
      "learning_rate": 3.074916459302211e-06,
      "loss": 0.6923,
      "step": 690
    },
    {
      "epoch": 2.32,
      "grad_norm": 2.9696900618191338,
      "learning_rate": 2.935817324773893e-06,
      "loss": 0.7001,
      "step": 695
    },
    {
      "epoch": 2.34,
      "grad_norm": 2.6611962094828363,
      "learning_rate": 2.799394527836129e-06,
      "loss": 0.7017,
      "step": 700
    },
    {
      "epoch": 2.35,
      "grad_norm": 3.454056655854251,
      "learning_rate": 2.665699753655684e-06,
      "loss": 0.7036,
      "step": 705
    },
    {
      "epoch": 2.37,
      "grad_norm": 2.951382811612529,
      "learning_rate": 2.5347836538601113e-06,
      "loss": 0.7128,
      "step": 710
    },
    {
      "epoch": 2.39,
      "grad_norm": 2.720458967552173,
      "learning_rate": 2.406695827347848e-06,
      "loss": 0.6813,
      "step": 715
    },
    {
      "epoch": 2.4,
      "grad_norm": 3.2947724618805436,
      "learning_rate": 2.281484801497186e-06,
      "loss": 0.701,
      "step": 720
    },
    {
      "epoch": 2.42,
      "grad_norm": 3.3925114406979,
      "learning_rate": 2.1591980137811684e-06,
      "loss": 0.7131,
      "step": 725
    },
    {
      "epoch": 2.44,
      "grad_norm": 2.762867665401941,
      "learning_rate": 2.0398817937954275e-06,
      "loss": 0.6984,
      "step": 730
    },
    {
      "epoch": 2.45,
      "grad_norm": 2.7010469113237434,
      "learning_rate": 1.923581345705736e-06,
      "loss": 0.7096,
      "step": 735
    },
    {
      "epoch": 2.47,
      "grad_norm": 2.852400843485154,
      "learning_rate": 1.8103407311219523e-06,
      "loss": 0.7032,
      "step": 740
    },
    {
      "epoch": 2.49,
      "grad_norm": 2.86250818820308,
      "learning_rate": 1.7002028524048354e-06,
      "loss": 0.7293,
      "step": 745
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.51712023734419,
      "learning_rate": 1.5932094364120453e-06,
      "loss": 0.7012,
      "step": 750
    },
    {
      "epoch": 2.52,
      "grad_norm": 2.7437779027345215,
      "learning_rate": 1.489401018689488e-06,
      "loss": 0.6839,
      "step": 755
    },
    {
      "epoch": 2.54,
      "grad_norm": 2.661627012464701,
      "learning_rate": 1.3888169281140284e-06,
      "loss": 0.6939,
      "step": 760
    },
    {
      "epoch": 2.55,
      "grad_norm": 2.732001611716385,
      "learning_rate": 1.291495271993337e-06,
      "loss": 0.688,
      "step": 765
    },
    {
      "epoch": 2.57,
      "grad_norm": 2.6615007947383553,
      "learning_rate": 1.1974729216285386e-06,
      "loss": 0.7018,
      "step": 770
    },
    {
      "epoch": 2.59,
      "grad_norm": 2.924733067359529,
      "learning_rate": 1.1067854983451575e-06,
      "loss": 0.7067,
      "step": 775
    },
    {
      "epoch": 2.6,
      "grad_norm": 2.6258772657245557,
      "learning_rate": 1.0194673599976134e-06,
      "loss": 0.7105,
      "step": 780
    },
    {
      "epoch": 2.62,
      "grad_norm": 2.516616483442641,
      "learning_rate": 9.355515879523858e-07,
      "loss": 0.7135,
      "step": 785
    },
    {
      "epoch": 2.64,
      "grad_norm": 2.7835305971129163,
      "learning_rate": 8.550699745548196e-07,
      "loss": 0.6885,
      "step": 790
    },
    {
      "epoch": 2.65,
      "grad_norm": 2.615176282326374,
      "learning_rate": 7.780530110842566e-07,
      "loss": 0.6819,
      "step": 795
    },
    {
      "epoch": 2.67,
      "grad_norm": 2.654709241244524,
      "learning_rate": 7.045298762021125e-07,
      "loss": 0.7008,
      "step": 800
    },
    {
      "epoch": 2.69,
      "grad_norm": 2.7964338501174395,
      "learning_rate": 6.345284248972383e-07,
      "loss": 0.6944,
      "step": 805
    },
    {
      "epoch": 2.7,
      "grad_norm": 2.6124049003030243,
      "learning_rate": 5.680751779327742e-07,
      "loss": 0.709,
      "step": 810
    },
    {
      "epoch": 2.72,
      "grad_norm": 2.798593198334347,
      "learning_rate": 5.05195311798491e-07,
      "loss": 0.7001,
      "step": 815
    },
    {
      "epoch": 2.74,
      "grad_norm": 2.6899441693791037,
      "learning_rate": 4.4591264917242195e-07,
      "loss": 0.6996,
      "step": 820
    },
    {
      "epoch": 2.75,
      "grad_norm": 2.4467811439684475,
      "learning_rate": 3.9024964989539227e-07,
      "loss": 0.7111,
      "step": 825
    },
    {
      "epoch": 2.77,
      "grad_norm": 2.642511218648128,
      "learning_rate": 3.3822740246188477e-07,
      "loss": 0.7003,
      "step": 830
    },
    {
      "epoch": 2.79,
      "grad_norm": 2.5965636823764444,
      "learning_rate": 2.8986561603044694e-07,
      "loss": 0.6972,
      "step": 835
    },
    {
      "epoch": 2.8,
      "grad_norm": 2.531198285866803,
      "learning_rate": 2.4518261295667255e-07,
      "loss": 0.7094,
      "step": 840
    },
    {
      "epoch": 2.82,
      "grad_norm": 2.4502750845690455,
      "learning_rate": 2.0419532185159796e-07,
      "loss": 0.6844,
      "step": 845
    },
    {
      "epoch": 2.84,
      "grad_norm": 2.6177312094207346,
      "learning_rate": 1.6691927116812002e-07,
      "loss": 0.6924,
      "step": 850
    },
    {
      "epoch": 2.85,
      "grad_norm": 2.7461025264636763,
      "learning_rate": 1.3336858331787993e-07,
      "loss": 0.6944,
      "step": 855
    },
    {
      "epoch": 2.87,
      "grad_norm": 2.5081604238233237,
      "learning_rate": 1.0355596932085432e-07,
      "loss": 0.7123,
      "step": 860
    },
    {
      "epoch": 2.89,
      "grad_norm": 2.4841520023484462,
      "learning_rate": 7.749272398964613e-08,
      "loss": 0.6777,
      "step": 865
    },
    {
      "epoch": 2.9,
      "grad_norm": 2.5778856215526447,
      "learning_rate": 5.518872165033329e-08,
      "loss": 0.695,
      "step": 870
    },
    {
      "epoch": 2.92,
      "grad_norm": 2.572814592379813,
      "learning_rate": 3.6652412401478875e-08,
      "loss": 0.6948,
      "step": 875
    },
    {
      "epoch": 2.94,
      "grad_norm": 2.4573450802780363,
      "learning_rate": 2.1890818912728706e-08,
      "loss": 0.6865,
      "step": 880
    },
    {
      "epoch": 2.95,
      "grad_norm": 2.5566553348831174,
      "learning_rate": 1.0909533764194013e-08,
      "loss": 0.6825,
      "step": 885
    },
    {
      "epoch": 2.97,
      "grad_norm": 2.429805674481568,
      "learning_rate": 3.7127173276563234e-09,
      "loss": 0.7116,
      "step": 890
    },
    {
      "epoch": 2.99,
      "grad_norm": 2.6137017455070115,
      "learning_rate": 3.0309619035495675e-10,
      "loss": 0.7018,
      "step": 895
    },
    {
      "epoch": 2.99,
      "step": 897,
      "total_flos": 246978202042368.0,
      "train_loss": 1.0102584746104553,
      "train_runtime": 5029.5541,
      "train_samples_per_second": 22.861,
      "train_steps_per_second": 0.178
    }
  ],
  "logging_steps": 5,
  "max_steps": 897,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 246978202042368.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}