{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.036281837312241494,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007256367462448298,
      "grad_norm": 13.1875,
      "learning_rate": 5e-06,
      "loss": 3.553,
      "step": 10
    },
    {
      "epoch": 0.0014512734924896596,
      "grad_norm": 9.375,
      "learning_rate": 1e-05,
      "loss": 3.5658,
      "step": 20
    },
    {
      "epoch": 0.0021769102387344894,
      "grad_norm": 8.8125,
      "learning_rate": 1.5e-05,
      "loss": 3.4669,
      "step": 30
    },
    {
      "epoch": 0.0029025469849793192,
      "grad_norm": 13.25,
      "learning_rate": 2e-05,
      "loss": 3.5007,
      "step": 40
    },
    {
      "epoch": 0.003628183731224149,
      "grad_norm": 9.3125,
      "learning_rate": 2.5e-05,
      "loss": 3.5062,
      "step": 50
    },
    {
      "epoch": 0.004353820477468979,
      "grad_norm": 9.9375,
      "learning_rate": 3e-05,
      "loss": 3.4178,
      "step": 60
    },
    {
      "epoch": 0.005079457223713809,
      "grad_norm": 17.125,
      "learning_rate": 3.5e-05,
      "loss": 3.4136,
      "step": 70
    },
    {
      "epoch": 0.0058050939699586385,
      "grad_norm": 17.5,
      "learning_rate": 4e-05,
      "loss": 3.3792,
      "step": 80
    },
    {
      "epoch": 0.006530730716203468,
      "grad_norm": 17.0,
      "learning_rate": 4.5e-05,
      "loss": 3.3684,
      "step": 90
    },
    {
      "epoch": 0.007256367462448298,
      "grad_norm": 21.875,
      "learning_rate": 5e-05,
      "loss": 3.2698,
      "step": 100
    },
    {
      "epoch": 0.007982004208693128,
      "grad_norm": 32.5,
      "learning_rate": 4.9999934086574596e-05,
      "loss": 3.1523,
      "step": 110
    },
    {
      "epoch": 0.008707640954937958,
      "grad_norm": 73.5,
      "learning_rate": 4.9999736346645943e-05,
      "loss": 3.0884,
      "step": 120
    },
    {
      "epoch": 0.009433277701182788,
      "grad_norm": 34.75,
      "learning_rate": 4.999940678125673e-05,
      "loss": 2.9507,
      "step": 130
    },
    {
      "epoch": 0.010158914447427617,
      "grad_norm": 47.5,
      "learning_rate": 4.9998945392144796e-05,
      "loss": 2.8659,
      "step": 140
    },
    {
      "epoch": 0.010884551193672447,
      "grad_norm": 35.25,
      "learning_rate": 4.999835218174307e-05,
      "loss": 2.9846,
      "step": 150
    },
    {
      "epoch": 0.011610187939917277,
      "grad_norm": 27.5,
      "learning_rate": 4.99976271531796e-05,
      "loss": 2.8913,
      "step": 160
    },
    {
      "epoch": 0.012335824686162107,
      "grad_norm": 38.5,
      "learning_rate": 4.9996770310277506e-05,
      "loss": 2.8982,
      "step": 170
    },
    {
      "epoch": 0.013061461432406937,
      "grad_norm": 52.0,
      "learning_rate": 4.9995781657555e-05,
      "loss": 2.7165,
      "step": 180
    },
    {
      "epoch": 0.013787098178651766,
      "grad_norm": 34.75,
      "learning_rate": 4.99946612002253e-05,
      "loss": 2.4166,
      "step": 190
    },
    {
      "epoch": 0.014512734924896596,
      "grad_norm": 40.25,
      "learning_rate": 4.9993408944196676e-05,
      "loss": 2.2161,
      "step": 200
    },
    {
      "epoch": 0.015238371671141426,
      "grad_norm": 39.25,
      "learning_rate": 4.9992024896072364e-05,
      "loss": 1.7825,
      "step": 210
    },
    {
      "epoch": 0.015964008417386256,
      "grad_norm": 37.25,
      "learning_rate": 4.999050906315055e-05,
      "loss": 1.46,
      "step": 220
    },
    {
      "epoch": 0.016689645163631087,
      "grad_norm": 13.625,
      "learning_rate": 4.998886145342434e-05,
      "loss": 1.3157,
      "step": 230
    },
    {
      "epoch": 0.017415281909875915,
      "grad_norm": 10.4375,
      "learning_rate": 4.9987082075581684e-05,
      "loss": 1.1408,
      "step": 240
    },
    {
      "epoch": 0.018140918656120747,
      "grad_norm": 14.0,
      "learning_rate": 4.9985170939005386e-05,
      "loss": 0.8782,
      "step": 250
    },
    {
      "epoch": 0.018866555402365575,
      "grad_norm": 22.0,
      "learning_rate": 4.998312805377302e-05,
      "loss": 0.9109,
      "step": 260
    },
    {
      "epoch": 0.019592192148610407,
      "grad_norm": 12.625,
      "learning_rate": 4.998095343065685e-05,
      "loss": 0.7414,
      "step": 270
    },
    {
      "epoch": 0.020317828894855235,
      "grad_norm": 15.8125,
      "learning_rate": 4.997864708112384e-05,
      "loss": 0.7926,
      "step": 280
    },
    {
      "epoch": 0.021043465641100066,
      "grad_norm": 16.25,
      "learning_rate": 4.997620901733554e-05,
      "loss": 0.7382,
      "step": 290
    },
    {
      "epoch": 0.021769102387344894,
      "grad_norm": 22.0,
      "learning_rate": 4.997363925214803e-05,
      "loss": 0.7158,
      "step": 300
    },
    {
      "epoch": 0.022494739133589726,
      "grad_norm": 13.375,
      "learning_rate": 4.9970937799111896e-05,
      "loss": 0.526,
      "step": 310
    },
    {
      "epoch": 0.023220375879834554,
      "grad_norm": 12.9375,
      "learning_rate": 4.996810467247207e-05,
      "loss": 0.6137,
      "step": 320
    },
    {
      "epoch": 0.023946012626079385,
      "grad_norm": 6.78125,
      "learning_rate": 4.9965139887167856e-05,
      "loss": 0.4864,
      "step": 330
    },
    {
      "epoch": 0.024671649372324213,
      "grad_norm": 10.25,
      "learning_rate": 4.996204345883278e-05,
      "loss": 0.6921,
      "step": 340
    },
    {
      "epoch": 0.025397286118569045,
      "grad_norm": 6.3125,
      "learning_rate": 4.9958815403794546e-05,
      "loss": 0.5826,
      "step": 350
    },
    {
      "epoch": 0.026122922864813873,
      "grad_norm": 10.625,
      "learning_rate": 4.995545573907492e-05,
      "loss": 0.5838,
      "step": 360
    },
    {
      "epoch": 0.026848559611058705,
      "grad_norm": 17.25,
      "learning_rate": 4.995196448238966e-05,
      "loss": 0.5941,
      "step": 370
    },
    {
      "epoch": 0.027574196357303533,
      "grad_norm": 9.875,
      "learning_rate": 4.9948341652148436e-05,
      "loss": 0.5055,
      "step": 380
    },
    {
      "epoch": 0.028299833103548364,
      "grad_norm": 21.25,
      "learning_rate": 4.994458726745468e-05,
      "loss": 0.6114,
      "step": 390
    },
    {
      "epoch": 0.029025469849793192,
      "grad_norm": 3.375,
      "learning_rate": 4.9940701348105554e-05,
      "loss": 0.5372,
      "step": 400
    },
    {
      "epoch": 0.029751106596038024,
      "grad_norm": 12.625,
      "learning_rate": 4.99366839145918e-05,
      "loss": 0.4815,
      "step": 410
    },
    {
      "epoch": 0.030476743342282852,
      "grad_norm": 10.8125,
      "learning_rate": 4.993253498809762e-05,
      "loss": 0.4903,
      "step": 420
    },
    {
      "epoch": 0.031202380088527683,
      "grad_norm": 9.0,
      "learning_rate": 4.9928254590500646e-05,
      "loss": 0.5927,
      "step": 430
    },
    {
      "epoch": 0.03192801683477251,
      "grad_norm": 6.71875,
      "learning_rate": 4.9923842744371707e-05,
      "loss": 0.4424,
      "step": 440
    },
    {
      "epoch": 0.03265365358101734,
      "grad_norm": 5.0,
      "learning_rate": 4.99192994729748e-05,
      "loss": 0.362,
      "step": 450
    },
    {
      "epoch": 0.033379290327262175,
      "grad_norm": 8.625,
      "learning_rate": 4.991462480026693e-05,
      "loss": 0.593,
      "step": 460
    },
    {
      "epoch": 0.034104927073507,
      "grad_norm": 4.53125,
      "learning_rate": 4.9909818750898e-05,
      "loss": 0.4769,
      "step": 470
    },
    {
      "epoch": 0.03483056381975183,
      "grad_norm": 11.125,
      "learning_rate": 4.990488135021065e-05,
      "loss": 0.4455,
      "step": 480
    },
    {
      "epoch": 0.03555620056599666,
      "grad_norm": 8.375,
      "learning_rate": 4.989981262424017e-05,
      "loss": 0.4713,
      "step": 490
    },
    {
      "epoch": 0.036281837312241494,
      "grad_norm": 7.75,
      "learning_rate": 4.989461259971432e-05,
      "loss": 0.2778,
      "step": 500
    }
  ],
  "logging_steps": 10,
  "max_steps": 13781,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 0.0,
  "train_batch_size": 5,
  "trial_name": null,
  "trial_params": null
}