{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.05,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 83.0,
      "learning_rate": 2e-08,
      "loss": 7.8413,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 82.0,
      "learning_rate": 4e-08,
      "loss": 7.8305,
      "step": 20
    },
    {
      "epoch": 0.0,
      "grad_norm": 84.0,
      "learning_rate": 6e-08,
      "loss": 7.8515,
      "step": 30
    },
    {
      "epoch": 0.0,
      "grad_norm": 83.5,
      "learning_rate": 8e-08,
      "loss": 7.8284,
      "step": 40
    },
    {
      "epoch": 0.0,
      "grad_norm": 82.0,
      "learning_rate": 1e-07,
      "loss": 7.8162,
      "step": 50
    },
    {
      "epoch": 0.0,
      "grad_norm": 83.5,
      "learning_rate": 1.2e-07,
      "loss": 7.8523,
      "step": 60
    },
    {
      "epoch": 0.0,
      "grad_norm": 81.0,
      "learning_rate": 1.4e-07,
      "loss": 7.8326,
      "step": 70
    },
    {
      "epoch": 0.0,
      "grad_norm": 83.0,
      "learning_rate": 1.6e-07,
      "loss": 7.8242,
      "step": 80
    },
    {
      "epoch": 0.0,
      "grad_norm": 81.5,
      "learning_rate": 1.8e-07,
      "loss": 7.8319,
      "step": 90
    },
    {
      "epoch": 0.01,
      "grad_norm": 81.5,
      "learning_rate": 2e-07,
      "loss": 7.8175,
      "step": 100
    },
    {
      "epoch": 0.01,
      "grad_norm": 83.0,
      "learning_rate": 2.1999999999999998e-07,
      "loss": 7.8136,
      "step": 110
    },
    {
      "epoch": 0.01,
      "grad_norm": 81.5,
      "learning_rate": 2.4e-07,
      "loss": 7.846,
      "step": 120
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.5,
      "learning_rate": 2.6e-07,
      "loss": 7.8039,
      "step": 130
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.5,
      "learning_rate": 2.8e-07,
      "loss": 7.8113,
      "step": 140
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.5,
      "learning_rate": 3e-07,
      "loss": 7.7837,
      "step": 150
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 3.2e-07,
      "loss": 7.7796,
      "step": 160
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.0,
      "learning_rate": 3.4000000000000003e-07,
      "loss": 7.8044,
      "step": 170
    },
    {
      "epoch": 0.01,
      "grad_norm": 81.5,
      "learning_rate": 3.6e-07,
      "loss": 7.7643,
      "step": 180
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.0,
      "learning_rate": 3.7999999999999996e-07,
      "loss": 7.7304,
      "step": 190
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.0,
      "learning_rate": 4e-07,
      "loss": 7.7071,
      "step": 200
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 4.1999999999999995e-07,
      "loss": 7.7164,
      "step": 210
    },
    {
      "epoch": 0.01,
      "grad_norm": 82.5,
      "learning_rate": 4.3999999999999997e-07,
      "loss": 7.7243,
      "step": 220
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 4.6e-07,
      "loss": 7.6916,
      "step": 230
    },
    {
      "epoch": 0.01,
      "grad_norm": 81.0,
      "learning_rate": 4.8e-07,
      "loss": 7.6749,
      "step": 240
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 5e-07,
      "loss": 7.6817,
      "step": 250
    },
    {
      "epoch": 0.01,
      "grad_norm": 81.0,
      "learning_rate": 5.2e-07,
      "loss": 7.6871,
      "step": 260
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 5.4e-07,
      "loss": 7.6558,
      "step": 270
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.5,
      "learning_rate": 5.6e-07,
      "loss": 7.5715,
      "step": 280
    },
    {
      "epoch": 0.01,
      "grad_norm": 79.0,
      "learning_rate": 5.8e-07,
      "loss": 7.5422,
      "step": 290
    },
    {
      "epoch": 0.01,
      "grad_norm": 80.0,
      "learning_rate": 6e-07,
      "loss": 7.4936,
      "step": 300
    },
    {
      "epoch": 0.02,
      "grad_norm": 78.5,
      "learning_rate": 6.2e-07,
      "loss": 7.4695,
      "step": 310
    },
    {
      "epoch": 0.02,
      "grad_norm": 79.0,
      "learning_rate": 6.4e-07,
      "loss": 7.4154,
      "step": 320
    },
    {
      "epoch": 0.02,
      "grad_norm": 78.0,
      "learning_rate": 6.6e-07,
      "loss": 7.3668,
      "step": 330
    },
    {
      "epoch": 0.02,
      "grad_norm": 79.0,
      "learning_rate": 6.800000000000001e-07,
      "loss": 7.3532,
      "step": 340
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 7e-07,
      "loss": 7.2727,
      "step": 350
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 7.2e-07,
      "loss": 7.2473,
      "step": 360
    },
    {
      "epoch": 0.02,
      "grad_norm": 78.0,
      "learning_rate": 7.4e-07,
      "loss": 7.236,
      "step": 370
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 7.599999999999999e-07,
      "loss": 7.1711,
      "step": 380
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 7.799999999999999e-07,
      "loss": 7.1217,
      "step": 390
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 8e-07,
      "loss": 7.1038,
      "step": 400
    },
    {
      "epoch": 0.02,
      "grad_norm": 78.0,
      "learning_rate": 8.199999999999999e-07,
      "loss": 7.0849,
      "step": 410
    },
    {
      "epoch": 0.02,
      "grad_norm": 78.0,
      "learning_rate": 8.399999999999999e-07,
      "loss": 7.0331,
      "step": 420
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.0,
      "learning_rate": 8.599999999999999e-07,
      "loss": 6.9839,
      "step": 430
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.0,
      "learning_rate": 8.799999999999999e-07,
      "loss": 6.962,
      "step": 440
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.0,
      "learning_rate": 9e-07,
      "loss": 6.9108,
      "step": 450
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 9.2e-07,
      "loss": 6.8641,
      "step": 460
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.5,
      "learning_rate": 9.399999999999999e-07,
      "loss": 6.7994,
      "step": 470
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.0,
      "learning_rate": 9.6e-07,
      "loss": 6.7141,
      "step": 480
    },
    {
      "epoch": 0.02,
      "grad_norm": 77.0,
      "learning_rate": 9.8e-07,
      "loss": 6.6188,
      "step": 490
    },
    {
      "epoch": 0.03,
      "grad_norm": 77.5,
      "learning_rate": 1e-06,
      "loss": 6.4929,
      "step": 500
    },
    {
      "epoch": 0.03,
      "eval_loss": 6.463568210601807,
      "eval_runtime": 65.3176,
      "eval_samples_per_second": 15.31,
      "eval_steps_per_second": 15.31,
      "step": 500
    },
    {
      "epoch": 0.03,
      "grad_norm": 77.5,
      "learning_rate": 9.933333333333333e-07,
      "loss": 6.4003,
      "step": 510
    },
    {
      "epoch": 0.03,
      "grad_norm": 77.5,
      "learning_rate": 9.866666666666666e-07,
      "loss": 6.2969,
      "step": 520
    },
    {
      "epoch": 0.03,
      "grad_norm": 78.5,
      "learning_rate": 9.8e-07,
      "loss": 6.19,
      "step": 530
    },
    {
      "epoch": 0.03,
      "grad_norm": 78.0,
      "learning_rate": 9.733333333333333e-07,
      "loss": 6.0401,
      "step": 540
    },
    {
      "epoch": 0.03,
      "grad_norm": 79.0,
      "learning_rate": 9.666666666666666e-07,
      "loss": 5.9727,
      "step": 550
    },
    {
      "epoch": 0.03,
      "grad_norm": 80.0,
      "learning_rate": 9.6e-07,
      "loss": 5.821,
      "step": 560
    },
    {
      "epoch": 0.03,
      "grad_norm": 80.5,
      "learning_rate": 9.533333333333333e-07,
      "loss": 5.6864,
      "step": 570
    },
    {
      "epoch": 0.03,
      "grad_norm": 80.5,
      "learning_rate": 9.466666666666666e-07,
      "loss": 5.5677,
      "step": 580
    },
    {
      "epoch": 0.03,
      "grad_norm": 81.5,
      "learning_rate": 9.399999999999999e-07,
      "loss": 5.4587,
      "step": 590
    },
    {
      "epoch": 0.03,
      "grad_norm": 82.0,
      "learning_rate": 9.333333333333333e-07,
      "loss": 5.3674,
      "step": 600
    },
    {
      "epoch": 0.03,
      "grad_norm": 83.5,
      "learning_rate": 9.266666666666665e-07,
      "loss": 5.2343,
      "step": 610
    },
    {
      "epoch": 0.03,
      "grad_norm": 83.5,
      "learning_rate": 9.2e-07,
      "loss": 5.1118,
      "step": 620
    },
    {
      "epoch": 0.03,
      "grad_norm": 83.5,
      "learning_rate": 9.133333333333333e-07,
      "loss": 5.0416,
      "step": 630
    },
    {
      "epoch": 0.03,
      "grad_norm": 85.5,
      "learning_rate": 9.066666666666665e-07,
      "loss": 4.9311,
      "step": 640
    },
    {
      "epoch": 0.03,
      "grad_norm": 86.0,
      "learning_rate": 9e-07,
      "loss": 4.8409,
      "step": 650
    },
    {
      "epoch": 0.03,
      "grad_norm": 86.5,
      "learning_rate": 8.933333333333333e-07,
      "loss": 4.7077,
      "step": 660
    },
    {
      "epoch": 0.03,
      "grad_norm": 87.5,
      "learning_rate": 8.866666666666667e-07,
      "loss": 4.6436,
      "step": 670
    },
    {
      "epoch": 0.03,
      "grad_norm": 86.0,
      "learning_rate": 8.799999999999999e-07,
      "loss": 4.5543,
      "step": 680
    },
    {
      "epoch": 0.03,
      "grad_norm": 87.0,
      "learning_rate": 8.733333333333333e-07,
      "loss": 4.473,
      "step": 690
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.0,
      "learning_rate": 8.666666666666667e-07,
      "loss": 4.373,
      "step": 700
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.5,
      "learning_rate": 8.599999999999999e-07,
      "loss": 4.3226,
      "step": 710
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.5,
      "learning_rate": 8.533333333333334e-07,
      "loss": 4.2063,
      "step": 720
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.5,
      "learning_rate": 8.466666666666667e-07,
      "loss": 4.1628,
      "step": 730
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.5,
      "learning_rate": 8.399999999999999e-07,
      "loss": 4.0588,
      "step": 740
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.0,
      "learning_rate": 8.333333333333333e-07,
      "loss": 4.0117,
      "step": 750
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.0,
      "learning_rate": 8.266666666666667e-07,
      "loss": 3.9443,
      "step": 760
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.5,
      "learning_rate": 8.199999999999999e-07,
      "loss": 3.8509,
      "step": 770
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.0,
      "learning_rate": 8.133333333333333e-07,
      "loss": 3.7851,
      "step": 780
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.5,
      "learning_rate": 8.066666666666666e-07,
      "loss": 3.7559,
      "step": 790
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.0,
      "learning_rate": 8e-07,
      "loss": 3.6867,
      "step": 800
    },
    {
      "epoch": 0.04,
      "grad_norm": 88.0,
      "learning_rate": 7.933333333333333e-07,
      "loss": 3.6217,
      "step": 810
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.5,
      "learning_rate": 7.866666666666666e-07,
      "loss": 3.5678,
      "step": 820
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.5,
      "learning_rate": 7.799999999999999e-07,
      "loss": 3.5387,
      "step": 830
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.5,
      "learning_rate": 7.733333333333333e-07,
      "loss": 3.4999,
      "step": 840
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.0,
      "learning_rate": 7.666666666666667e-07,
      "loss": 3.4263,
      "step": 850
    },
    {
      "epoch": 0.04,
      "grad_norm": 87.0,
      "learning_rate": 7.599999999999999e-07,
      "loss": 3.4185,
      "step": 860
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.0,
      "learning_rate": 7.533333333333332e-07,
      "loss": 3.3753,
      "step": 870
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.0,
      "learning_rate": 7.466666666666667e-07,
      "loss": 3.318,
      "step": 880
    },
    {
      "epoch": 0.04,
      "grad_norm": 85.5,
      "learning_rate": 7.4e-07,
      "loss": 3.2846,
      "step": 890
    },
    {
      "epoch": 0.04,
      "grad_norm": 86.5,
      "learning_rate": 7.333333333333332e-07,
      "loss": 3.2757,
      "step": 900
    },
    {
      "epoch": 0.05,
      "grad_norm": 85.0,
      "learning_rate": 7.266666666666667e-07,
      "loss": 3.2204,
      "step": 910
    },
    {
      "epoch": 0.05,
      "grad_norm": 87.0,
      "learning_rate": 7.2e-07,
      "loss": 3.1933,
      "step": 920
    },
    {
      "epoch": 0.05,
      "grad_norm": 85.0,
      "learning_rate": 7.133333333333333e-07,
      "loss": 3.1404,
      "step": 930
    },
    {
      "epoch": 0.05,
      "grad_norm": 84.5,
      "learning_rate": 7.066666666666666e-07,
      "loss": 3.1084,
      "step": 940
    },
    {
      "epoch": 0.05,
      "grad_norm": 83.5,
      "learning_rate": 7e-07,
      "loss": 3.0518,
      "step": 950
    },
    {
      "epoch": 0.05,
      "grad_norm": 84.5,
      "learning_rate": 6.933333333333333e-07,
      "loss": 3.0331,
      "step": 960
    },
    {
      "epoch": 0.05,
      "grad_norm": 84.0,
      "learning_rate": 6.866666666666666e-07,
      "loss": 3.0252,
      "step": 970
    },
    {
      "epoch": 0.05,
      "grad_norm": 83.0,
      "learning_rate": 6.800000000000001e-07,
      "loss": 2.9718,
      "step": 980
    },
    {
      "epoch": 0.05,
      "grad_norm": 84.0,
      "learning_rate": 6.733333333333333e-07,
      "loss": 2.9666,
      "step": 990
    },
    {
      "epoch": 0.05,
      "grad_norm": 82.0,
      "learning_rate": 6.666666666666666e-07,
      "loss": 2.9071,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "eval_loss": 2.927218437194824,
      "eval_runtime": 65.2971,
      "eval_samples_per_second": 15.315,
      "eval_steps_per_second": 15.315,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 1.613922041856e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}