{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 665,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 1.9699248120300753e-05,
      "loss": 1.2157,
      "step": 10
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9398496240601504e-05,
      "loss": 1.1007,
      "step": 20
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.909774436090226e-05,
      "loss": 1.1295,
      "step": 30
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.879699248120301e-05,
      "loss": 1.1043,
      "step": 40
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.849624060150376e-05,
      "loss": 1.0538,
      "step": 50
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8195488721804512e-05,
      "loss": 1.0711,
      "step": 60
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7894736842105264e-05,
      "loss": 1.0592,
      "step": 70
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7593984962406015e-05,
      "loss": 0.9932,
      "step": 80
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.729323308270677e-05,
      "loss": 1.002,
      "step": 90
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.699248120300752e-05,
      "loss": 0.9694,
      "step": 100
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6691729323308272e-05,
      "loss": 0.9617,
      "step": 110
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6390977443609023e-05,
      "loss": 0.9825,
      "step": 120
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.6090225563909775e-05,
      "loss": 0.9677,
      "step": 130
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.578947368421053e-05,
      "loss": 0.9367,
      "step": 140
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.548872180451128e-05,
      "loss": 0.9647,
      "step": 150
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.5187969924812032e-05,
      "loss": 0.9355,
      "step": 160
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.4887218045112783e-05,
      "loss": 0.8931,
      "step": 170
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.4586466165413536e-05,
      "loss": 0.9131,
      "step": 180
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.8635,
      "step": 190
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.3984962406015038e-05,
      "loss": 0.886,
      "step": 200
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.3684210526315791e-05,
      "loss": 0.9067,
      "step": 210
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.3383458646616543e-05,
      "loss": 0.8514,
      "step": 220
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.3082706766917295e-05,
      "loss": 0.783,
      "step": 230
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.2781954887218047e-05,
      "loss": 0.82,
      "step": 240
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.2481203007518798e-05,
      "loss": 0.7896,
      "step": 250
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.2180451127819551e-05,
      "loss": 0.7797,
      "step": 260
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.1879699248120302e-05,
      "loss": 0.7522,
      "step": 270
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.1578947368421053e-05,
      "loss": 0.7554,
      "step": 280
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.1278195488721806e-05,
      "loss": 0.7019,
      "step": 290
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0977443609022558e-05,
      "loss": 0.689,
      "step": 300
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.0676691729323309e-05,
      "loss": 0.6378,
      "step": 310
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.0375939849624062e-05,
      "loss": 0.6813,
      "step": 320
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.0075187969924813e-05,
      "loss": 0.6964,
      "step": 330
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.774436090225564e-06,
      "loss": 0.6601,
      "step": 340
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.473684210526315e-06,
      "loss": 0.5949,
      "step": 350
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.172932330827068e-06,
      "loss": 0.6844,
      "step": 360
    },
    {
      "epoch": 2.78,
      "learning_rate": 8.87218045112782e-06,
      "loss": 0.6175,
      "step": 370
    },
    {
      "epoch": 2.86,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.6629,
      "step": 380
    },
    {
      "epoch": 2.93,
      "learning_rate": 8.270676691729324e-06,
      "loss": 0.5215,
      "step": 390
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.969924812030075e-06,
      "loss": 0.5498,
      "step": 400
    },
    {
      "epoch": 3.08,
      "learning_rate": 7.669172932330828e-06,
      "loss": 0.5875,
      "step": 410
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.368421052631579e-06,
      "loss": 0.5036,
      "step": 420
    },
    {
      "epoch": 3.23,
      "learning_rate": 7.067669172932331e-06,
      "loss": 0.5403,
      "step": 430
    },
    {
      "epoch": 3.31,
      "learning_rate": 6.766917293233083e-06,
      "loss": 0.5277,
      "step": 440
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.466165413533835e-06,
      "loss": 0.5654,
      "step": 450
    },
    {
      "epoch": 3.46,
      "learning_rate": 6.165413533834587e-06,
      "loss": 0.5221,
      "step": 460
    },
    {
      "epoch": 3.53,
      "learning_rate": 5.864661654135339e-06,
      "loss": 0.5453,
      "step": 470
    },
    {
      "epoch": 3.61,
      "learning_rate": 5.56390977443609e-06,
      "loss": 0.4961,
      "step": 480
    },
    {
      "epoch": 3.68,
      "learning_rate": 5.263157894736842e-06,
      "loss": 0.5772,
      "step": 490
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.962406015037594e-06,
      "loss": 0.568,
      "step": 500
    },
    {
      "epoch": 3.83,
      "learning_rate": 4.661654135338346e-06,
      "loss": 0.4935,
      "step": 510
    },
    {
      "epoch": 3.91,
      "learning_rate": 4.360902255639098e-06,
      "loss": 0.5077,
      "step": 520
    },
    {
      "epoch": 3.98,
      "learning_rate": 4.06015037593985e-06,
      "loss": 0.4495,
      "step": 530
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.7593984962406014e-06,
      "loss": 0.4935,
      "step": 540
    },
    {
      "epoch": 4.14,
      "learning_rate": 3.4586466165413535e-06,
      "loss": 0.4925,
      "step": 550
    },
    {
      "epoch": 4.21,
      "learning_rate": 3.157894736842105e-06,
      "loss": 0.4146,
      "step": 560
    },
    {
      "epoch": 4.29,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.4654,
      "step": 570
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.556390977443609e-06,
      "loss": 0.5054,
      "step": 580
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.255639097744361e-06,
      "loss": 0.474,
      "step": 590
    },
    {
      "epoch": 4.51,
      "learning_rate": 1.9548872180451127e-06,
      "loss": 0.4338,
      "step": 600
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.6541353383458648e-06,
      "loss": 0.4501,
      "step": 610
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.3533834586466167e-06,
      "loss": 0.5026,
      "step": 620
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 0.5256,
      "step": 630
    },
    {
      "epoch": 4.81,
      "learning_rate": 7.518796992481203e-07,
      "loss": 0.4959,
      "step": 640
    },
    {
      "epoch": 4.89,
      "learning_rate": 4.511278195488722e-07,
      "loss": 0.4885,
      "step": 650
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.5037593984962406e-07,
      "loss": 0.4129,
      "step": 660
    },
    {
      "epoch": 5.0,
      "step": 665,
      "total_flos": 1391869979781120.0,
      "train_loss": 0.712713550983515,
      "train_runtime": 260.1715,
      "train_samples_per_second": 81.331,
      "train_steps_per_second": 2.556
    }
  ],
  "max_steps": 665,
  "num_train_epochs": 5,
  "total_flos": 1391869979781120.0,
  "trial_name": null,
  "trial_params": null
}