{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.8252450781819656,
  "global_step": 24686,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 2.9392368143887224e-05,
      "loss": 2.6222,
      "step": 500
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.8784736287774447e-05,
      "loss": 1.7824,
      "step": 1000
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.817710443166167e-05,
      "loss": 1.6134,
      "step": 1500
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.7569472575548894e-05,
      "loss": 1.492,
      "step": 2000
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.6961840719436117e-05,
      "loss": 1.4179,
      "step": 2500
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.6354208863323343e-05,
      "loss": 1.4101,
      "step": 3000
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.5746577007210563e-05,
      "loss": 1.3602,
      "step": 3500
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.513894515109779e-05,
      "loss": 1.3182,
      "step": 4000
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.4531313294985013e-05,
      "loss": 1.3338,
      "step": 4500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.3923681438872236e-05,
      "loss": 1.3377,
      "step": 5000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.331604958275946e-05,
      "loss": 1.3059,
      "step": 5500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.2708417726646683e-05,
      "loss": 1.2811,
      "step": 6000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.2100785870533906e-05,
      "loss": 1.2666,
      "step": 6500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1493154014421133e-05,
      "loss": 1.2554,
      "step": 7000
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0885522158308352e-05,
      "loss": 1.2196,
      "step": 7500
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.027789030219558e-05,
      "loss": 1.2499,
      "step": 8000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.96702584460828e-05,
      "loss": 1.197,
      "step": 8500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.9062626589970022e-05,
      "loss": 1.2032,
      "step": 9000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.845499473385725e-05,
      "loss": 1.2115,
      "step": 9500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.784736287774447e-05,
      "loss": 1.2381,
      "step": 10000
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.7239731021631695e-05,
      "loss": 1.2194,
      "step": 10500
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.663209916551892e-05,
      "loss": 1.2124,
      "step": 11000
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.602446730940614e-05,
      "loss": 1.1917,
      "step": 11500
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5416835453293365e-05,
      "loss": 1.1998,
      "step": 12000
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.4809203597180588e-05,
      "loss": 1.1596,
      "step": 12500
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.4201571741067811e-05,
      "loss": 1.0373,
      "step": 13000
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.3593939884955035e-05,
      "loss": 1.0618,
      "step": 13500
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.298630802884226e-05,
      "loss": 1.0332,
      "step": 14000
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.2378676172729483e-05,
      "loss": 1.0407,
      "step": 14500
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.9392368143887224e-05,
      "loss": 1.0722,
      "step": 15000
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.8784736287774447e-05,
      "loss": 1.0825,
      "step": 15500
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.817710443166167e-05,
      "loss": 1.1312,
      "step": 16000
    },
    {
      "epoch": 1.16,
      "learning_rate": 2.7569472575548894e-05,
      "loss": 1.1025,
      "step": 16500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.6961840719436117e-05,
      "loss": 1.1045,
      "step": 17000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.6354208863323343e-05,
      "loss": 1.1177,
      "step": 17500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.5746577007210563e-05,
      "loss": 1.1005,
      "step": 18000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.513894515109779e-05,
      "loss": 1.0777,
      "step": 18500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.4531313294985013e-05,
      "loss": 1.1009,
      "step": 19000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.3923681438872236e-05,
      "loss": 1.0944,
      "step": 19500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.331604958275946e-05,
      "loss": 1.1001,
      "step": 20000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.2708417726646683e-05,
      "loss": 1.0926,
      "step": 20500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.2100785870533906e-05,
      "loss": 1.1123,
      "step": 21000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.1493154014421133e-05,
      "loss": 1.109,
      "step": 21500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.0885522158308352e-05,
      "loss": 1.1017,
      "step": 22000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.027789030219558e-05,
      "loss": 1.0779,
      "step": 22500
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.96702584460828e-05,
      "loss": 1.067,
      "step": 23000
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.9062626589970022e-05,
      "loss": 1.0914,
      "step": 23500
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.845499473385725e-05,
      "loss": 1.0886,
      "step": 24000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.784736287774447e-05,
      "loss": 1.0861,
      "step": 24500
    },
    {
      "epoch": 1.83,
      "step": 24686,
      "total_flos": 1.4533519377936384e+16,
      "train_loss": 0.4519888086604075,
      "train_runtime": 3497.3077,
      "train_samples_per_second": 84.698,
      "train_steps_per_second": 7.059
    }
  ],
  "max_steps": 24686,
  "num_train_epochs": 2,
  "total_flos": 1.4533519377936384e+16,
  "trial_name": null,
  "trial_params": null
}