{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 12.0,
  "global_step": 10452,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.57,
      "learning_rate": 4.760811327975507e-05,
      "loss": 2.2438,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.1121535301208496,
      "eval_runtime": 46.0061,
      "eval_samples_per_second": 63.752,
      "eval_steps_per_second": 7.977,
      "step": 871
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.521622655951014e-05,
      "loss": 2.1935,
      "step": 1000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.282433983926522e-05,
      "loss": 2.1235,
      "step": 1500
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.0784153938293457,
      "eval_runtime": 45.9581,
      "eval_samples_per_second": 63.819,
      "eval_steps_per_second": 7.986,
      "step": 1742
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.043245311902029e-05,
      "loss": 2.0744,
      "step": 2000
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.8040566398775356e-05,
      "loss": 2.0712,
      "step": 2500
    },
    {
      "epoch": 3.0,
      "eval_loss": 2.0678515434265137,
      "eval_runtime": 45.9902,
      "eval_samples_per_second": 63.774,
      "eval_steps_per_second": 7.98,
      "step": 2613
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.5648679678530425e-05,
      "loss": 2.0034,
      "step": 3000
    },
    {
      "epoch": 4.0,
      "eval_loss": 2.0545804500579834,
      "eval_runtime": 45.9685,
      "eval_samples_per_second": 63.805,
      "eval_steps_per_second": 7.984,
      "step": 3484
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.32567929582855e-05,
      "loss": 1.9854,
      "step": 3500
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.086490623804057e-05,
      "loss": 1.9375,
      "step": 4000
    },
    {
      "epoch": 5.0,
      "eval_loss": 2.027700662612915,
      "eval_runtime": 45.9612,
      "eval_samples_per_second": 63.815,
      "eval_steps_per_second": 7.985,
      "step": 4355
    },
    {
      "epoch": 5.17,
      "learning_rate": 2.847301951779564e-05,
      "loss": 1.9208,
      "step": 4500
    },
    {
      "epoch": 5.74,
      "learning_rate": 2.608113279755071e-05,
      "loss": 1.8911,
      "step": 5000
    },
    {
      "epoch": 6.0,
      "eval_loss": 2.0363667011260986,
      "eval_runtime": 45.9893,
      "eval_samples_per_second": 63.776,
      "eval_steps_per_second": 7.98,
      "step": 5226
    },
    {
      "epoch": 6.31,
      "learning_rate": 2.368924607730578e-05,
      "loss": 1.8687,
      "step": 5500
    },
    {
      "epoch": 6.89,
      "learning_rate": 2.1297359357060852e-05,
      "loss": 1.8454,
      "step": 6000
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.981174111366272,
      "eval_runtime": 45.9582,
      "eval_samples_per_second": 63.819,
      "eval_steps_per_second": 7.986,
      "step": 6097
    },
    {
      "epoch": 7.46,
      "learning_rate": 1.890547263681592e-05,
      "loss": 1.808,
      "step": 6500
    },
    {
      "epoch": 8.0,
      "eval_loss": 2.0175108909606934,
      "eval_runtime": 45.9528,
      "eval_samples_per_second": 63.826,
      "eval_steps_per_second": 7.986,
      "step": 6968
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.651358591657099e-05,
      "loss": 1.81,
      "step": 7000
    },
    {
      "epoch": 8.61,
      "learning_rate": 1.4121699196326064e-05,
      "loss": 1.7716,
      "step": 7500
    },
    {
      "epoch": 9.0,
      "eval_loss": 2.028625965118408,
      "eval_runtime": 45.9318,
      "eval_samples_per_second": 63.856,
      "eval_steps_per_second": 7.99,
      "step": 7839
    },
    {
      "epoch": 9.18,
      "learning_rate": 1.1729812476081135e-05,
      "loss": 1.7693,
      "step": 8000
    },
    {
      "epoch": 9.76,
      "learning_rate": 9.337925755836204e-06,
      "loss": 1.7519,
      "step": 8500
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.9653006792068481,
      "eval_runtime": 45.9563,
      "eval_samples_per_second": 63.821,
      "eval_steps_per_second": 7.986,
      "step": 8710
    },
    {
      "epoch": 10.33,
      "learning_rate": 6.946039035591274e-06,
      "loss": 1.7484,
      "step": 9000
    },
    {
      "epoch": 10.91,
      "learning_rate": 4.554152315346345e-06,
      "loss": 1.7358,
      "step": 9500
    },
    {
      "epoch": 11.0,
      "eval_loss": 1.9817312955856323,
      "eval_runtime": 45.9672,
      "eval_samples_per_second": 63.806,
      "eval_steps_per_second": 7.984,
      "step": 9581
    },
    {
      "epoch": 11.48,
      "learning_rate": 2.1622655951014164e-06,
      "loss": 1.7084,
      "step": 10000
    },
    {
      "epoch": 12.0,
      "eval_loss": 1.9633114337921143,
      "eval_runtime": 45.9914,
      "eval_samples_per_second": 63.773,
      "eval_steps_per_second": 7.98,
      "step": 10452
    },
    {
      "epoch": 12.0,
      "step": 10452,
      "total_flos": 1.760388788159447e+17,
      "train_loss": 1.9039230375877523,
      "train_runtime": 39633.1701,
      "train_samples_per_second": 16.876,
      "train_steps_per_second": 0.264
    }
  ],
  "max_steps": 10452,
  "num_train_epochs": 12,
  "total_flos": 1.760388788159447e+17,
  "trial_name": null,
  "trial_params": null
}