|
{
  "best_metric": 0.636893310208111,
  "best_model_checkpoint": "./save_models/cola/roberta-base_lr1e-05_run0/checkpoint-3848",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 4810,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_loss": 0.45413094758987427,
      "eval_matthews_correlation": 0.47785514722622213,
      "eval_runtime": 0.5981,
      "eval_samples_per_second": 1431.104,
      "eval_steps_per_second": 90.28,
      "step": 481
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.533289095332891e-06,
      "loss": 0.5451,
      "step": 500
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.4294413626194,
      "eval_matthews_correlation": 0.5559024469662472,
      "eval_runtime": 0.6646,
      "eval_samples_per_second": 1288.013,
      "eval_steps_per_second": 81.253,
      "step": 962
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.427339084273391e-06,
      "loss": 0.3643,
      "step": 1000
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.5282544493675232,
      "eval_matthews_correlation": 0.6061655951679263,
      "eval_runtime": 0.5226,
      "eval_samples_per_second": 1638.098,
      "eval_steps_per_second": 103.338,
      "step": 1443
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.3213890732138915e-06,
      "loss": 0.263,
      "step": 1500
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.5712747573852539,
      "eval_matthews_correlation": 0.6315329091741146,
      "eval_runtime": 0.5375,
      "eval_samples_per_second": 1592.64,
      "eval_steps_per_second": 100.47,
      "step": 1924
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.2154390621543915e-06,
      "loss": 0.2115,
      "step": 2000
    },
    {
      "epoch": 5.0,
      "eval_loss": 0.6360122561454773,
      "eval_matthews_correlation": 0.6312355301626161,
      "eval_runtime": 0.5144,
      "eval_samples_per_second": 1664.168,
      "eval_steps_per_second": 104.983,
      "step": 2405
    },
    {
      "epoch": 5.2,
      "learning_rate": 5.1094890510948916e-06,
      "loss": 0.1785,
      "step": 2500
    },
    {
      "epoch": 6.0,
      "eval_loss": 0.8011564612388611,
      "eval_matthews_correlation": 0.6138643516919586,
      "eval_runtime": 0.5202,
      "eval_samples_per_second": 1645.404,
      "eval_steps_per_second": 103.799,
      "step": 2886
    },
    {
      "epoch": 6.24,
      "learning_rate": 4.003539040035391e-06,
      "loss": 0.1659,
      "step": 3000
    },
    {
      "epoch": 7.0,
      "eval_loss": 0.812069833278656,
      "eval_matthews_correlation": 0.6262286365318072,
      "eval_runtime": 0.5378,
      "eval_samples_per_second": 1591.669,
      "eval_steps_per_second": 100.409,
      "step": 3367
    },
    {
      "epoch": 7.28,
      "learning_rate": 2.897589028975891e-06,
      "loss": 0.1218,
      "step": 3500
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.7686376571655273,
      "eval_matthews_correlation": 0.636893310208111,
      "eval_runtime": 0.5131,
      "eval_samples_per_second": 1668.28,
      "eval_steps_per_second": 105.242,
      "step": 3848
    },
    {
      "epoch": 8.32,
      "learning_rate": 1.7916390179163902e-06,
      "loss": 0.1063,
      "step": 4000
    },
    {
      "epoch": 9.0,
      "eval_loss": 0.8879063129425049,
      "eval_matthews_correlation": 0.6314014927014334,
      "eval_runtime": 0.5155,
      "eval_samples_per_second": 1660.431,
      "eval_steps_per_second": 104.747,
      "step": 4329
    },
    {
      "epoch": 9.36,
      "learning_rate": 6.856890068568902e-07,
      "loss": 0.0838,
      "step": 4500
    },
    {
      "epoch": 10.0,
      "eval_loss": 0.8707919716835022,
      "eval_matthews_correlation": 0.6253690295963492,
      "eval_runtime": 0.5401,
      "eval_samples_per_second": 1584.804,
      "eval_steps_per_second": 99.976,
      "step": 4810
    },
    {
      "epoch": 10.0,
      "step": 4810,
      "total_flos": 808993009136460.0,
      "train_loss": 0.21800182366321588,
      "train_runtime": 226.2198,
      "train_samples_per_second": 340.156,
      "train_steps_per_second": 21.263
    }
  ],
  "logging_steps": 500,
  "max_steps": 4810,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 808993009136460.0,
  "trial_name": null,
  "trial_params": null
}
|
|