{
  "best_metric": 0.04534918814897537,
  "best_model_checkpoint": "./roberta-large-condaqa-neg-tag-token-classifier/checkpoint-16",
  "epoch": 4.0,
  "global_step": 16,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9587528418317636,
      "eval_f1": 0.0,
      "eval_loss": 0.1548939347267151,
      "eval_precision": 0.0,
      "eval_recall": 0.0,
      "eval_runtime": 1.8568,
      "eval_samples_per_second": 66.78,
      "eval_steps_per_second": 2.154,
      "step": 4
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9587528418317636,
      "eval_f1": 0.0,
      "eval_loss": 0.1006716936826706,
      "eval_precision": 0.0,
      "eval_recall": 0.0,
      "eval_runtime": 2.0399,
      "eval_samples_per_second": 60.786,
      "eval_steps_per_second": 1.961,
      "step": 8
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9818122767132186,
      "eval_f1": 0.0,
      "eval_loss": 0.0592331625521183,
      "eval_precision": 0.0,
      "eval_recall": 0.0,
      "eval_runtime": 2.0042,
      "eval_samples_per_second": 61.87,
      "eval_steps_per_second": 1.996,
      "step": 12
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9857096459889575,
      "eval_f1": 0.0,
      "eval_loss": 0.04534918814897537,
      "eval_precision": 0.0,
      "eval_recall": 0.0,
      "eval_runtime": 1.8522,
      "eval_samples_per_second": 66.948,
      "eval_steps_per_second": 2.16,
      "step": 16
    },
    {
      "epoch": 4.0,
      "step": 16,
      "total_flos": 598453706860920.0,
      "train_loss": 0.19345927238464355,
      "train_runtime": 231.2313,
      "train_samples_per_second": 16.434,
      "train_steps_per_second": 0.069
    }
  ],
  "max_steps": 16,
  "num_train_epochs": 4,
  "total_flos": 598453706860920.0,
  "trial_name": null,
  "trial_params": null
}