{
  "best_metric": 0.07112815976142883,
  "best_model_checkpoint": "../model/xlm-roberta-large_8_5e-06_512_train_french_5_512/tmp/checkpoint-6840",
  "epoch": 3.0,
  "global_step": 6840,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 4.5e-06,
      "loss": 0.1391,
      "step": 2280
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7017543859649122,
      "eval_loss": 0.08547523617744446,
      "eval_runtime": 56.0096,
      "eval_samples_per_second": 40.707,
      "eval_steps_per_second": 5.088,
      "step": 2280
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0803,
      "step": 4560
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7263157894736842,
      "eval_loss": 0.07496826350688934,
      "eval_runtime": 55.7373,
      "eval_samples_per_second": 40.906,
      "eval_steps_per_second": 5.113,
      "step": 4560
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.5e-06,
      "loss": 0.0654,
      "step": 6840
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7328947368421053,
      "eval_loss": 0.07112815976142883,
      "eval_runtime": 56.0517,
      "eval_samples_per_second": 40.677,
      "eval_steps_per_second": 5.085,
      "step": 6840
    }
  ],
  "max_steps": 22800,
  "num_train_epochs": 10,
  "total_flos": 5.098754631595622e+16,
  "trial_name": null,
  "trial_params": null
}
|
|