deberta-large_lemon-spell_5k_4_p3 / trainer_state.json
{
  "best_metric": 0.9420163463366094,
  "best_model_checkpoint": "model_saves/deberta-large_lemon-spell_5k_4_p3/checkpoint-268",
  "epoch": 4.0,
  "global_step": 1072,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9420163463366094,
      "eval_loss": 0.4153633117675781,
      "eval_runtime": 4.6508,
      "eval_samples_per_second": 942.213,
      "eval_steps_per_second": 7.526,
      "step": 268
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9409653506859423,
      "eval_loss": 0.44057878851890564,
      "eval_runtime": 4.6472,
      "eval_samples_per_second": 942.934,
      "eval_steps_per_second": 7.531,
      "step": 536
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9407137825780699,
      "eval_loss": 0.4833277463912964,
      "eval_runtime": 4.6824,
      "eval_samples_per_second": 935.835,
      "eval_steps_per_second": 7.475,
      "step": 804
    },
    {
      "epoch": 3.73,
      "learning_rate": 1e-05,
      "loss": 0.2535,
      "step": 1000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9396348349154172,
      "eval_loss": 0.5351554155349731,
      "eval_runtime": 4.6531,
      "eval_samples_per_second": 941.742,
      "eval_steps_per_second": 7.522,
      "step": 1072
    },
    {
      "epoch": 4.0,
      "step": 1072,
      "total_flos": 1.60866272018432e+16,
      "train_loss": 0.2470646803058795,
      "train_runtime": 467.3854,
      "train_samples_per_second": 1100.933,
      "train_steps_per_second": 8.601
    }
  ],
  "max_steps": 4020,
  "num_train_epochs": 15,
  "total_flos": 1.60866272018432e+16,
  "trial_name": null,
  "trial_params": null
}