{
"best_metric": 0.9413231364393609,
"best_model_checkpoint": "model_saves/deberta-large_spell_5k_1_p3/checkpoint-536",
"epoch": 5.0,
"global_step": 1340,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_accuracy": 0.9407696866020416,
"eval_loss": 0.41556528210639954,
"eval_runtime": 4.6348,
"eval_samples_per_second": 945.458,
"eval_steps_per_second": 7.552,
"step": 268
},
{
"epoch": 2.0,
"eval_accuracy": 0.9413231364393609,
"eval_loss": 0.4426611661911011,
"eval_runtime": 4.6378,
"eval_samples_per_second": 944.852,
"eval_steps_per_second": 7.547,
"step": 536
},
{
"epoch": 3.0,
"eval_accuracy": 0.9406522881517012,
"eval_loss": 0.47103145718574524,
"eval_runtime": 4.6337,
"eval_samples_per_second": 945.689,
"eval_steps_per_second": 7.553,
"step": 804
},
{
"epoch": 3.73,
"learning_rate": 1e-05,
"loss": 0.2543,
"step": 1000
},
{
"epoch": 4.0,
"eval_accuracy": 0.9397242813537718,
"eval_loss": 0.5292877554893494,
"eval_runtime": 4.6416,
"eval_samples_per_second": 944.076,
"eval_steps_per_second": 7.541,
"step": 1072
},
{
"epoch": 5.0,
"eval_accuracy": 0.9391428795044667,
"eval_loss": 0.5923100709915161,
"eval_runtime": 4.6379,
"eval_samples_per_second": 944.827,
"eval_steps_per_second": 7.547,
"step": 1340
},
{
"epoch": 5.0,
"step": 1340,
"total_flos": 2.0108696705040384e+16,
"train_loss": 0.22194259700490468,
"train_runtime": 572.4463,
"train_samples_per_second": 898.879,
"train_steps_per_second": 7.022
}
],
"max_steps": 4020,
"num_train_epochs": 15,
"total_flos": 2.0108696705040384e+16,
"trial_name": null,
"trial_params": null
}