{
"best_metric": 0.9412560516105949,
"best_model_checkpoint": "model_saves/deberta-v3-large_lemon-spell_10k_2_p3/checkpoint-536",
"epoch": 5.0,
"global_step": 1340,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_accuracy": 0.9412504612081978,
"eval_loss": 0.4289807677268982,
"eval_runtime": 4.5999,
"eval_samples_per_second": 952.625,
"eval_steps_per_second": 7.609,
"step": 268
},
{
"epoch": 2.0,
"eval_accuracy": 0.9412560516105949,
"eval_loss": 0.4463995695114136,
"eval_runtime": 4.5763,
"eval_samples_per_second": 957.55,
"eval_steps_per_second": 7.648,
"step": 536
},
{
"epoch": 3.0,
"eval_accuracy": 0.9404286720558146,
"eval_loss": 0.4729045033454895,
"eval_runtime": 4.5746,
"eval_samples_per_second": 957.902,
"eval_steps_per_second": 7.651,
"step": 804
},
{
"epoch": 3.73,
"learning_rate": 1e-05,
"loss": 0.2621,
"step": 1000
},
{
"epoch": 4.0,
"eval_accuracy": 0.9397354621585662,
"eval_loss": 0.5097540616989136,
"eval_runtime": 4.5699,
"eval_samples_per_second": 958.888,
"eval_steps_per_second": 7.659,
"step": 1072
},
{
"epoch": 5.0,
"eval_accuracy": 0.9393944476123391,
"eval_loss": 0.550991415977478,
"eval_runtime": 4.5697,
"eval_samples_per_second": 958.919,
"eval_steps_per_second": 7.659,
"step": 1340
},
{
"epoch": 5.0,
"step": 1340,
"total_flos": 1.7253490090835968e+16,
"train_loss": 0.23529494556028452,
"train_runtime": 594.4617,
"train_samples_per_second": 865.59,
"train_steps_per_second": 6.762
}
],
"max_steps": 4020,
"num_train_epochs": 15,
"total_flos": 1.7253490090835968e+16,
"trial_name": null,
"trial_params": null
}