{
"best_metric": 0.647979736328125,
"best_model_checkpoint": "distilbert-base-uncased_cEXT_mypersonality/checkpoint-496",
"epoch": 3.0,
"eval_steps": 500,
"global_step": 744,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_accuracy": 0.6038306451612904,
"eval_loss": 0.6622927188873291,
"eval_runtime": 3.8202,
"eval_samples_per_second": 519.349,
"eval_steps_per_second": 16.23,
"step": 248
},
{
"epoch": 2.0,
"eval_accuracy": 0.6144153225806451,
"eval_loss": 0.647979736328125,
"eval_runtime": 3.8437,
"eval_samples_per_second": 516.174,
"eval_steps_per_second": 16.13,
"step": 496
},
{
"epoch": 2.02,
"learning_rate": 1.1935483870967743e-05,
"loss": 0.6625,
"step": 500
},
{
"epoch": 3.0,
"eval_accuracy": 0.6038306451612904,
"eval_loss": 0.6553571820259094,
"eval_runtime": 3.8662,
"eval_samples_per_second": 513.169,
"eval_steps_per_second": 16.037,
"step": 744
}
],
"logging_steps": 500,
"max_steps": 1240,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 485142443173500.0,
"trial_name": null,
"trial_params": null
}