gpt2_f_experiment_1_drug_data / trainer_state.json
{
  "best_metric": 1.4772998094558716,
  "best_model_checkpoint": "tam_test_out_drug_data/checkpoint-449",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 449,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.45615177159862363,
      "eval_loss": 1.4772998094558716,
      "eval_runtime": 17.4382,
      "eval_samples_per_second": 616.636,
      "eval_steps_per_second": 8.602,
      "step": 449
    }
  ],
  "logging_steps": 500,
  "max_steps": 449,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 4230514189025280.0,
  "train_batch_size": 72,
  "trial_name": null,
  "trial_params": null
}
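
A minimal sketch of inspecting this state file with Python's standard json module; the local file path is an assumption (adjust it to wherever the checkpoint folder was downloaded).

    import json

    # Assumed local path to the file shown above.
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Best evaluation metric and the checkpoint that produced it.
    print("best eval loss:", state["best_metric"])
    print("best checkpoint:", state["best_model_checkpoint"])

    # log_history holds one entry per logged step; here it is the single
    # end-of-epoch evaluation at step 449.
    for record in state["log_history"]:
        print(record.get("epoch"), record.get("eval_accuracy"), record.get("eval_loss"))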