{ "best_metric": 0.4778162027316995, "best_model_checkpoint": "distilbert-base-uncased-finetuned-cola/run-3/checkpoint-536", "epoch": 2.0, "eval_steps": 500, "global_step": 536, "is_hyper_param_search": true, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 1.0, "eval_loss": 0.5673806071281433, "eval_matthews_correlation": 0.34158735475359947, "eval_runtime": 0.8364, "eval_samples_per_second": 1247.026, "eval_steps_per_second": 78.911, "step": 268 }, { "epoch": 1.87, "grad_norm": 2.8179821968078613, "learning_rate": 5.866638175800345e-06, "loss": 0.4135, "step": 500 }, { "epoch": 2.0, "eval_loss": 0.5291661620140076, "eval_matthews_correlation": 0.4778162027316995, "eval_runtime": 0.7922, "eval_samples_per_second": 1316.659, "eval_steps_per_second": 83.317, "step": 536 } ], "logging_steps": 500, "max_steps": 536, "num_input_tokens_seen": 0, "num_train_epochs": 2, "save_steps": 500, "total_flos": 95603170472208.0, "train_batch_size": 32, "trial_name": null, "trial_params": { "learning_rate": 8.734772395080514e-05, "num_train_epochs": 2, "per_device_train_batch_size": 32, "seed": 32 } }