{ "best_metric": 1.1144840717315674, "best_model_checkpoint": "/mnt/bn/qingyi-bn-lq/llama/saved_llamaalpacaGPT4/checkpoint-8", "epoch": 0.163787587971849, "global_step": 8, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.08, "eval_loss": 1.2102158069610596, "eval_runtime": 40.7424, "eval_samples_per_second": 49.089, "eval_steps_per_second": 0.785, "step": 4 }, { "epoch": 0.16, "eval_loss": 1.1144840717315674, "eval_runtime": 40.6507, "eval_samples_per_second": 49.2, "eval_steps_per_second": 0.787, "step": 8 } ], "max_steps": 144, "num_train_epochs": 3, "total_flos": 1.2265264808722432e+17, "trial_name": null, "trial_params": null }