{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 21050,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 1.952684085510689e-05,
      "loss": 0.3622,
      "step": 500
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9051781472684088e-05,
      "loss": 0.277,
      "step": 1000
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8576722090261282e-05,
      "loss": 0.2499,
      "step": 1500
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.810166270783848e-05,
      "loss": 0.2266,
      "step": 2000
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7627553444180522e-05,
      "loss": 0.2035,
      "step": 2500
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.715249406175772e-05,
      "loss": 0.2061,
      "step": 3000
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.667743467933492e-05,
      "loss": 0.2024,
      "step": 3500
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6202375296912117e-05,
      "loss": 0.1887,
      "step": 4000
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5728266033254158e-05,
      "loss": 0.1641,
      "step": 4500
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.5253206650831356e-05,
      "loss": 0.1314,
      "step": 5000
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.4779097387173397e-05,
      "loss": 0.1363,
      "step": 5500
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4304038004750596e-05,
      "loss": 0.1261,
      "step": 6000
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.382897862232779e-05,
      "loss": 0.1367,
      "step": 6500
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3353919239904989e-05,
      "loss": 0.1363,
      "step": 7000
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.2878859857482187e-05,
      "loss": 0.1378,
      "step": 7500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.2403800475059384e-05,
      "loss": 0.1343,
      "step": 8000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.192874109263658e-05,
      "loss": 0.132,
      "step": 8500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.1453681710213777e-05,
      "loss": 0.0911,
      "step": 9000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.097957244655582e-05,
      "loss": 0.0806,
      "step": 9500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0505463182897864e-05,
      "loss": 0.0852,
      "step": 10000
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.003040380047506e-05,
      "loss": 0.0944,
      "step": 10500
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.555344418052257e-06,
      "loss": 0.0936,
      "step": 11000
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.080285035629454e-06,
      "loss": 0.0945,
      "step": 11500
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.60522565320665e-06,
      "loss": 0.1064,
      "step": 12000
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.130166270783849e-06,
      "loss": 0.0968,
      "step": 12500
    },
    {
      "epoch": 3.09,
      "learning_rate": 7.656057007125892e-06,
      "loss": 0.0597,
      "step": 13000
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.180997624703089e-06,
      "loss": 0.05,
      "step": 13500
    },
    {
      "epoch": 3.33,
      "learning_rate": 6.705938242280286e-06,
      "loss": 0.0577,
      "step": 14000
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.230878859857483e-06,
      "loss": 0.0589,
      "step": 14500
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.75581947743468e-06,
      "loss": 0.0616,
      "step": 15000
    },
    {
      "epoch": 3.68,
      "learning_rate": 5.281710213776723e-06,
      "loss": 0.0633,
      "step": 15500
    },
    {
      "epoch": 3.8,
      "learning_rate": 4.806650831353919e-06,
      "loss": 0.0593,
      "step": 16000
    },
    {
      "epoch": 3.92,
      "learning_rate": 4.331591448931117e-06,
      "loss": 0.0698,
      "step": 16500
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.856532066508314e-06,
      "loss": 0.0487,
      "step": 17000
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.381472684085511e-06,
      "loss": 0.0321,
      "step": 17500
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.9073634204275536e-06,
      "loss": 0.0384,
      "step": 18000
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.4323040380047506e-06,
      "loss": 0.0324,
      "step": 18500
    },
    {
      "epoch": 4.51,
      "learning_rate": 1.957244655581948e-06,
      "loss": 0.0359,
      "step": 19000
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.4821852731591448e-06,
      "loss": 0.0342,
      "step": 19500
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.0080760095011877e-06,
      "loss": 0.0338,
      "step": 20000
    },
    {
      "epoch": 4.87,
      "learning_rate": 5.330166270783848e-07,
      "loss": 0.0334,
      "step": 20500
    },
    {
      "epoch": 4.99,
      "learning_rate": 5.7957244655581954e-08,
      "loss": 0.0384,
      "step": 21000
    },
    {
      "epoch": 5.0,
      "step": 21050,
      "total_flos": 2.21503330843008e+16,
      "train_loss": 0.111734983768146,
      "train_runtime": 1050.6373,
      "train_samples_per_second": 320.515,
      "train_steps_per_second": 20.035
    }
  ],
  "max_steps": 21050,
  "num_train_epochs": 5,
  "total_flos": 2.21503330843008e+16,
  "trial_name": null,
  "trial_params": null
}