{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1968,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02540650406504065,
      "grad_norm": 7.65625,
      "learning_rate": 1.2436548223350254e-05,
      "loss": 1.3736,
      "step": 50
    },
    {
      "epoch": 0.0508130081300813,
      "grad_norm": 2.609375,
      "learning_rate": 2.5126903553299492e-05,
      "loss": 0.9886,
      "step": 100
    },
    {
      "epoch": 0.07621951219512195,
      "grad_norm": 2.25,
      "learning_rate": 3.7817258883248735e-05,
      "loss": 0.7345,
      "step": 150
    },
    {
      "epoch": 0.1016260162601626,
      "grad_norm": 2.015625,
      "learning_rate": 4.994353472614343e-05,
      "loss": 0.6664,
      "step": 200
    },
    {
      "epoch": 0.12703252032520326,
      "grad_norm": 2.078125,
      "learning_rate": 4.853190287972897e-05,
      "loss": 0.6045,
      "step": 250
    },
    {
      "epoch": 0.1524390243902439,
      "grad_norm": 2.046875,
      "learning_rate": 4.7120271033314515e-05,
      "loss": 0.5898,
      "step": 300
    },
    {
      "epoch": 0.17784552845528456,
      "grad_norm": 1.6875,
      "learning_rate": 4.570863918690006e-05,
      "loss": 0.5569,
      "step": 350
    },
    {
      "epoch": 0.2032520325203252,
      "grad_norm": 1.9765625,
      "learning_rate": 4.42970073404856e-05,
      "loss": 0.5396,
      "step": 400
    },
    {
      "epoch": 0.22865853658536586,
      "grad_norm": 1.796875,
      "learning_rate": 4.288537549407115e-05,
      "loss": 0.5298,
      "step": 450
    },
    {
      "epoch": 0.2540650406504065,
      "grad_norm": 1.859375,
      "learning_rate": 4.147374364765669e-05,
      "loss": 0.5257,
      "step": 500
    },
    {
      "epoch": 0.27947154471544716,
      "grad_norm": 1.84375,
      "learning_rate": 4.006211180124224e-05,
      "loss": 0.5151,
      "step": 550
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 1.828125,
      "learning_rate": 3.8650479954827784e-05,
      "loss": 0.5239,
      "step": 600
    },
    {
      "epoch": 0.33028455284552843,
      "grad_norm": 1.8828125,
      "learning_rate": 3.7238848108413324e-05,
      "loss": 0.5169,
      "step": 650
    },
    {
      "epoch": 0.3556910569105691,
      "grad_norm": 1.8359375,
      "learning_rate": 3.5827216261998877e-05,
      "loss": 0.4942,
      "step": 700
    },
    {
      "epoch": 0.38109756097560976,
      "grad_norm": 1.9921875,
      "learning_rate": 3.4415584415584416e-05,
      "loss": 0.4981,
      "step": 750
    },
    {
      "epoch": 0.4065040650406504,
      "grad_norm": 1.7578125,
      "learning_rate": 3.300395256916996e-05,
      "loss": 0.4983,
      "step": 800
    },
    {
      "epoch": 0.43191056910569103,
      "grad_norm": 2.046875,
      "learning_rate": 3.159232072275551e-05,
      "loss": 0.4882,
      "step": 850
    },
    {
      "epoch": 0.4573170731707317,
      "grad_norm": 1.7890625,
      "learning_rate": 3.018068887634105e-05,
      "loss": 0.4869,
      "step": 900
    },
    {
      "epoch": 0.48272357723577236,
      "grad_norm": 1.9375,
      "learning_rate": 2.8769057029926593e-05,
      "loss": 0.4898,
      "step": 950
    },
    {
      "epoch": 0.508130081300813,
      "grad_norm": 1.7265625,
      "learning_rate": 2.7357425183512143e-05,
      "loss": 0.501,
      "step": 1000
    },
    {
      "epoch": 0.5335365853658537,
      "grad_norm": 1.7578125,
      "learning_rate": 2.5945793337097685e-05,
      "loss": 0.4843,
      "step": 1050
    },
    {
      "epoch": 0.5589430894308943,
      "grad_norm": 1.9453125,
      "learning_rate": 2.453416149068323e-05,
      "loss": 0.496,
      "step": 1100
    },
    {
      "epoch": 0.584349593495935,
      "grad_norm": 2.046875,
      "learning_rate": 2.3122529644268774e-05,
      "loss": 0.4878,
      "step": 1150
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 1.875,
      "learning_rate": 2.171089779785432e-05,
      "loss": 0.4892,
      "step": 1200
    },
    {
      "epoch": 0.6351626016260162,
      "grad_norm": 1.8203125,
      "learning_rate": 2.0299265951439866e-05,
      "loss": 0.4915,
      "step": 1250
    },
    {
      "epoch": 0.6605691056910569,
      "grad_norm": 1.890625,
      "learning_rate": 1.888763410502541e-05,
      "loss": 0.4868,
      "step": 1300
    },
    {
      "epoch": 0.6859756097560976,
      "grad_norm": 1.75,
      "learning_rate": 1.7476002258610955e-05,
      "loss": 0.4819,
      "step": 1350
    },
    {
      "epoch": 0.7113821138211383,
      "grad_norm": 1.7890625,
      "learning_rate": 1.60643704121965e-05,
      "loss": 0.486,
      "step": 1400
    },
    {
      "epoch": 0.7367886178861789,
      "grad_norm": 1.7265625,
      "learning_rate": 1.4652738565782046e-05,
      "loss": 0.4875,
      "step": 1450
    },
    {
      "epoch": 0.7621951219512195,
      "grad_norm": 1.9921875,
      "learning_rate": 1.3241106719367592e-05,
      "loss": 0.4874,
      "step": 1500
    },
    {
      "epoch": 0.7876016260162602,
      "grad_norm": 1.796875,
      "learning_rate": 1.1829474872953134e-05,
      "loss": 0.4725,
      "step": 1550
    },
    {
      "epoch": 0.8130081300813008,
      "grad_norm": 1.8359375,
      "learning_rate": 1.0417843026538679e-05,
      "loss": 0.4819,
      "step": 1600
    },
    {
      "epoch": 0.8384146341463414,
      "grad_norm": 1.8125,
      "learning_rate": 9.006211180124225e-06,
      "loss": 0.4808,
      "step": 1650
    },
    {
      "epoch": 0.8638211382113821,
      "grad_norm": 1.8125,
      "learning_rate": 7.594579333709768e-06,
      "loss": 0.4797,
      "step": 1700
    },
    {
      "epoch": 0.8892276422764228,
      "grad_norm": 1.71875,
      "learning_rate": 6.1829474872953135e-06,
      "loss": 0.4836,
      "step": 1750
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 1.8359375,
      "learning_rate": 4.771315640880859e-06,
      "loss": 0.4832,
      "step": 1800
    },
    {
      "epoch": 0.9400406504065041,
      "grad_norm": 1.859375,
      "learning_rate": 3.3596837944664035e-06,
      "loss": 0.4945,
      "step": 1850
    },
    {
      "epoch": 0.9654471544715447,
      "grad_norm": 1.6640625,
      "learning_rate": 1.948051948051948e-06,
      "loss": 0.4686,
      "step": 1900
    },
    {
      "epoch": 0.9908536585365854,
      "grad_norm": 1.796875,
      "learning_rate": 5.364201016374929e-07,
      "loss": 0.4889,
      "step": 1950
    }
  ],
  "logging_steps": 50,
  "max_steps": 1968,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 512,
  "trial_name": null,
  "trial_params": null
}