{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 516,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05813953488372093,
      "grad_norm": 1.528863549232483,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 0.4976,
      "step": 10
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 0.7564488053321838,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.4336,
      "step": 20
    },
    {
      "epoch": 0.1744186046511628,
      "grad_norm": 1.0575538873672485,
      "learning_rate": 5.769230769230769e-06,
      "loss": 0.3589,
      "step": 30
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 0.6464478969573975,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.3252,
      "step": 40
    },
    {
      "epoch": 0.29069767441860467,
      "grad_norm": 0.43869853019714355,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.3045,
      "step": 50
    },
    {
      "epoch": 0.3488372093023256,
      "grad_norm": 0.4740906357765198,
      "learning_rate": 9.99266706925562e-06,
      "loss": 0.3052,
      "step": 60
    },
    {
      "epoch": 0.4069767441860465,
      "grad_norm": 0.4225959777832031,
      "learning_rate": 9.962913897416029e-06,
      "loss": 0.2958,
      "step": 70
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 0.3857693374156952,
      "learning_rate": 9.91041841371078e-06,
      "loss": 0.2955,
      "step": 80
    },
    {
      "epoch": 0.5232558139534884,
      "grad_norm": 0.3972570598125458,
      "learning_rate": 9.835421176144035e-06,
      "loss": 0.2919,
      "step": 90
    },
    {
      "epoch": 0.5813953488372093,
      "grad_norm": 0.4061450660228729,
      "learning_rate": 9.738265855914014e-06,
      "loss": 0.2884,
      "step": 100
    },
    {
      "epoch": 0.6395348837209303,
      "grad_norm": 0.39139261841773987,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.2862,
      "step": 110
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 0.36877092719078064,
      "learning_rate": 9.479361303793441e-06,
      "loss": 0.282,
      "step": 120
    },
    {
      "epoch": 0.7558139534883721,
      "grad_norm": 0.40923571586608887,
      "learning_rate": 9.318798489436917e-06,
      "loss": 0.2834,
      "step": 130
    },
    {
      "epoch": 0.813953488372093,
      "grad_norm": 0.41997870802879333,
      "learning_rate": 9.138444990784455e-06,
      "loss": 0.2789,
      "step": 140
    },
    {
      "epoch": 0.872093023255814,
      "grad_norm": 0.38587668538093567,
      "learning_rate": 8.93912726898311e-06,
      "loss": 0.2759,
      "step": 150
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 0.3747125267982483,
      "learning_rate": 8.721758687811353e-06,
      "loss": 0.2791,
      "step": 160
    },
    {
      "epoch": 0.9883720930232558,
      "grad_norm": 0.39039668440818787,
      "learning_rate": 8.487335328233912e-06,
      "loss": 0.2738,
      "step": 170
    },
    {
      "epoch": 1.0465116279069768,
      "grad_norm": 0.36734411120414734,
      "learning_rate": 8.23693142390914e-06,
      "loss": 0.2461,
      "step": 180
    },
    {
      "epoch": 1.1046511627906976,
      "grad_norm": 0.34951621294021606,
      "learning_rate": 7.97169443856545e-06,
      "loss": 0.2321,
      "step": 190
    },
    {
      "epoch": 1.1627906976744187,
      "grad_norm": 0.4765775203704834,
      "learning_rate": 7.692839807804522e-06,
      "loss": 0.2334,
      "step": 200
    },
    {
      "epoch": 1.2209302325581395,
      "grad_norm": 0.4162239730358124,
      "learning_rate": 7.401645369426697e-06,
      "loss": 0.2314,
      "step": 210
    },
    {
      "epoch": 1.2790697674418605,
      "grad_norm": 0.3959456980228424,
      "learning_rate": 7.099445507801324e-06,
      "loss": 0.233,
      "step": 220
    },
    {
      "epoch": 1.3372093023255813,
      "grad_norm": 0.39358898997306824,
      "learning_rate": 6.7876250391152e-06,
      "loss": 0.2328,
      "step": 230
    },
    {
      "epoch": 1.3953488372093024,
      "grad_norm": 0.39183884859085083,
      "learning_rate": 6.467612865519674e-06,
      "loss": 0.2316,
      "step": 240
    },
    {
      "epoch": 1.4534883720930232,
      "grad_norm": 0.3894769847393036,
      "learning_rate": 6.14087542725593e-06,
      "loss": 0.2323,
      "step": 250
    },
    {
      "epoch": 1.5116279069767442,
      "grad_norm": 0.3784314692020416,
      "learning_rate": 5.808909982763825e-06,
      "loss": 0.2283,
      "step": 260
    },
    {
      "epoch": 1.5697674418604652,
      "grad_norm": 0.3567565381526947,
      "learning_rate": 5.473237747567805e-06,
      "loss": 0.2309,
      "step": 270
    },
    {
      "epoch": 1.627906976744186,
      "grad_norm": 0.4400705099105835,
      "learning_rate": 5.1353969233806735e-06,
      "loss": 0.2317,
      "step": 280
    },
    {
      "epoch": 1.6860465116279069,
      "grad_norm": 0.38926878571510315,
      "learning_rate": 4.796935649368936e-06,
      "loss": 0.2272,
      "step": 290
    },
    {
      "epoch": 1.744186046511628,
      "grad_norm": 0.37996888160705566,
      "learning_rate": 4.459404907880293e-06,
      "loss": 0.2319,
      "step": 300
    },
    {
      "epoch": 1.802325581395349,
      "grad_norm": 0.4081181287765503,
      "learning_rate": 4.1243514171423465e-06,
      "loss": 0.227,
      "step": 310
    },
    {
      "epoch": 1.8604651162790697,
      "grad_norm": 0.36687538027763367,
      "learning_rate": 3.7933105435014727e-06,
      "loss": 0.2288,
      "step": 320
    },
    {
      "epoch": 1.9186046511627906,
      "grad_norm": 0.3728639483451843,
      "learning_rate": 3.4677992656811054e-06,
      "loss": 0.224,
      "step": 330
    },
    {
      "epoch": 1.9767441860465116,
      "grad_norm": 0.3709273934364319,
      "learning_rate": 3.149309223300428e-06,
      "loss": 0.2293,
      "step": 340
    },
    {
      "epoch": 2.0348837209302326,
      "grad_norm": 0.3464743196964264,
      "learning_rate": 2.839299881508272e-06,
      "loss": 0.1976,
      "step": 350
    },
    {
      "epoch": 2.0930232558139537,
      "grad_norm": 0.3613881766796112,
      "learning_rate": 2.5391918430549635e-06,
      "loss": 0.1831,
      "step": 360
    },
    {
      "epoch": 2.1511627906976742,
      "grad_norm": 0.37775981426239014,
      "learning_rate": 2.250360338449226e-06,
      "loss": 0.1833,
      "step": 370
    },
    {
      "epoch": 2.2093023255813953,
      "grad_norm": 0.37163475155830383,
      "learning_rate": 1.9741289240311757e-06,
      "loss": 0.187,
      "step": 380
    },
    {
      "epoch": 2.2674418604651163,
      "grad_norm": 0.3699796199798584,
      "learning_rate": 1.7117634168396774e-06,
      "loss": 0.1856,
      "step": 390
    },
    {
      "epoch": 2.3255813953488373,
      "grad_norm": 0.36632710695266724,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.1812,
      "step": 400
    },
    {
      "epoch": 2.383720930232558,
      "grad_norm": 0.38165929913520813,
      "learning_rate": 1.2333701836832812e-06,
      "loss": 0.183,
      "step": 410
    },
    {
      "epoch": 2.441860465116279,
      "grad_norm": 0.3638046681880951,
      "learning_rate": 1.0195346714717813e-06,
      "loss": 0.1825,
      "step": 420
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.38040053844451904,
      "learning_rate": 8.239394482805996e-07,
      "loss": 0.181,
      "step": 430
    },
    {
      "epoch": 2.558139534883721,
      "grad_norm": 0.3494938313961029,
      "learning_rate": 6.474808197191401e-07,
      "loss": 0.1856,
      "step": 440
    },
    {
      "epoch": 2.616279069767442,
      "grad_norm": 0.3780662417411804,
      "learning_rate": 4.9096739888146e-07,
      "loss": 0.18,
      "step": 450
    },
    {
      "epoch": 2.6744186046511627,
      "grad_norm": 0.3449888229370117,
      "learning_rate": 3.5511640091604293e-07,
      "loss": 0.1845,
      "step": 460
    },
    {
      "epoch": 2.7325581395348837,
      "grad_norm": 0.44790518283843994,
      "learning_rate": 2.4055035642222225e-07,
      "loss": 0.185,
      "step": 470
    },
    {
      "epoch": 2.7906976744186047,
      "grad_norm": 0.3460833728313446,
      "learning_rate": 1.477942587339426e-07,
      "loss": 0.1811,
      "step": 480
    },
    {
      "epoch": 2.8488372093023253,
      "grad_norm": 0.3444211184978485,
      "learning_rate": 7.727315816331515e-08,
      "loss": 0.1841,
      "step": 490
    },
    {
      "epoch": 2.9069767441860463,
      "grad_norm": 0.3577885925769806,
      "learning_rate": 2.9310214228202016e-08,
      "loss": 0.183,
      "step": 500
    },
    {
      "epoch": 2.9651162790697674,
      "grad_norm": 0.3537665903568268,
      "learning_rate": 4.125214789427734e-09,
      "loss": 0.1837,
      "step": 510
    },
    {
      "epoch": 3.0,
      "step": 516,
      "total_flos": 3.6014407804780544e+18,
      "train_loss": 0.2428539801937665,
      "train_runtime": 11457.3527,
      "train_samples_per_second": 5.736,
      "train_steps_per_second": 0.045
    }
  ],
  "logging_steps": 10,
  "max_steps": 516,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.6014407804780544e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
|
|