{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 327,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015313935681470138,
      "grad_norm": 1.1070387363433838,
      "learning_rate": 1.4634146341463416e-06,
      "loss": 1.2917,
      "step": 5
    },
    {
      "epoch": 0.030627871362940276,
      "grad_norm": 1.122970700263977,
      "learning_rate": 3.2926829268292685e-06,
      "loss": 1.2965,
      "step": 10
    },
    {
      "epoch": 0.045941807044410414,
      "grad_norm": 0.8600009083747864,
      "learning_rate": 5.121951219512195e-06,
      "loss": 1.3084,
      "step": 15
    },
    {
      "epoch": 0.06125574272588055,
      "grad_norm": 0.6501789093017578,
      "learning_rate": 6.951219512195123e-06,
      "loss": 1.3178,
      "step": 20
    },
    {
      "epoch": 0.07656967840735068,
      "grad_norm": 0.6199907660484314,
      "learning_rate": 8.780487804878048e-06,
      "loss": 1.258,
      "step": 25
    },
    {
      "epoch": 0.09188361408882083,
      "grad_norm": 0.525578498840332,
      "learning_rate": 1.0609756097560975e-05,
      "loss": 1.2632,
      "step": 30
    },
    {
      "epoch": 0.10719754977029096,
      "grad_norm": 0.5301306843757629,
      "learning_rate": 1.2439024390243903e-05,
      "loss": 1.1863,
      "step": 35
    },
    {
      "epoch": 0.1225114854517611,
      "grad_norm": 0.45140478014945984,
      "learning_rate": 1.4268292682926829e-05,
      "loss": 1.1721,
      "step": 40
    },
    {
      "epoch": 0.13782542113323124,
      "grad_norm": 0.5104884505271912,
      "learning_rate": 1.6097560975609757e-05,
      "loss": 1.2507,
      "step": 45
    },
    {
      "epoch": 0.15313935681470137,
      "grad_norm": 0.4204311668872833,
      "learning_rate": 1.7926829268292684e-05,
      "loss": 1.1973,
      "step": 50
    },
    {
      "epoch": 0.16845329249617153,
      "grad_norm": 0.43958738446235657,
      "learning_rate": 1.975609756097561e-05,
      "loss": 1.1404,
      "step": 55
    },
    {
      "epoch": 0.18376722817764166,
      "grad_norm": 0.45204630494117737,
      "learning_rate": 2.1585365853658537e-05,
      "loss": 1.1811,
      "step": 60
    },
    {
      "epoch": 0.1990811638591118,
      "grad_norm": 0.4730817973613739,
      "learning_rate": 2.3414634146341466e-05,
      "loss": 1.1368,
      "step": 65
    },
    {
      "epoch": 0.21439509954058192,
      "grad_norm": 0.5665387511253357,
      "learning_rate": 2.524390243902439e-05,
      "loss": 1.1341,
      "step": 70
    },
    {
      "epoch": 0.22970903522205208,
      "grad_norm": 0.49841874837875366,
      "learning_rate": 2.707317073170732e-05,
      "loss": 1.1277,
      "step": 75
    },
    {
      "epoch": 0.2450229709035222,
      "grad_norm": 0.4274734556674957,
      "learning_rate": 2.8902439024390242e-05,
      "loss": 1.0674,
      "step": 80
    },
    {
      "epoch": 0.26033690658499237,
      "grad_norm": 0.6278162598609924,
      "learning_rate": 2.9999877234172298e-05,
      "loss": 1.0953,
      "step": 85
    },
    {
      "epoch": 0.27565084226646247,
      "grad_norm": 0.5454415082931519,
      "learning_rate": 2.999849614168868e-05,
      "loss": 1.0974,
      "step": 90
    },
    {
      "epoch": 0.29096477794793263,
      "grad_norm": 0.5087949633598328,
      "learning_rate": 2.999558064119925e-05,
      "loss": 1.1079,
      "step": 95
    },
    {
      "epoch": 0.30627871362940273,
      "grad_norm": 0.5586227774620056,
      "learning_rate": 2.999113103097174e-05,
      "loss": 1.0655,
      "step": 100
    },
    {
      "epoch": 0.3215926493108729,
      "grad_norm": 0.5441133975982666,
      "learning_rate": 2.9985147766219615e-05,
      "loss": 1.0631,
      "step": 105
    },
    {
      "epoch": 0.33690658499234305,
      "grad_norm": 0.6769384741783142,
      "learning_rate": 2.9977631459055537e-05,
      "loss": 1.0438,
      "step": 110
    },
    {
      "epoch": 0.35222052067381315,
      "grad_norm": 0.523868978023529,
      "learning_rate": 2.996858287842873e-05,
      "loss": 1.0131,
      "step": 115
    },
    {
      "epoch": 0.3675344563552833,
      "grad_norm": 0.5572935342788696,
      "learning_rate": 2.995800295004629e-05,
      "loss": 1.0501,
      "step": 120
    },
    {
      "epoch": 0.38284839203675347,
      "grad_norm": 0.6209707856178284,
      "learning_rate": 2.9945892756278543e-05,
      "loss": 1.0125,
      "step": 125
    },
    {
      "epoch": 0.3981623277182236,
      "grad_norm": 0.6483351588249207,
      "learning_rate": 2.993225353604824e-05,
      "loss": 0.998,
      "step": 130
    },
    {
      "epoch": 0.41347626339969373,
      "grad_norm": 0.6352364420890808,
      "learning_rate": 2.9917086684703844e-05,
      "loss": 1.0163,
      "step": 135
    },
    {
      "epoch": 0.42879019908116384,
      "grad_norm": 0.6867502331733704,
      "learning_rate": 2.9900393753876816e-05,
      "loss": 0.9824,
      "step": 140
    },
    {
      "epoch": 0.444104134762634,
      "grad_norm": 0.6153353452682495,
      "learning_rate": 2.9882176451322798e-05,
      "loss": 0.9395,
      "step": 145
    },
    {
      "epoch": 0.45941807044410415,
      "grad_norm": 0.645903468132019,
      "learning_rate": 2.9862436640746973e-05,
      "loss": 0.914,
      "step": 150
    },
    {
      "epoch": 0.47473200612557426,
      "grad_norm": 0.671872615814209,
      "learning_rate": 2.9841176341613364e-05,
      "loss": 0.9177,
      "step": 155
    },
    {
      "epoch": 0.4900459418070444,
      "grad_norm": 0.6303296089172363,
      "learning_rate": 2.981839772893825e-05,
      "loss": 0.9576,
      "step": 160
    },
    {
      "epoch": 0.5053598774885145,
      "grad_norm": 0.7237289547920227,
      "learning_rate": 2.9794103133067637e-05,
      "loss": 0.9649,
      "step": 165
    },
    {
      "epoch": 0.5206738131699847,
      "grad_norm": 0.6527206301689148,
      "learning_rate": 2.9768295039438868e-05,
      "loss": 0.9158,
      "step": 170
    },
    {
      "epoch": 0.5359877488514548,
      "grad_norm": 0.7285829782485962,
      "learning_rate": 2.974097608832635e-05,
      "loss": 0.9173,
      "step": 175
    },
    {
      "epoch": 0.5513016845329249,
      "grad_norm": 0.8059846758842468,
      "learning_rate": 2.9712149074571433e-05,
      "loss": 0.8667,
      "step": 180
    },
    {
      "epoch": 0.5666156202143952,
      "grad_norm": 0.7382646799087524,
      "learning_rate": 2.968181694729651e-05,
      "loss": 0.877,
      "step": 185
    },
    {
      "epoch": 0.5819295558958653,
      "grad_norm": 0.7463013529777527,
      "learning_rate": 2.964998280960328e-05,
      "loss": 0.9148,
      "step": 190
    },
    {
      "epoch": 0.5972434915773354,
      "grad_norm": 0.7870151400566101,
      "learning_rate": 2.961664991825531e-05,
      "loss": 0.8881,
      "step": 195
    },
    {
      "epoch": 0.6125574272588055,
      "grad_norm": 0.8590981364250183,
      "learning_rate": 2.9581821683344832e-05,
      "loss": 0.8352,
      "step": 200
    },
    {
      "epoch": 0.6278713629402757,
      "grad_norm": 0.7620300650596619,
      "learning_rate": 2.954550166794391e-05,
      "loss": 0.7867,
      "step": 205
    },
    {
      "epoch": 0.6431852986217458,
      "grad_norm": 0.8273492455482483,
      "learning_rate": 2.9507693587739895e-05,
      "loss": 0.8456,
      "step": 210
    },
    {
      "epoch": 0.6584992343032159,
      "grad_norm": 0.7399709224700928,
      "learning_rate": 2.9468401310655303e-05,
      "loss": 0.8156,
      "step": 215
    },
    {
      "epoch": 0.6738131699846861,
      "grad_norm": 0.851788341999054,
      "learning_rate": 2.942762885645211e-05,
      "loss": 0.7952,
      "step": 220
    },
    {
      "epoch": 0.6891271056661562,
      "grad_norm": 0.8381015658378601,
      "learning_rate": 2.9385380396320523e-05,
      "loss": 0.8027,
      "step": 225
    },
    {
      "epoch": 0.7044410413476263,
      "grad_norm": 0.8373646140098572,
      "learning_rate": 2.934166025245223e-05,
      "loss": 0.7943,
      "step": 230
    },
    {
      "epoch": 0.7197549770290965,
      "grad_norm": 0.8320114612579346,
      "learning_rate": 2.9296472897598246e-05,
      "loss": 0.8064,
      "step": 235
    },
    {
      "epoch": 0.7350689127105666,
      "grad_norm": 1.0138018131256104,
      "learning_rate": 2.924982295461131e-05,
      "loss": 0.761,
      "step": 240
    },
    {
      "epoch": 0.7503828483920367,
      "grad_norm": 0.9759603142738342,
      "learning_rate": 2.920171519597297e-05,
      "loss": 0.7114,
      "step": 245
    },
    {
      "epoch": 0.7656967840735069,
      "grad_norm": 0.844030499458313,
      "learning_rate": 2.9152154543305316e-05,
      "loss": 0.7461,
      "step": 250
    },
    {
      "epoch": 0.781010719754977,
      "grad_norm": 0.8687477111816406,
      "learning_rate": 2.9101146066867502e-05,
      "loss": 0.728,
      "step": 255
    },
    {
      "epoch": 0.7963246554364471,
      "grad_norm": 0.9759243726730347,
      "learning_rate": 2.9048694985037e-05,
      "loss": 0.7475,
      "step": 260
    },
    {
      "epoch": 0.8116385911179173,
      "grad_norm": 0.9230566620826721,
      "learning_rate": 2.8994806663775792e-05,
      "loss": 0.8098,
      "step": 265
    },
    {
      "epoch": 0.8269525267993875,
      "grad_norm": 0.8410807251930237,
      "learning_rate": 2.893948661608136e-05,
      "loss": 0.7465,
      "step": 270
    },
    {
      "epoch": 0.8422664624808576,
      "grad_norm": 1.0104124546051025,
      "learning_rate": 2.888274050142271e-05,
      "loss": 0.7799,
      "step": 275
    },
    {
      "epoch": 0.8575803981623277,
      "grad_norm": 0.9490285515785217,
      "learning_rate": 2.8824574125161384e-05,
      "loss": 0.7541,
      "step": 280
    },
    {
      "epoch": 0.8728943338437979,
      "grad_norm": 0.9112844467163086,
      "learning_rate": 2.876499343795754e-05,
      "loss": 0.7424,
      "step": 285
    },
    {
      "epoch": 0.888208269525268,
      "grad_norm": 0.8824840784072876,
      "learning_rate": 2.8704004535161172e-05,
      "loss": 0.7448,
      "step": 290
    },
    {
      "epoch": 0.9035222052067381,
      "grad_norm": 1.3259707689285278,
      "learning_rate": 2.864161365618854e-05,
      "loss": 0.7165,
      "step": 295
    },
    {
      "epoch": 0.9188361408882083,
      "grad_norm": 0.938458263874054,
      "learning_rate": 2.8577827183883853e-05,
      "loss": 0.6724,
      "step": 300
    },
    {
      "epoch": 0.9341500765696784,
      "grad_norm": 0.940185010433197,
      "learning_rate": 2.851265164386627e-05,
      "loss": 0.6722,
      "step": 305
    },
    {
      "epoch": 0.9494640122511485,
      "grad_norm": 1.0995936393737793,
      "learning_rate": 2.84460937038623e-05,
      "loss": 0.6628,
      "step": 310
    },
    {
      "epoch": 0.9647779479326187,
      "grad_norm": 0.9380448460578918,
      "learning_rate": 2.837816017302368e-05,
      "loss": 0.6519,
      "step": 315
    },
    {
      "epoch": 0.9800918836140888,
      "grad_norm": 1.0398368835449219,
      "learning_rate": 2.8308858001230757e-05,
      "loss": 0.6781,
      "step": 320
    },
    {
      "epoch": 0.9954058192955589,
      "grad_norm": 1.1420120000839233,
      "learning_rate": 2.823819427838149e-05,
      "loss": 0.6554,
      "step": 325
    }
  ],
  "logging_steps": 5,
  "max_steps": 1635,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.7283530433246e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|