{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4985835694050991,
  "eval_steps": 500,
  "global_step": 88,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0056657223796034,
      "grad_norm": 24.973131796915897,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.8537,
      "step": 1
    },
    {
      "epoch": 0.0113314447592068,
      "grad_norm": 32.79573813738381,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.0212,
      "step": 2
    },
    {
      "epoch": 0.0169971671388102,
      "grad_norm": 23.800880905805656,
      "learning_rate": 3e-06,
      "loss": 2.1456,
      "step": 3
    },
    {
      "epoch": 0.0226628895184136,
      "grad_norm": 19.091198715081358,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.9808,
      "step": 4
    },
    {
      "epoch": 0.028328611898016998,
      "grad_norm": 14.124470348172405,
      "learning_rate": 5e-06,
      "loss": 2.1825,
      "step": 5
    },
    {
      "epoch": 0.0339943342776204,
      "grad_norm": 11.461608032959802,
      "learning_rate": 6e-06,
      "loss": 1.6353,
      "step": 6
    },
    {
      "epoch": 0.039660056657223795,
      "grad_norm": 10.354681496346823,
      "learning_rate": 7e-06,
      "loss": 1.9076,
      "step": 7
    },
    {
      "epoch": 0.0453257790368272,
      "grad_norm": 10.167669680172194,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.4754,
      "step": 8
    },
    {
      "epoch": 0.05099150141643059,
      "grad_norm": 7.5541696713086255,
      "learning_rate": 9e-06,
      "loss": 1.6213,
      "step": 9
    },
    {
      "epoch": 0.056657223796033995,
      "grad_norm": 4.087852973173369,
      "learning_rate": 1e-05,
      "loss": 1.5217,
      "step": 10
    },
    {
      "epoch": 0.06232294617563739,
      "grad_norm": 4.071392878063137,
      "learning_rate": 9.999948174819623e-06,
      "loss": 1.6551,
      "step": 11
    },
    {
      "epoch": 0.0679886685552408,
      "grad_norm": 5.2075402015034,
      "learning_rate": 9.999792700352826e-06,
      "loss": 1.4474,
      "step": 12
    },
    {
      "epoch": 0.07365439093484419,
      "grad_norm": 3.6492933345906637,
      "learning_rate": 9.999533579822611e-06,
      "loss": 1.5585,
      "step": 13
    },
    {
      "epoch": 0.07932011331444759,
      "grad_norm": 6.482920810973195,
      "learning_rate": 9.999170818600562e-06,
      "loss": 1.3317,
      "step": 14
    },
    {
      "epoch": 0.08498583569405099,
      "grad_norm": 4.137365745831386,
      "learning_rate": 9.998704424206747e-06,
      "loss": 1.4029,
      "step": 15
    },
    {
      "epoch": 0.0906515580736544,
      "grad_norm": 4.745717244720069,
      "learning_rate": 9.998134406309555e-06,
      "loss": 1.6586,
      "step": 16
    },
    {
      "epoch": 0.09631728045325778,
      "grad_norm": 5.4377770096801346,
      "learning_rate": 9.997460776725497e-06,
      "loss": 1.365,
      "step": 17
    },
    {
      "epoch": 0.10198300283286119,
      "grad_norm": 3.317130182493388,
      "learning_rate": 9.996683549418964e-06,
      "loss": 1.4956,
      "step": 18
    },
    {
      "epoch": 0.10764872521246459,
      "grad_norm": 1.7845609616841893,
      "learning_rate": 9.995802740501933e-06,
      "loss": 1.3472,
      "step": 19
    },
    {
      "epoch": 0.11331444759206799,
      "grad_norm": 14.387033772755194,
      "learning_rate": 9.994818368233639e-06,
      "loss": 1.4116,
      "step": 20
    },
    {
      "epoch": 0.11898016997167139,
      "grad_norm": 6.920700020611593,
      "learning_rate": 9.993730453020187e-06,
      "loss": 1.2776,
      "step": 21
    },
    {
      "epoch": 0.12464589235127478,
      "grad_norm": 6.05951274644599,
      "learning_rate": 9.99253901741414e-06,
      "loss": 1.4433,
      "step": 22
    },
    {
      "epoch": 0.13031161473087818,
      "grad_norm": 3.0541449788715935,
      "learning_rate": 9.991244086114046e-06,
      "loss": 1.3396,
      "step": 23
    },
    {
      "epoch": 0.1359773371104816,
      "grad_norm": 1.8438099140328046,
      "learning_rate": 9.989845685963917e-06,
      "loss": 1.3061,
      "step": 24
    },
    {
      "epoch": 0.141643059490085,
      "grad_norm": 4.048301070320613,
      "learning_rate": 9.988343845952697e-06,
      "loss": 1.2283,
      "step": 25
    },
    {
      "epoch": 0.14730878186968838,
      "grad_norm": 3.5627296346591457,
      "learning_rate": 9.986738597213633e-06,
      "loss": 1.2865,
      "step": 26
    },
    {
      "epoch": 0.1529745042492918,
      "grad_norm": 2.237494567304501,
      "learning_rate": 9.98502997302365e-06,
      "loss": 1.3233,
      "step": 27
    },
    {
      "epoch": 0.15864022662889518,
      "grad_norm": 3.479719952104877,
      "learning_rate": 9.983218008802648e-06,
      "loss": 1.3033,
      "step": 28
    },
    {
      "epoch": 0.1643059490084986,
      "grad_norm": 2.066121083229141,
      "learning_rate": 9.98130274211278e-06,
      "loss": 1.3326,
      "step": 29
    },
    {
      "epoch": 0.16997167138810199,
      "grad_norm": 4.090684571263736,
      "learning_rate": 9.979284212657658e-06,
      "loss": 1.3102,
      "step": 30
    },
    {
      "epoch": 0.17563739376770537,
      "grad_norm": 2.369637256277251,
      "learning_rate": 9.977162462281544e-06,
      "loss": 1.4067,
      "step": 31
    },
    {
      "epoch": 0.1813031161473088,
      "grad_norm": 1.4378564529803546,
      "learning_rate": 9.97493753496848e-06,
      "loss": 1.2409,
      "step": 32
    },
    {
      "epoch": 0.18696883852691218,
      "grad_norm": 1.810353068849482,
      "learning_rate": 9.972609476841368e-06,
      "loss": 1.2659,
      "step": 33
    },
    {
      "epoch": 0.19263456090651557,
      "grad_norm": 2.954930884156565,
      "learning_rate": 9.970178336161018e-06,
      "loss": 1.3727,
      "step": 34
    },
    {
      "epoch": 0.19830028328611898,
      "grad_norm": 2.053307140265503,
      "learning_rate": 9.967644163325157e-06,
      "loss": 1.3463,
      "step": 35
    },
    {
      "epoch": 0.20396600566572237,
      "grad_norm": 1.8032124432327943,
      "learning_rate": 9.965007010867366e-06,
      "loss": 1.1998,
      "step": 36
    },
    {
      "epoch": 0.2096317280453258,
      "grad_norm": 1.4952983263862012,
      "learning_rate": 9.962266933456008e-06,
      "loss": 1.2829,
      "step": 37
    },
    {
      "epoch": 0.21529745042492918,
      "grad_norm": 1.3649794008291625,
      "learning_rate": 9.959423987893086e-06,
      "loss": 1.2056,
      "step": 38
    },
    {
      "epoch": 0.22096317280453256,
      "grad_norm": 1.4380773398306634,
      "learning_rate": 9.956478233113066e-06,
      "loss": 1.29,
      "step": 39
    },
    {
      "epoch": 0.22662889518413598,
      "grad_norm": 1.6072540934424309,
      "learning_rate": 9.953429730181653e-06,
      "loss": 1.2593,
      "step": 40
    },
    {
      "epoch": 0.23229461756373937,
      "grad_norm": 1.6010739399694889,
      "learning_rate": 9.95027854229454e-06,
      "loss": 1.2117,
      "step": 41
    },
    {
      "epoch": 0.23796033994334279,
      "grad_norm": 1.2474393925785745,
      "learning_rate": 9.947024734776076e-06,
      "loss": 1.2022,
      "step": 42
    },
    {
      "epoch": 0.24362606232294617,
      "grad_norm": 1.4019264249340568,
      "learning_rate": 9.943668375077926e-06,
      "loss": 1.2365,
      "step": 43
    },
    {
      "epoch": 0.24929178470254956,
      "grad_norm": 1.5087040675714003,
      "learning_rate": 9.940209532777666e-06,
      "loss": 1.274,
      "step": 44
    },
    {
      "epoch": 0.254957507082153,
      "grad_norm": 1.1953570915609946,
      "learning_rate": 9.93664827957735e-06,
      "loss": 1.2526,
      "step": 45
    },
    {
      "epoch": 0.26062322946175637,
      "grad_norm": 1.4826450819224886,
      "learning_rate": 9.932984689302012e-06,
      "loss": 1.1978,
      "step": 46
    },
    {
      "epoch": 0.26628895184135976,
      "grad_norm": 1.1937833972167977,
      "learning_rate": 9.929218837898143e-06,
      "loss": 1.1816,
      "step": 47
    },
    {
      "epoch": 0.2719546742209632,
      "grad_norm": 1.1238100782353855,
      "learning_rate": 9.925350803432112e-06,
      "loss": 1.1931,
      "step": 48
    },
    {
      "epoch": 0.2776203966005666,
      "grad_norm": 1.3338900623153498,
      "learning_rate": 9.921380666088558e-06,
      "loss": 1.1978,
      "step": 49
    },
    {
      "epoch": 0.28328611898017,
      "grad_norm": 1.3236848667289738,
      "learning_rate": 9.917308508168712e-06,
      "loss": 1.2551,
      "step": 50
    },
    {
      "epoch": 0.28895184135977336,
      "grad_norm": 1.425578635546673,
      "learning_rate": 9.913134414088698e-06,
      "loss": 1.2441,
      "step": 51
    },
    {
      "epoch": 0.29461756373937675,
      "grad_norm": 1.171581674684746,
      "learning_rate": 9.908858470377793e-06,
      "loss": 1.2369,
      "step": 52
    },
    {
      "epoch": 0.3002832861189802,
      "grad_norm": 1.1564744150302062,
      "learning_rate": 9.904480765676617e-06,
      "loss": 1.209,
      "step": 53
    },
    {
      "epoch": 0.3059490084985836,
      "grad_norm": 1.1357504524893798,
      "learning_rate": 9.9000013907353e-06,
      "loss": 1.2152,
      "step": 54
    },
    {
      "epoch": 0.311614730878187,
      "grad_norm": 1.0498825437855333,
      "learning_rate": 9.895420438411616e-06,
      "loss": 1.2043,
      "step": 55
    },
    {
      "epoch": 0.31728045325779036,
      "grad_norm": 1.6465219316145685,
      "learning_rate": 9.890738003669029e-06,
      "loss": 1.2289,
      "step": 56
    },
    {
      "epoch": 0.32294617563739375,
      "grad_norm": 1.711551232749367,
      "learning_rate": 9.885954183574753e-06,
      "loss": 1.1831,
      "step": 57
    },
    {
      "epoch": 0.3286118980169972,
      "grad_norm": 1.2636664413259953,
      "learning_rate": 9.881069077297724e-06,
      "loss": 1.2061,
      "step": 58
    },
    {
      "epoch": 0.3342776203966006,
      "grad_norm": 1.4260407982081962,
      "learning_rate": 9.876082786106546e-06,
      "loss": 1.1998,
      "step": 59
    },
    {
      "epoch": 0.33994334277620397,
      "grad_norm": 1.95604739866899,
      "learning_rate": 9.870995413367397e-06,
      "loss": 1.2215,
      "step": 60
    },
    {
      "epoch": 0.34560906515580736,
      "grad_norm": 1.2316545141521473,
      "learning_rate": 9.865807064541878e-06,
      "loss": 1.1599,
      "step": 61
    },
    {
      "epoch": 0.35127478753541075,
      "grad_norm": 1.1178440688886253,
      "learning_rate": 9.860517847184837e-06,
      "loss": 1.1907,
      "step": 62
    },
    {
      "epoch": 0.35694050991501414,
      "grad_norm": 1.305376049095191,
      "learning_rate": 9.855127870942131e-06,
      "loss": 1.1474,
      "step": 63
    },
    {
      "epoch": 0.3626062322946176,
      "grad_norm": 1.0495122657744762,
      "learning_rate": 9.849637247548356e-06,
      "loss": 1.2424,
      "step": 64
    },
    {
      "epoch": 0.36827195467422097,
      "grad_norm": 1.141538926125254,
      "learning_rate": 9.844046090824533e-06,
      "loss": 1.1689,
      "step": 65
    },
    {
      "epoch": 0.37393767705382436,
      "grad_norm": 1.26961257521241,
      "learning_rate": 9.83835451667574e-06,
      "loss": 1.2106,
      "step": 66
    },
    {
      "epoch": 0.37960339943342775,
      "grad_norm": 1.081533609255719,
      "learning_rate": 9.832562643088724e-06,
      "loss": 1.1834,
      "step": 67
    },
    {
      "epoch": 0.38526912181303113,
      "grad_norm": 1.443083776392187,
      "learning_rate": 9.826670590129442e-06,
      "loss": 1.1505,
      "step": 68
    },
    {
      "epoch": 0.3909348441926346,
      "grad_norm": 1.135777382976375,
      "learning_rate": 9.820678479940573e-06,
      "loss": 1.1489,
      "step": 69
    },
    {
      "epoch": 0.39660056657223797,
      "grad_norm": 1.8779005247112062,
      "learning_rate": 9.814586436738998e-06,
      "loss": 1.1643,
      "step": 70
    },
    {
      "epoch": 0.40226628895184136,
      "grad_norm": 1.7980060811236744,
      "learning_rate": 9.808394586813209e-06,
      "loss": 1.1594,
      "step": 71
    },
    {
      "epoch": 0.40793201133144474,
      "grad_norm": 2.572405910372765,
      "learning_rate": 9.802103058520704e-06,
      "loss": 1.1854,
      "step": 72
    },
    {
      "epoch": 0.41359773371104813,
      "grad_norm": 2.0253448122778606,
      "learning_rate": 9.795711982285317e-06,
      "loss": 1.1826,
      "step": 73
    },
    {
      "epoch": 0.4192634560906516,
      "grad_norm": 6.483254642683073,
      "learning_rate": 9.78922149059452e-06,
      "loss": 1.1646,
      "step": 74
    },
    {
      "epoch": 0.42492917847025496,
      "grad_norm": 1.2964281102887218,
      "learning_rate": 9.782631717996675e-06,
      "loss": 1.2379,
      "step": 75
    },
    {
      "epoch": 0.43059490084985835,
      "grad_norm": 1.9517402996335103,
      "learning_rate": 9.775942801098241e-06,
      "loss": 1.164,
      "step": 76
    },
    {
      "epoch": 0.43626062322946174,
      "grad_norm": 3.064531007561859,
      "learning_rate": 9.76915487856095e-06,
      "loss": 1.1418,
      "step": 77
    },
    {
      "epoch": 0.44192634560906513,
      "grad_norm": 1.5009905490397355,
      "learning_rate": 9.762268091098926e-06,
      "loss": 1.1653,
      "step": 78
    },
    {
      "epoch": 0.4475920679886686,
      "grad_norm": 1.104518219439204,
      "learning_rate": 9.755282581475769e-06,
      "loss": 1.2025,
      "step": 79
    },
    {
      "epoch": 0.45325779036827196,
      "grad_norm": 7.807500502849419,
      "learning_rate": 9.748198494501598e-06,
      "loss": 1.148,
      "step": 80
    },
    {
      "epoch": 0.45892351274787535,
      "grad_norm": 6.196503908242147,
      "learning_rate": 9.741015977030046e-06,
      "loss": 1.1819,
      "step": 81
    },
    {
      "epoch": 0.46458923512747874,
      "grad_norm": 2.2714978855142736,
      "learning_rate": 9.733735177955219e-06,
      "loss": 1.1907,
      "step": 82
    },
    {
      "epoch": 0.4702549575070821,
      "grad_norm": 1.834743890260826,
      "learning_rate": 9.72635624820861e-06,
      "loss": 1.1381,
      "step": 83
    },
    {
      "epoch": 0.47592067988668557,
      "grad_norm": 1.28470626171519,
      "learning_rate": 9.71887934075596e-06,
      "loss": 1.2079,
      "step": 84
    },
    {
      "epoch": 0.48158640226628896,
      "grad_norm": 6.197048819949928,
      "learning_rate": 9.711304610594104e-06,
      "loss": 1.1272,
      "step": 85
    },
    {
      "epoch": 0.48725212464589235,
      "grad_norm": 3.412508821399008,
      "learning_rate": 9.703632214747742e-06,
      "loss": 1.2382,
      "step": 86
    },
    {
      "epoch": 0.49291784702549574,
      "grad_norm": 1.57336480270559,
      "learning_rate": 9.695862312266195e-06,
      "loss": 1.157,
      "step": 87
    },
    {
      "epoch": 0.4985835694050991,
      "grad_norm": 7.383065472181884,
      "learning_rate": 9.687995064220102e-06,
      "loss": 1.1684,
      "step": 88
    }
  ],
  "logging_steps": 1,
  "max_steps": 700,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 88,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.300089449218048e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|