| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.992, |
| "eval_steps": 500, |
| "global_step": 390, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0128, |
| "grad_norm": 6.233467267119553, |
| "learning_rate": 1.0256410256410257e-06, |
| "loss": 0.8108, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0256, |
| "grad_norm": 5.904196326876289, |
| "learning_rate": 2.0512820512820513e-06, |
| "loss": 0.7847, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0384, |
| "grad_norm": 6.23851753064735, |
| "learning_rate": 3.0769230769230774e-06, |
| "loss": 0.8192, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0512, |
| "grad_norm": 5.415128171631446, |
| "learning_rate": 4.102564102564103e-06, |
| "loss": 0.7829, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.064, |
| "grad_norm": 4.04380559994215, |
| "learning_rate": 5.128205128205128e-06, |
| "loss": 0.7687, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0768, |
| "grad_norm": 2.2587637623659984, |
| "learning_rate": 6.153846153846155e-06, |
| "loss": 0.7336, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0896, |
| "grad_norm": 2.046066080232856, |
| "learning_rate": 7.17948717948718e-06, |
| "loss": 0.7165, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.1024, |
| "grad_norm": 4.069511156688382, |
| "learning_rate": 8.205128205128205e-06, |
| "loss": 0.7287, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.1152, |
| "grad_norm": 4.2611185393960005, |
| "learning_rate": 9.230769230769232e-06, |
| "loss": 0.7252, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.128, |
| "grad_norm": 4.260307749363621, |
| "learning_rate": 1.0256410256410256e-05, |
| "loss": 0.6916, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.1408, |
| "grad_norm": 3.5860528236126874, |
| "learning_rate": 1.1282051282051283e-05, |
| "loss": 0.6714, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.1536, |
| "grad_norm": 2.0408678402576386, |
| "learning_rate": 1.230769230769231e-05, |
| "loss": 0.6509, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.1664, |
| "grad_norm": 1.7405635844975456, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.6462, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.1792, |
| "grad_norm": 2.089378409802929, |
| "learning_rate": 1.435897435897436e-05, |
| "loss": 0.6075, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.192, |
| "grad_norm": 1.5136568318392694, |
| "learning_rate": 1.5384615384615387e-05, |
| "loss": 0.6198, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.2048, |
| "grad_norm": 1.1263674006123536, |
| "learning_rate": 1.641025641025641e-05, |
| "loss": 0.5743, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.2176, |
| "grad_norm": 1.0303264886226746, |
| "learning_rate": 1.7435897435897438e-05, |
| "loss": 0.5916, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.2304, |
| "grad_norm": 0.9294082822832469, |
| "learning_rate": 1.8461538461538465e-05, |
| "loss": 0.5856, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.2432, |
| "grad_norm": 0.947170872331032, |
| "learning_rate": 1.9487179487179488e-05, |
| "loss": 0.5385, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.256, |
| "grad_norm": 0.8518591702550199, |
| "learning_rate": 2.0512820512820512e-05, |
| "loss": 0.568, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.2688, |
| "grad_norm": 0.8725288325536481, |
| "learning_rate": 2.153846153846154e-05, |
| "loss": 0.5631, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2816, |
| "grad_norm": 1.0592322309688051, |
| "learning_rate": 2.2564102564102566e-05, |
| "loss": 0.5431, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.2944, |
| "grad_norm": 0.7594966772209739, |
| "learning_rate": 2.3589743589743593e-05, |
| "loss": 0.5403, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.3072, |
| "grad_norm": 1.0147100967231533, |
| "learning_rate": 2.461538461538462e-05, |
| "loss": 0.5346, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 0.8092853991109913, |
| "learning_rate": 2.5641025641025646e-05, |
| "loss": 0.5391, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.3328, |
| "grad_norm": 0.7480207959415263, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.5183, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.3456, |
| "grad_norm": 0.6725540446498556, |
| "learning_rate": 2.7692307692307694e-05, |
| "loss": 0.5211, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.3584, |
| "grad_norm": 0.752524063992389, |
| "learning_rate": 2.871794871794872e-05, |
| "loss": 0.5116, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.3712, |
| "grad_norm": 0.8714792547605933, |
| "learning_rate": 2.9743589743589747e-05, |
| "loss": 0.5252, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.384, |
| "grad_norm": 0.587686864084582, |
| "learning_rate": 3.0769230769230774e-05, |
| "loss": 0.5117, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.3968, |
| "grad_norm": 0.7359163335855232, |
| "learning_rate": 3.1794871794871795e-05, |
| "loss": 0.5165, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.4096, |
| "grad_norm": 0.683074670763304, |
| "learning_rate": 3.282051282051282e-05, |
| "loss": 0.5302, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.4224, |
| "grad_norm": 0.7899607579501703, |
| "learning_rate": 3.384615384615385e-05, |
| "loss": 0.5131, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.4352, |
| "grad_norm": 0.7203007550703167, |
| "learning_rate": 3.4871794871794875e-05, |
| "loss": 0.4912, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.448, |
| "grad_norm": 1.0497807505193113, |
| "learning_rate": 3.58974358974359e-05, |
| "loss": 0.5064, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.4608, |
| "grad_norm": 1.1723249764639616, |
| "learning_rate": 3.692307692307693e-05, |
| "loss": 0.5085, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.4736, |
| "grad_norm": 0.8733264133015174, |
| "learning_rate": 3.794871794871795e-05, |
| "loss": 0.5093, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.4864, |
| "grad_norm": 1.2145299440192203, |
| "learning_rate": 3.8974358974358976e-05, |
| "loss": 0.5151, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.4992, |
| "grad_norm": 0.8295361766624695, |
| "learning_rate": 4e-05, |
| "loss": 0.4846, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.512, |
| "grad_norm": 0.8655831204630615, |
| "learning_rate": 3.9999198907597046e-05, |
| "loss": 0.4902, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.5248, |
| "grad_norm": 0.7831772024924701, |
| "learning_rate": 3.9996795694563096e-05, |
| "loss": 0.499, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.5376, |
| "grad_norm": 1.084114214652019, |
| "learning_rate": 3.999279055341771e-05, |
| "loss": 0.4961, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.5504, |
| "grad_norm": 0.9639426639425154, |
| "learning_rate": 3.998718380500971e-05, |
| "loss": 0.5058, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.5632, |
| "grad_norm": 0.6394336440537755, |
| "learning_rate": 3.997997589849145e-05, |
| "loss": 0.4806, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.576, |
| "grad_norm": 0.7819635417728371, |
| "learning_rate": 3.9971167411282835e-05, |
| "loss": 0.5032, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.5888, |
| "grad_norm": 0.7955295880993887, |
| "learning_rate": 3.99607590490251e-05, |
| "loss": 0.5021, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.6016, |
| "grad_norm": 0.6671700101502559, |
| "learning_rate": 3.9948751645524235e-05, |
| "loss": 0.4857, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.6144, |
| "grad_norm": 0.5514514988706577, |
| "learning_rate": 3.9935146162684206e-05, |
| "loss": 0.4747, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.6272, |
| "grad_norm": 0.6556176731195519, |
| "learning_rate": 3.9919943690429906e-05, |
| "loss": 0.4726, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 0.6240651998663801, |
| "learning_rate": 3.9903145446619837e-05, |
| "loss": 0.4884, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.6528, |
| "grad_norm": 0.5551386581113544, |
| "learning_rate": 3.9884752776948564e-05, |
| "loss": 0.4906, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.6656, |
| "grad_norm": 0.5904946318868938, |
| "learning_rate": 3.9864767154838864e-05, |
| "loss": 0.4821, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.6784, |
| "grad_norm": 0.538919779997332, |
| "learning_rate": 3.9843190181323744e-05, |
| "loss": 0.4823, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.6912, |
| "grad_norm": 0.5333470534988501, |
| "learning_rate": 3.982002358491817e-05, |
| "loss": 0.4719, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.704, |
| "grad_norm": 0.4850010563636148, |
| "learning_rate": 3.979526922148058e-05, |
| "loss": 0.4665, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.7168, |
| "grad_norm": 0.44539126503300186, |
| "learning_rate": 3.9768929074064206e-05, |
| "loss": 0.4743, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.7296, |
| "grad_norm": 0.5032408043020593, |
| "learning_rate": 3.9741005252758255e-05, |
| "loss": 0.4723, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.7424, |
| "grad_norm": 0.46974810278602536, |
| "learning_rate": 3.971149999451886e-05, |
| "loss": 0.4726, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.7552, |
| "grad_norm": 0.4608092351714356, |
| "learning_rate": 3.9680415662989806e-05, |
| "loss": 0.4671, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.768, |
| "grad_norm": 0.45516143398106607, |
| "learning_rate": 3.9647754748313294e-05, |
| "loss": 0.466, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.7808, |
| "grad_norm": 0.4744623675372948, |
| "learning_rate": 3.96135198669304e-05, |
| "loss": 0.4741, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.7936, |
| "grad_norm": 0.43856795287418615, |
| "learning_rate": 3.957771376137144e-05, |
| "loss": 0.4601, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.8064, |
| "grad_norm": 0.3761545034457208, |
| "learning_rate": 3.954033930003634e-05, |
| "loss": 0.4706, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.8192, |
| "grad_norm": 0.41810263359022676, |
| "learning_rate": 3.9501399476964806e-05, |
| "loss": 0.467, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.832, |
| "grad_norm": 0.40583434815588343, |
| "learning_rate": 3.946089741159648e-05, |
| "loss": 0.4695, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.8448, |
| "grad_norm": 0.3914490774493594, |
| "learning_rate": 3.9418836348521045e-05, |
| "loss": 0.4748, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.8576, |
| "grad_norm": 0.389130816543567, |
| "learning_rate": 3.937521965721831e-05, |
| "loss": 0.4537, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.8704, |
| "grad_norm": 0.4581445439882817, |
| "learning_rate": 3.933005083178828e-05, |
| "loss": 0.4586, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.8832, |
| "grad_norm": 0.5126383880525731, |
| "learning_rate": 3.928333349067125e-05, |
| "loss": 0.4724, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.896, |
| "grad_norm": 0.5181747207489492, |
| "learning_rate": 3.923507137635792e-05, |
| "loss": 0.4674, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.9088, |
| "grad_norm": 0.4295166893762927, |
| "learning_rate": 3.9185268355089606e-05, |
| "loss": 0.4508, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.9216, |
| "grad_norm": 0.4868403059192456, |
| "learning_rate": 3.913392841654851e-05, |
| "loss": 0.4655, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.9344, |
| "grad_norm": 0.5989861771380917, |
| "learning_rate": 3.9081055673538093e-05, |
| "loss": 0.4661, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.9472, |
| "grad_norm": 0.5656347544570441, |
| "learning_rate": 3.902665436165364e-05, |
| "loss": 0.4803, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.46441753698075355, |
| "learning_rate": 3.897072883894291e-05, |
| "loss": 0.4639, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.9728, |
| "grad_norm": 0.510393828467784, |
| "learning_rate": 3.8913283585557054e-05, |
| "loss": 0.4708, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.9856, |
| "grad_norm": 0.48942431176760787, |
| "learning_rate": 3.885432320339167e-05, |
| "loss": 0.4524, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.9984, |
| "grad_norm": 0.4968874160759365, |
| "learning_rate": 3.879385241571817e-05, |
| "loss": 0.4643, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.0112, |
| "grad_norm": 0.8861501634736185, |
| "learning_rate": 3.873187606680543e-05, |
| "loss": 0.8012, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.024, |
| "grad_norm": 1.231652143097992, |
| "learning_rate": 3.866839912153168e-05, |
| "loss": 0.4398, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.0368, |
| "grad_norm": 0.6712625292151638, |
| "learning_rate": 3.860342666498677e-05, |
| "loss": 0.4121, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.0496, |
| "grad_norm": 1.1356345596864996, |
| "learning_rate": 3.853696390206484e-05, |
| "loss": 0.4285, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.0624, |
| "grad_norm": 0.6662175711028467, |
| "learning_rate": 3.846901615704734e-05, |
| "loss": 0.4184, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.0752, |
| "grad_norm": 0.8930266388840874, |
| "learning_rate": 3.839958887317649e-05, |
| "loss": 0.4219, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.088, |
| "grad_norm": 0.5713183757359342, |
| "learning_rate": 3.832868761221926e-05, |
| "loss": 0.4266, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.1008, |
| "grad_norm": 0.6446930790537315, |
| "learning_rate": 3.825631805402182e-05, |
| "loss": 0.4306, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.1136, |
| "grad_norm": 0.5449251995753388, |
| "learning_rate": 3.818248599605448e-05, |
| "loss": 0.4363, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.1264, |
| "grad_norm": 0.49840540333271255, |
| "learning_rate": 3.810719735294731e-05, |
| "loss": 0.4198, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.1392, |
| "grad_norm": 0.636524520280604, |
| "learning_rate": 3.8030458156016326e-05, |
| "loss": 0.427, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.152, |
| "grad_norm": 0.5763577377012762, |
| "learning_rate": 3.795227455278029e-05, |
| "loss": 0.434, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.1648, |
| "grad_norm": 0.5518205698347075, |
| "learning_rate": 3.787265280646825e-05, |
| "loss": 0.4081, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.1776, |
| "grad_norm": 0.5685776152816718, |
| "learning_rate": 3.7791599295517825e-05, |
| "loss": 0.4277, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.1904, |
| "grad_norm": 0.6757526217251246, |
| "learning_rate": 3.7709120513064196e-05, |
| "loss": 0.4176, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.2032, |
| "grad_norm": 0.536729337543259, |
| "learning_rate": 3.762522306641998e-05, |
| "loss": 0.4153, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.216, |
| "grad_norm": 0.42544799934531624, |
| "learning_rate": 3.7539913676545874e-05, |
| "loss": 0.4172, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.2288000000000001, |
| "grad_norm": 0.42010888741086944, |
| "learning_rate": 3.745319917751229e-05, |
| "loss": 0.4102, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.2416, |
| "grad_norm": 0.43678400955846786, |
| "learning_rate": 3.736508651595188e-05, |
| "loss": 0.4161, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.2544, |
| "grad_norm": 0.5082670907410142, |
| "learning_rate": 3.727558275050301e-05, |
| "loss": 0.4197, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.2671999999999999, |
| "grad_norm": 0.357137321779293, |
| "learning_rate": 3.718469505124434e-05, |
| "loss": 0.4048, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.28, |
| "grad_norm": 0.40817714129576205, |
| "learning_rate": 3.709243069912041e-05, |
| "loss": 0.419, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.2928, |
| "grad_norm": 0.47645094237883096, |
| "learning_rate": 3.699879708535838e-05, |
| "loss": 0.4417, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.3056, |
| "grad_norm": 0.39257867572254557, |
| "learning_rate": 3.69038017108759e-05, |
| "loss": 0.4215, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.3184, |
| "grad_norm": 0.4206670690315583, |
| "learning_rate": 3.680745218568026e-05, |
| "loss": 0.4201, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.3312, |
| "grad_norm": 0.3865293036754881, |
| "learning_rate": 3.6709756228258735e-05, |
| "loss": 0.4347, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.3439999999999999, |
| "grad_norm": 0.39367421741174213, |
| "learning_rate": 3.6610721664960236e-05, |
| "loss": 0.4206, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.3568, |
| "grad_norm": 0.4656673614605536, |
| "learning_rate": 3.65103564293684e-05, |
| "loss": 0.3881, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.3696, |
| "grad_norm": 0.35335946805147883, |
| "learning_rate": 3.640866856166601e-05, |
| "loss": 0.4118, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.3824, |
| "grad_norm": 0.4628742760548775, |
| "learning_rate": 3.6305666207990886e-05, |
| "loss": 0.416, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.3952, |
| "grad_norm": 0.39568236935918416, |
| "learning_rate": 3.6201357619783336e-05, |
| "loss": 0.4135, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.408, |
| "grad_norm": 0.5466276296066325, |
| "learning_rate": 3.609575115312511e-05, |
| "loss": 0.417, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.4208, |
| "grad_norm": 0.5090028901174583, |
| "learning_rate": 3.598885526807003e-05, |
| "loss": 0.4111, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.4336, |
| "grad_norm": 0.48855888787545754, |
| "learning_rate": 3.5880678527966224e-05, |
| "loss": 0.4036, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.4464000000000001, |
| "grad_norm": 0.45101035460617356, |
| "learning_rate": 3.577122959877017e-05, |
| "loss": 0.4207, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.4592, |
| "grad_norm": 0.5737035543815443, |
| "learning_rate": 3.566051724835245e-05, |
| "loss": 0.4089, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.472, |
| "grad_norm": 0.39146919766674915, |
| "learning_rate": 3.554855034579532e-05, |
| "loss": 0.4164, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.4848, |
| "grad_norm": 0.3630072800119681, |
| "learning_rate": 3.5435337860682304e-05, |
| "loss": 0.3981, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.4976, |
| "grad_norm": 0.4547222351106011, |
| "learning_rate": 3.532088886237956e-05, |
| "loss": 0.4196, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.5104, |
| "grad_norm": 0.37216593927043445, |
| "learning_rate": 3.520521251930941e-05, |
| "loss": 0.4107, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.5232, |
| "grad_norm": 0.4050307805559563, |
| "learning_rate": 3.5088318098215805e-05, |
| "loss": 0.4122, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.536, |
| "grad_norm": 0.38269243213429416, |
| "learning_rate": 3.497021496342203e-05, |
| "loss": 0.4073, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.5488, |
| "grad_norm": 0.36260299781848226, |
| "learning_rate": 3.485091257608047e-05, |
| "loss": 0.4252, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.5615999999999999, |
| "grad_norm": 0.4438690588669624, |
| "learning_rate": 3.473042049341474e-05, |
| "loss": 0.4122, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.5744, |
| "grad_norm": 0.41544202097631083, |
| "learning_rate": 3.4608748367954064e-05, |
| "loss": 0.416, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.5872000000000002, |
| "grad_norm": 0.641936060850495, |
| "learning_rate": 3.4485905946759965e-05, |
| "loss": 0.4205, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.5487268480206667, |
| "learning_rate": 3.4361903070645484e-05, |
| "loss": 0.4185, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.6128, |
| "grad_norm": 0.3876577101511856, |
| "learning_rate": 3.423674967338681e-05, |
| "loss": 0.4103, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.6256, |
| "grad_norm": 0.42818180937132194, |
| "learning_rate": 3.411045578092754e-05, |
| "loss": 0.4205, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.6383999999999999, |
| "grad_norm": 0.2680370619157418, |
| "learning_rate": 3.398303151057543e-05, |
| "loss": 0.4256, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.6512, |
| "grad_norm": 0.39573261758619904, |
| "learning_rate": 3.385448707019199e-05, |
| "loss": 0.4168, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.6640000000000001, |
| "grad_norm": 0.3543678918298844, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.411, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.6768, |
| "grad_norm": 0.3717876519061496, |
| "learning_rate": 3.359407895863199e-05, |
| "loss": 0.4166, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.6896, |
| "grad_norm": 0.41594670318211513, |
| "learning_rate": 3.34622361485514e-05, |
| "loss": 0.4185, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.7024, |
| "grad_norm": 0.39133110888303313, |
| "learning_rate": 3.332931488896029e-05, |
| "loss": 0.4139, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.7151999999999998, |
| "grad_norm": 0.43836287131270635, |
| "learning_rate": 3.319532582807977e-05, |
| "loss": 0.4159, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.728, |
| "grad_norm": 0.3757188844435322, |
| "learning_rate": 3.30602796996717e-05, |
| "loss": 0.4196, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.7408000000000001, |
| "grad_norm": 0.34971023678161894, |
| "learning_rate": 3.2924187322178865e-05, |
| "loss": 0.3985, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.7536, |
| "grad_norm": 0.3512090580148879, |
| "learning_rate": 3.278705959785821e-05, |
| "loss": 0.4073, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.7664, |
| "grad_norm": 0.34629713258117534, |
| "learning_rate": 3.2648907511907544e-05, |
| "loss": 0.4201, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.7792, |
| "grad_norm": 0.46977338380502903, |
| "learning_rate": 3.250974213158555e-05, |
| "loss": 0.4087, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.792, |
| "grad_norm": 0.40101157445187413, |
| "learning_rate": 3.23695746053251e-05, |
| "loss": 0.4147, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.8048, |
| "grad_norm": 0.47691626196033376, |
| "learning_rate": 3.222841616184025e-05, |
| "loss": 0.4004, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.8176, |
| "grad_norm": 0.3435761452724062, |
| "learning_rate": 3.208627810922665e-05, |
| "loss": 0.4126, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.8304, |
| "grad_norm": 0.3645128134974153, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.4007, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.8432, |
| "grad_norm": 0.35491499023320583, |
| "learning_rate": 3.1799108800462466e-05, |
| "loss": 0.4087, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.8559999999999999, |
| "grad_norm": 0.3734869903646821, |
| "learning_rate": 3.1654100549227024e-05, |
| "loss": 0.419, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.8688, |
| "grad_norm": 0.30082037154900365, |
| "learning_rate": 3.1508158696850275e-05, |
| "loss": 0.4138, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.8816000000000002, |
| "grad_norm": 0.4590029162532056, |
| "learning_rate": 3.136129493462312e-05, |
| "loss": 0.4081, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.8944, |
| "grad_norm": 0.29447660882935095, |
| "learning_rate": 3.121352102768998e-05, |
| "loss": 0.4009, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.9072, |
| "grad_norm": 0.48573583205938736, |
| "learning_rate": 3.106484881410628e-05, |
| "loss": 0.4258, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.3631988369037435, |
| "learning_rate": 3.091529020389009e-05, |
| "loss": 0.4203, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.9327999999999999, |
| "grad_norm": 0.3332268388037149, |
| "learning_rate": 3.076485717806808e-05, |
| "loss": 0.4006, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.9456, |
| "grad_norm": 0.35727991875553283, |
| "learning_rate": 3.061356178771564e-05, |
| "loss": 0.4094, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.9584000000000001, |
| "grad_norm": 0.3559631224614753, |
| "learning_rate": 3.0461416152991555e-05, |
| "loss": 0.4213, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.9712, |
| "grad_norm": 0.37804917682629285, |
| "learning_rate": 3.0308432462167045e-05, |
| "loss": 0.4199, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.984, |
| "grad_norm": 0.37950577893585047, |
| "learning_rate": 3.015462297064936e-05, |
| "loss": 0.3961, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.9968, |
| "grad_norm": 0.32608515028658464, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.4055, |
| "step": 156 |
| }, |
| { |
| "epoch": 2.0096, |
| "grad_norm": 0.7669011198805715, |
| "learning_rate": 2.98445759369477e-05, |
| "loss": 0.7441, |
| "step": 157 |
| }, |
| { |
| "epoch": 2.0224, |
| "grad_norm": 0.6652887388218975, |
| "learning_rate": 2.9688363232396056e-05, |
| "loss": 0.3507, |
| "step": 158 |
| }, |
| { |
| "epoch": 2.0352, |
| "grad_norm": 0.7385081279732356, |
| "learning_rate": 2.9531374400426158e-05, |
| "loss": 0.3736, |
| "step": 159 |
| }, |
| { |
| "epoch": 2.048, |
| "grad_norm": 0.7320172922150582, |
| "learning_rate": 2.9373622017294075e-05, |
| "loss": 0.3587, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.0608, |
| "grad_norm": 0.49807265418424446, |
| "learning_rate": 2.9215118720423375e-05, |
| "loss": 0.3623, |
| "step": 161 |
| }, |
| { |
| "epoch": 2.0736, |
| "grad_norm": 0.48163598394041085, |
| "learning_rate": 2.9055877207392752e-05, |
| "loss": 0.3659, |
| "step": 162 |
| }, |
| { |
| "epoch": 2.0864, |
| "grad_norm": 0.48731905853762586, |
| "learning_rate": 2.8895910234918828e-05, |
| "loss": 0.3476, |
| "step": 163 |
| }, |
| { |
| "epoch": 2.0992, |
| "grad_norm": 0.42827838103728594, |
| "learning_rate": 2.873523061783426e-05, |
| "loss": 0.3553, |
| "step": 164 |
| }, |
| { |
| "epoch": 2.112, |
| "grad_norm": 0.34579240618250967, |
| "learning_rate": 2.8573851228061084e-05, |
| "loss": 0.36, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.1248, |
| "grad_norm": 0.38008557988183295, |
| "learning_rate": 2.8411784993579633e-05, |
| "loss": 0.3481, |
| "step": 166 |
| }, |
| { |
| "epoch": 2.1376, |
| "grad_norm": 0.3376533317787802, |
| "learning_rate": 2.8249044897392814e-05, |
| "loss": 0.3645, |
| "step": 167 |
| }, |
| { |
| "epoch": 2.1504, |
| "grad_norm": 0.348488923971927, |
| "learning_rate": 2.80856439764861e-05, |
| "loss": 0.3635, |
| "step": 168 |
| }, |
| { |
| "epoch": 2.1632, |
| "grad_norm": 0.3599933955892379, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.3542, |
| "step": 169 |
| }, |
| { |
| "epoch": 2.176, |
| "grad_norm": 0.333690464884174, |
| "learning_rate": 2.77569120720971e-05, |
| "loss": 0.3556, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.1888, |
| "grad_norm": 0.3350844064785929, |
| "learning_rate": 2.7591607423077932e-05, |
| "loss": 0.3647, |
| "step": 171 |
| }, |
| { |
| "epoch": 2.2016, |
| "grad_norm": 0.37148148955239746, |
| "learning_rate": 2.7425694616155474e-05, |
| "loss": 0.3554, |
| "step": 172 |
| }, |
| { |
| "epoch": 2.2144, |
| "grad_norm": 0.3280010701875189, |
| "learning_rate": 2.7259186942478656e-05, |
| "loss": 0.3532, |
| "step": 173 |
| }, |
| { |
| "epoch": 2.2272, |
| "grad_norm": 0.332491142609001, |
| "learning_rate": 2.7092097740850712e-05, |
| "loss": 0.3544, |
| "step": 174 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.2881390860119507, |
| "learning_rate": 2.692444039666066e-05, |
| "loss": 0.3455, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.2528, |
| "grad_norm": 0.32116573700586765, |
| "learning_rate": 2.6756228340810946e-05, |
| "loss": 0.3621, |
| "step": 176 |
| }, |
| { |
| "epoch": 2.2656, |
| "grad_norm": 0.30659625060436463, |
| "learning_rate": 2.6587475048641596e-05, |
| "loss": 0.3419, |
| "step": 177 |
| }, |
| { |
| "epoch": 2.2784, |
| "grad_norm": 0.30519135259940783, |
| "learning_rate": 2.6418194038850634e-05, |
| "loss": 0.3524, |
| "step": 178 |
| }, |
| { |
| "epoch": 2.2912, |
| "grad_norm": 0.2980472772642158, |
| "learning_rate": 2.624839887241115e-05, |
| "loss": 0.3493, |
| "step": 179 |
| }, |
| { |
| "epoch": 2.304, |
| "grad_norm": 0.32253352509389444, |
| "learning_rate": 2.607810315148494e-05, |
| "loss": 0.3587, |
| "step": 180 |
| }, |
| { |
| "epoch": 2.3168, |
| "grad_norm": 0.27321091859832713, |
| "learning_rate": 2.5907320518332827e-05, |
| "loss": 0.35, |
| "step": 181 |
| }, |
| { |
| "epoch": 2.3296, |
| "grad_norm": 0.2985443161344337, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.3582, |
| "step": 182 |
| }, |
| { |
| "epoch": 2.3424, |
| "grad_norm": 0.323527392124704, |
| "learning_rate": 2.5564349278329056e-05, |
| "loss": 0.3559, |
| "step": 183 |
| }, |
| { |
| "epoch": 2.3552, |
| "grad_norm": 0.2948657008807223, |
| "learning_rate": 2.539218814664288e-05, |
| "loss": 0.3568, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.368, |
| "grad_norm": 0.2991813048805828, |
| "learning_rate": 2.521959505086075e-05, |
| "loss": 0.3532, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.3808, |
| "grad_norm": 0.2783668607947485, |
| "learning_rate": 2.5046583817284437e-05, |
| "loss": 0.3458, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.3936, |
| "grad_norm": 0.28042445346441125, |
| "learning_rate": 2.487316830571244e-05, |
| "loss": 0.3421, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.4064, |
| "grad_norm": 0.3471962042525829, |
| "learning_rate": 2.4699362408329646e-05, |
| "loss": 0.3453, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.4192, |
| "grad_norm": 0.2823227915730697, |
| "learning_rate": 2.4525180048594452e-05, |
| "loss": 0.3432, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.432, |
| "grad_norm": 0.3530570165142917, |
| "learning_rate": 2.435063518012335e-05, |
| "loss": 0.3548, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.4448, |
| "grad_norm": 0.3056427441596844, |
| "learning_rate": 2.4175741785573177e-05, |
| "loss": 0.3613, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.4576000000000002, |
| "grad_norm": 0.3086741723859052, |
| "learning_rate": 2.4000513875520892e-05, |
| "loss": 0.3417, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.4704, |
| "grad_norm": 0.31428784464812703, |
| "learning_rate": 2.3824965487341247e-05, |
| "loss": 0.3581, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.4832, |
| "grad_norm": 0.2859896069564287, |
| "learning_rate": 2.3649110684082258e-05, |
| "loss": 0.3474, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.496, |
| "grad_norm": 0.27059096851688597, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.3442, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.5088, |
| "grad_norm": 0.3326884769262199, |
| "learning_rate": 2.3296538206123134e-05, |
| "loss": 0.3683, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.5216, |
| "grad_norm": 0.29505923783406746, |
| "learning_rate": 2.311984877573636e-05, |
| "loss": 0.3477, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.5343999999999998, |
| "grad_norm": 0.32073943104242425, |
| "learning_rate": 2.2942909416634326e-05, |
| "loss": 0.3595, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.5472, |
| "grad_norm": 0.2840222270592631, |
| "learning_rate": 2.2765734303294666e-05, |
| "loss": 0.3473, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.3418714380147271, |
| "learning_rate": 2.2588337629081107e-05, |
| "loss": 0.3521, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.5728, |
| "grad_norm": 0.2797908282100119, |
| "learning_rate": 2.2410733605106462e-05, |
| "loss": 0.3543, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.5856, |
| "grad_norm": 0.36420961265518004, |
| "learning_rate": 2.2232936459094158e-05, |
| "loss": 0.3533, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.5984, |
| "grad_norm": 0.314568754851869, |
| "learning_rate": 2.205496043423849e-05, |
| "loss": 0.3575, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.6112, |
| "grad_norm": 0.3342111687945849, |
| "learning_rate": 2.1876819788063586e-05, |
| "loss": 0.346, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.624, |
| "grad_norm": 0.3064734258766498, |
| "learning_rate": 2.16985287912813e-05, |
| "loss": 0.3671, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.6368, |
| "grad_norm": 0.29854230408342164, |
| "learning_rate": 2.1520101726647922e-05, |
| "loss": 0.3424, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.6496, |
| "grad_norm": 0.2827054055912993, |
| "learning_rate": 2.1341552887820048e-05, |
| "loss": 0.3526, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.6624, |
| "grad_norm": 0.28275958845846305, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.3441, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.6752000000000002, |
| "grad_norm": 0.312064605803253, |
| "learning_rate": 2.0984147109837564e-05, |
| "loss": 0.3598, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.6879999999999997, |
| "grad_norm": 0.28452488535848225, |
| "learning_rate": 2.0805318802188307e-05, |
| "loss": 0.3595, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.7008, |
| "grad_norm": 0.2744945291582551, |
| "learning_rate": 2.0626425981061608e-05, |
| "loss": 0.3514, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.7136, |
| "grad_norm": 0.2851263975583895, |
| "learning_rate": 2.0447482977425465e-05, |
| "loss": 0.3578, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.7264, |
| "grad_norm": 0.24934062786332797, |
| "learning_rate": 2.0268504126267952e-05, |
| "loss": 0.3658, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.7392, |
| "grad_norm": 0.32531564637396143, |
| "learning_rate": 2.008950376544887e-05, |
| "loss": 0.3475, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.752, |
| "grad_norm": 0.24195021899743993, |
| "learning_rate": 1.9910496234551132e-05, |
| "loss": 0.3509, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.7648, |
| "grad_norm": 0.33103816433572836, |
| "learning_rate": 1.9731495873732055e-05, |
| "loss": 0.3541, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.7776, |
| "grad_norm": 0.2269071264266118, |
| "learning_rate": 1.9552517022574542e-05, |
| "loss": 0.3602, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.7904, |
| "grad_norm": 0.29798422862653967, |
| "learning_rate": 1.93735740189384e-05, |
| "loss": 0.3433, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.8032, |
| "grad_norm": 0.25616892838087085, |
| "learning_rate": 1.9194681197811703e-05, |
| "loss": 0.3547, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.816, |
| "grad_norm": 0.2569631346633302, |
| "learning_rate": 1.901585289016244e-05, |
| "loss": 0.3678, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.8288, |
| "grad_norm": 0.32778475734128903, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.3523, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.8416, |
| "grad_norm": 0.2369726285253331, |
| "learning_rate": 1.8658447112179952e-05, |
| "loss": 0.3449, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.8544, |
| "grad_norm": 0.2810512494548086, |
| "learning_rate": 1.8479898273352084e-05, |
| "loss": 0.3631, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.8672, |
| "grad_norm": 0.24745825973149008, |
| "learning_rate": 1.83014712087187e-05, |
| "loss": 0.3526, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.2586808546029739, |
| "learning_rate": 1.8123180211936417e-05, |
| "loss": 0.3428, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.8928000000000003, |
| "grad_norm": 0.23360130898404668, |
| "learning_rate": 1.794503956576152e-05, |
| "loss": 0.345, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.9055999999999997, |
| "grad_norm": 0.240900500337055, |
| "learning_rate": 1.776706354090585e-05, |
| "loss": 0.3487, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.9184, |
| "grad_norm": 0.23747073767970822, |
| "learning_rate": 1.758926639489354e-05, |
| "loss": 0.356, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.9312, |
| "grad_norm": 0.2201875141981078, |
| "learning_rate": 1.7411662370918893e-05, |
| "loss": 0.3578, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.944, |
| "grad_norm": 0.23449074731345043, |
| "learning_rate": 1.7234265696705344e-05, |
| "loss": 0.3619, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.9568, |
| "grad_norm": 0.23953740851380273, |
| "learning_rate": 1.7057090583365678e-05, |
| "loss": 0.3587, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.9696, |
| "grad_norm": 0.20232912490254967, |
| "learning_rate": 1.6880151224263646e-05, |
| "loss": 0.3447, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.9824, |
| "grad_norm": 0.25646246387237664, |
| "learning_rate": 1.6703461793876876e-05, |
| "loss": 0.359, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.9952, |
| "grad_norm": 0.22729675252261325, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.3487, |
| "step": 234 |
| }, |
| { |
| "epoch": 3.008, |
| "grad_norm": 0.6215353715874271, |
| "learning_rate": 1.635088931591775e-05, |
| "loss": 0.6241, |
| "step": 235 |
| }, |
| { |
| "epoch": 3.0208, |
| "grad_norm": 0.3696909872798802, |
| "learning_rate": 1.6175034512658753e-05, |
| "loss": 0.3034, |
| "step": 236 |
| }, |
| { |
| "epoch": 3.0336, |
| "grad_norm": 0.5375575773965741, |
| "learning_rate": 1.5999486124479115e-05, |
| "loss": 0.3032, |
| "step": 237 |
| }, |
| { |
| "epoch": 3.0464, |
| "grad_norm": 0.38516960983552195, |
| "learning_rate": 1.5824258214426833e-05, |
| "loss": 0.3061, |
| "step": 238 |
| }, |
| { |
| "epoch": 3.0592, |
| "grad_norm": 0.4742991871870546, |
| "learning_rate": 1.5649364819876655e-05, |
| "loss": 0.2937, |
| "step": 239 |
| }, |
| { |
| "epoch": 3.072, |
| "grad_norm": 0.4135969453525089, |
| "learning_rate": 1.547481995140556e-05, |
| "loss": 0.3138, |
| "step": 240 |
| }, |
| { |
| "epoch": 3.0848, |
| "grad_norm": 0.42172855815907073, |
| "learning_rate": 1.5300637591670357e-05, |
| "loss": 0.308, |
| "step": 241 |
| }, |
| { |
| "epoch": 3.0976, |
| "grad_norm": 0.3771894608374879, |
| "learning_rate": 1.5126831694287564e-05, |
| "loss": 0.3076, |
| "step": 242 |
| }, |
| { |
| "epoch": 3.1104, |
| "grad_norm": 0.4163462974642452, |
| "learning_rate": 1.4953416182715566e-05, |
| "loss": 0.3071, |
| "step": 243 |
| }, |
| { |
| "epoch": 3.1232, |
| "grad_norm": 0.3704830350749835, |
| "learning_rate": 1.478040494913926e-05, |
| "loss": 0.3056, |
| "step": 244 |
| }, |
| { |
| "epoch": 3.136, |
| "grad_norm": 0.36115694049544883, |
| "learning_rate": 1.460781185335713e-05, |
| "loss": 0.3047, |
| "step": 245 |
| }, |
| { |
| "epoch": 3.1488, |
| "grad_norm": 0.3358650426314368, |
| "learning_rate": 1.443565072167095e-05, |
| "loss": 0.2925, |
| "step": 246 |
| }, |
| { |
| "epoch": 3.1616, |
| "grad_norm": 0.3744792395559403, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.2965, |
| "step": 247 |
| }, |
| { |
| "epoch": 3.1744, |
| "grad_norm": 0.3361916360656167, |
| "learning_rate": 1.409267948166718e-05, |
| "loss": 0.3127, |
| "step": 248 |
| }, |
| { |
| "epoch": 3.1872, |
| "grad_norm": 0.3091988714486053, |
| "learning_rate": 1.3921896848515064e-05, |
| "loss": 0.3035, |
| "step": 249 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.3277119477509228, |
| "learning_rate": 1.3751601127588849e-05, |
| "loss": 0.299, |
| "step": 250 |
| }, |
| { |
| "epoch": 3.2128, |
| "grad_norm": 0.2873917613244393, |
| "learning_rate": 1.3581805961149371e-05, |
| "loss": 0.2928, |
| "step": 251 |
| }, |
| { |
| "epoch": 3.2256, |
| "grad_norm": 0.28358641106288984, |
| "learning_rate": 1.341252495135841e-05, |
| "loss": 0.291, |
| "step": 252 |
| }, |
| { |
| "epoch": 3.2384, |
| "grad_norm": 0.27138723040994367, |
| "learning_rate": 1.324377165918906e-05, |
| "loss": 0.2888, |
| "step": 253 |
| }, |
| { |
| "epoch": 3.2512, |
| "grad_norm": 0.2753898544881945, |
| "learning_rate": 1.3075559603339354e-05, |
| "loss": 0.2953, |
| "step": 254 |
| }, |
| { |
| "epoch": 3.2640000000000002, |
| "grad_norm": 0.2647672143999282, |
| "learning_rate": 1.2907902259149287e-05, |
| "loss": 0.2912, |
| "step": 255 |
| }, |
| { |
| "epoch": 3.2768, |
| "grad_norm": 0.2879829562601937, |
| "learning_rate": 1.274081305752135e-05, |
| "loss": 0.3111, |
| "step": 256 |
| }, |
| { |
| "epoch": 3.2896, |
| "grad_norm": 0.2685828458948742, |
| "learning_rate": 1.2574305383844528e-05, |
| "loss": 0.2899, |
| "step": 257 |
| }, |
| { |
| "epoch": 3.3024, |
| "grad_norm": 0.2618038733902564, |
| "learning_rate": 1.2408392576922075e-05, |
| "loss": 0.2916, |
| "step": 258 |
| }, |
| { |
| "epoch": 3.3152, |
| "grad_norm": 0.22753375801417833, |
| "learning_rate": 1.2243087927902905e-05, |
| "loss": 0.305, |
| "step": 259 |
| }, |
| { |
| "epoch": 3.328, |
| "grad_norm": 0.2930244026853143, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.3039, |
| "step": 260 |
| }, |
| { |
| "epoch": 3.3407999999999998, |
| "grad_norm": 0.22977159381404028, |
| "learning_rate": 1.1914356023513904e-05, |
| "loss": 0.2878, |
| "step": 261 |
| }, |
| { |
| "epoch": 3.3536, |
| "grad_norm": 0.24270939761725097, |
| "learning_rate": 1.1750955102607193e-05, |
| "loss": 0.2949, |
| "step": 262 |
| }, |
| { |
| "epoch": 3.3664, |
| "grad_norm": 0.24447085983026712, |
| "learning_rate": 1.1588215006420374e-05, |
| "loss": 0.2993, |
| "step": 263 |
| }, |
| { |
| "epoch": 3.3792, |
| "grad_norm": 0.23152423854583742, |
| "learning_rate": 1.1426148771938915e-05, |
| "loss": 0.2944, |
| "step": 264 |
| }, |
| { |
| "epoch": 3.392, |
| "grad_norm": 0.22985233893166343, |
| "learning_rate": 1.1264769382165748e-05, |
| "loss": 0.2955, |
| "step": 265 |
| }, |
| { |
| "epoch": 3.4048, |
| "grad_norm": 0.23372953797080312, |
| "learning_rate": 1.110408976508118e-05, |
| "loss": 0.288, |
| "step": 266 |
| }, |
| { |
| "epoch": 3.4176, |
| "grad_norm": 0.21690130609713476, |
| "learning_rate": 1.094412279260726e-05, |
| "loss": 0.2904, |
| "step": 267 |
| }, |
| { |
| "epoch": 3.4304, |
| "grad_norm": 0.2246463430594481, |
| "learning_rate": 1.0784881279576635e-05, |
| "loss": 0.2989, |
| "step": 268 |
| }, |
| { |
| "epoch": 3.4432, |
| "grad_norm": 0.21332209794451956, |
| "learning_rate": 1.0626377982705929e-05, |
| "loss": 0.2975, |
| "step": 269 |
| }, |
| { |
| "epoch": 3.456, |
| "grad_norm": 0.24970279498059386, |
| "learning_rate": 1.0468625599573842e-05, |
| "loss": 0.3064, |
| "step": 270 |
| }, |
| { |
| "epoch": 3.4688, |
| "grad_norm": 0.21261729326140266, |
| "learning_rate": 1.0311636767603952e-05, |
| "loss": 0.2862, |
| "step": 271 |
| }, |
| { |
| "epoch": 3.4816, |
| "grad_norm": 0.22206764450175856, |
| "learning_rate": 1.0155424063052306e-05, |
| "loss": 0.3092, |
| "step": 272 |
| }, |
| { |
| "epoch": 3.4944, |
| "grad_norm": 0.24047571831325013, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.2971, |
| "step": 273 |
| }, |
| { |
| "epoch": 3.5072, |
| "grad_norm": 0.21849137758843346, |
| "learning_rate": 9.84537702935065e-06, |
| "loss": 0.2984, |
| "step": 274 |
| }, |
| { |
| "epoch": 3.52, |
| "grad_norm": 0.2229834366517268, |
| "learning_rate": 9.691567537832964e-06, |
| "loss": 0.2961, |
| "step": 275 |
| }, |
| { |
| "epoch": 3.5328, |
| "grad_norm": 0.22968975608492073, |
| "learning_rate": 9.538583847008452e-06, |
| "loss": 0.3033, |
| "step": 276 |
| }, |
| { |
| "epoch": 3.5456, |
| "grad_norm": 0.22151889834022945, |
| "learning_rate": 9.386438212284372e-06, |
| "loss": 0.3039, |
| "step": 277 |
| }, |
| { |
| "epoch": 3.5584, |
| "grad_norm": 0.2128895075658792, |
| "learning_rate": 9.235142821931928e-06, |
| "loss": 0.2943, |
| "step": 278 |
| }, |
| { |
| "epoch": 3.5712, |
| "grad_norm": 0.21164754242909364, |
| "learning_rate": 9.084709796109907e-06, |
| "loss": 0.2919, |
| "step": 279 |
| }, |
| { |
| "epoch": 3.584, |
| "grad_norm": 0.20884461582582384, |
| "learning_rate": 8.93515118589373e-06, |
| "loss": 0.2997, |
| "step": 280 |
| }, |
| { |
| "epoch": 3.5968, |
| "grad_norm": 0.21541996842202368, |
| "learning_rate": 8.786478972310023e-06, |
| "loss": 0.3002, |
| "step": 281 |
| }, |
| { |
| "epoch": 3.6096, |
| "grad_norm": 0.20772455990874986, |
| "learning_rate": 8.638705065376887e-06, |
| "loss": 0.2921, |
| "step": 282 |
| }, |
| { |
| "epoch": 3.6224, |
| "grad_norm": 0.21297651398483633, |
| "learning_rate": 8.491841303149728e-06, |
| "loss": 0.311, |
| "step": 283 |
| }, |
| { |
| "epoch": 3.6352, |
| "grad_norm": 0.2190112768874125, |
| "learning_rate": 8.345899450772975e-06, |
| "loss": 0.2919, |
| "step": 284 |
| }, |
| { |
| "epoch": 3.648, |
| "grad_norm": 0.21673928683300617, |
| "learning_rate": 8.200891199537549e-06, |
| "loss": 0.2841, |
| "step": 285 |
| }, |
| { |
| "epoch": 3.6608, |
| "grad_norm": 0.21487488035766797, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.3014, |
| "step": 286 |
| }, |
| { |
| "epoch": 3.6736, |
| "grad_norm": 0.21578163765129071, |
| "learning_rate": 7.913721890773354e-06, |
| "loss": 0.3019, |
| "step": 287 |
| }, |
| { |
| "epoch": 3.6864, |
| "grad_norm": 0.224427277921113, |
| "learning_rate": 7.771583838159756e-06, |
| "loss": 0.2976, |
| "step": 288 |
| }, |
| { |
| "epoch": 3.6992000000000003, |
| "grad_norm": 0.22042655864613764, |
| "learning_rate": 7.630425394674903e-06, |
| "loss": 0.2989, |
| "step": 289 |
| }, |
| { |
| "epoch": 3.7119999999999997, |
| "grad_norm": 0.20374940342407785, |
| "learning_rate": 7.49025786841445e-06, |
| "loss": 0.2923, |
| "step": 290 |
| }, |
| { |
| "epoch": 3.7248, |
| "grad_norm": 0.2114696586781579, |
| "learning_rate": 7.3510924880924575e-06, |
| "loss": 0.2969, |
| "step": 291 |
| }, |
| { |
| "epoch": 3.7376, |
| "grad_norm": 0.20549511788083236, |
| "learning_rate": 7.212940402141808e-06, |
| "loss": 0.3061, |
| "step": 292 |
| }, |
| { |
| "epoch": 3.7504, |
| "grad_norm": 0.19979906515975762, |
| "learning_rate": 7.075812677821145e-06, |
| "loss": 0.2823, |
| "step": 293 |
| }, |
| { |
| "epoch": 3.7632, |
| "grad_norm": 0.19229453116296086, |
| "learning_rate": 6.939720300328303e-06, |
| "loss": 0.2997, |
| "step": 294 |
| }, |
| { |
| "epoch": 3.776, |
| "grad_norm": 0.20712539472061123, |
| "learning_rate": 6.8046741719202385e-06, |
| "loss": 0.2959, |
| "step": 295 |
| }, |
| { |
| "epoch": 3.7888, |
| "grad_norm": 0.2015468139758988, |
| "learning_rate": 6.67068511103971e-06, |
| "loss": 0.2979, |
| "step": 296 |
| }, |
| { |
| "epoch": 3.8016, |
| "grad_norm": 0.20097170185132207, |
| "learning_rate": 6.537763851448593e-06, |
| "loss": 0.2926, |
| "step": 297 |
| }, |
| { |
| "epoch": 3.8144, |
| "grad_norm": 0.19852055662210327, |
| "learning_rate": 6.4059210413680175e-06, |
| "loss": 0.2913, |
| "step": 298 |
| }, |
| { |
| "epoch": 3.8272, |
| "grad_norm": 0.18906994406437755, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.3006, |
| "step": 299 |
| }, |
| { |
| "epoch": 3.84, |
| "grad_norm": 0.2021153809962757, |
| "learning_rate": 6.145512929808013e-06, |
| "loss": 0.2923, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.8528000000000002, |
| "grad_norm": 0.1999899974233003, |
| "learning_rate": 6.016968489424572e-06, |
| "loss": 0.2915, |
| "step": 301 |
| }, |
| { |
| "epoch": 3.8656, |
| "grad_norm": 0.19630347526748365, |
| "learning_rate": 5.889544219072465e-06, |
| "loss": 0.2866, |
| "step": 302 |
| }, |
| { |
| "epoch": 3.8784, |
| "grad_norm": 0.1920809980534528, |
| "learning_rate": 5.7632503266131925e-06, |
| "loss": 0.3032, |
| "step": 303 |
| }, |
| { |
| "epoch": 3.8912, |
| "grad_norm": 0.1901195320725009, |
| "learning_rate": 5.638096929354522e-06, |
| "loss": 0.2909, |
| "step": 304 |
| }, |
| { |
| "epoch": 3.904, |
| "grad_norm": 0.21032225123095197, |
| "learning_rate": 5.514094053240035e-06, |
| "loss": 0.3084, |
| "step": 305 |
| }, |
| { |
| "epoch": 3.9168, |
| "grad_norm": 0.1868686917904852, |
| "learning_rate": 5.39125163204594e-06, |
| "loss": 0.2905, |
| "step": 306 |
| }, |
| { |
| "epoch": 3.9295999999999998, |
| "grad_norm": 0.18502790597756422, |
| "learning_rate": 5.269579506585259e-06, |
| "loss": 0.2937, |
| "step": 307 |
| }, |
| { |
| "epoch": 3.9424, |
| "grad_norm": 0.1928945785679494, |
| "learning_rate": 5.149087423919541e-06, |
| "loss": 0.2989, |
| "step": 308 |
| }, |
| { |
| "epoch": 3.9552, |
| "grad_norm": 0.19560615341315463, |
| "learning_rate": 5.029785036577976e-06, |
| "loss": 0.3126, |
| "step": 309 |
| }, |
| { |
| "epoch": 3.968, |
| "grad_norm": 0.18840138055047603, |
| "learning_rate": 4.911681901784198e-06, |
| "loss": 0.2934, |
| "step": 310 |
| }, |
| { |
| "epoch": 3.9808, |
| "grad_norm": 0.19588915112265848, |
| "learning_rate": 4.794787480690597e-06, |
| "loss": 0.2905, |
| "step": 311 |
| }, |
| { |
| "epoch": 3.9936, |
| "grad_norm": 0.19730007784376413, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.3044, |
| "step": 312 |
| }, |
| { |
| "epoch": 4.0064, |
| "grad_norm": 0.572586285609381, |
| "learning_rate": 4.5646621393177e-06, |
| "loss": 0.5472, |
| "step": 313 |
| }, |
| { |
| "epoch": 4.0192, |
| "grad_norm": 0.2949768631584251, |
| "learning_rate": 4.451449654204685e-06, |
| "loss": 0.2704, |
| "step": 314 |
| }, |
| { |
| "epoch": 4.032, |
| "grad_norm": 0.2574596792309237, |
| "learning_rate": 4.339482751647557e-06, |
| "loss": 0.2549, |
| "step": 315 |
| }, |
| { |
| "epoch": 4.0448, |
| "grad_norm": 0.21477368558781973, |
| "learning_rate": 4.228770401229824e-06, |
| "loss": 0.2621, |
| "step": 316 |
| }, |
| { |
| "epoch": 4.0576, |
| "grad_norm": 0.2832750553273873, |
| "learning_rate": 4.119321472033779e-06, |
| "loss": 0.2838, |
| "step": 317 |
| }, |
| { |
| "epoch": 4.0704, |
| "grad_norm": 0.31022611032250574, |
| "learning_rate": 4.011144731929981e-06, |
| "loss": 0.2622, |
| "step": 318 |
| }, |
| { |
| "epoch": 4.0832, |
| "grad_norm": 0.2628883778841394, |
| "learning_rate": 3.904248846874894e-06, |
| "loss": 0.2482, |
| "step": 319 |
| }, |
| { |
| "epoch": 4.096, |
| "grad_norm": 0.25960167507018217, |
| "learning_rate": 3.7986423802166705e-06, |
| "loss": 0.2637, |
| "step": 320 |
| }, |
| { |
| "epoch": 4.1088, |
| "grad_norm": 0.23079097623497014, |
| "learning_rate": 3.694333792009115e-06, |
| "loss": 0.2583, |
| "step": 321 |
| }, |
| { |
| "epoch": 4.1216, |
| "grad_norm": 0.23734069583525863, |
| "learning_rate": 3.5913314383339937e-06, |
| "loss": 0.2636, |
| "step": 322 |
| }, |
| { |
| "epoch": 4.1344, |
| "grad_norm": 0.24231979125793324, |
| "learning_rate": 3.4896435706316e-06, |
| "loss": 0.2677, |
| "step": 323 |
| }, |
| { |
| "epoch": 4.1472, |
| "grad_norm": 0.222245031459651, |
| "learning_rate": 3.3892783350397675e-06, |
| "loss": 0.2611, |
| "step": 324 |
| }, |
| { |
| "epoch": 4.16, |
| "grad_norm": 0.20312084213998194, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.2585, |
| "step": 325 |
| }, |
| { |
| "epoch": 4.1728, |
| "grad_norm": 0.197752485913063, |
| "learning_rate": 3.1925478143197418e-06, |
| "loss": 0.259, |
| "step": 326 |
| }, |
| { |
| "epoch": 4.1856, |
| "grad_norm": 0.2026654928471488, |
| "learning_rate": 3.0961982891241083e-06, |
| "loss": 0.2514, |
| "step": 327 |
| }, |
| { |
| "epoch": 4.1984, |
| "grad_norm": 0.2226046433033049, |
| "learning_rate": 3.001202914641628e-06, |
| "loss": 0.2717, |
| "step": 328 |
| }, |
| { |
| "epoch": 4.2112, |
| "grad_norm": 0.19738358880511364, |
| "learning_rate": 2.907569300879596e-06, |
| "loss": 0.2568, |
| "step": 329 |
| }, |
| { |
| "epoch": 4.224, |
| "grad_norm": 0.20518193085549377, |
| "learning_rate": 2.815304948755664e-06, |
| "loss": 0.2593, |
| "step": 330 |
| }, |
| { |
| "epoch": 4.2368, |
| "grad_norm": 0.2010784131419505, |
| "learning_rate": 2.7244172494969978e-06, |
| "loss": 0.2672, |
| "step": 331 |
| }, |
| { |
| "epoch": 4.2496, |
| "grad_norm": 0.2040581583418926, |
| "learning_rate": 2.6349134840481294e-06, |
| "loss": 0.2672, |
| "step": 332 |
| }, |
| { |
| "epoch": 4.2624, |
| "grad_norm": 0.2051153804496881, |
| "learning_rate": 2.546800822487714e-06, |
| "loss": 0.2577, |
| "step": 333 |
| }, |
| { |
| "epoch": 4.2752, |
| "grad_norm": 0.19036151835844245, |
| "learning_rate": 2.4600863234541338e-06, |
| "loss": 0.2608, |
| "step": 334 |
| }, |
| { |
| "epoch": 4.288, |
| "grad_norm": 0.1878364416043157, |
| "learning_rate": 2.374776933580025e-06, |
| "loss": 0.2523, |
| "step": 335 |
| }, |
| { |
| "epoch": 4.3008, |
| "grad_norm": 0.19031269408103865, |
| "learning_rate": 2.2908794869358044e-06, |
| "loss": 0.2545, |
| "step": 336 |
| }, |
| { |
| "epoch": 4.3136, |
| "grad_norm": 0.18967066362788515, |
| "learning_rate": 2.2084007044821764e-06, |
| "loss": 0.2589, |
| "step": 337 |
| }, |
| { |
| "epoch": 4.3264, |
| "grad_norm": 0.18796101285613187, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.264, |
| "step": 338 |
| }, |
| { |
| "epoch": 4.3392, |
| "grad_norm": 0.18728077492824852, |
| "learning_rate": 2.0477254472197237e-06, |
| "loss": 0.275, |
| "step": 339 |
| }, |
| { |
| "epoch": 4.352, |
| "grad_norm": 0.1920736215287993, |
| "learning_rate": 1.96954184398368e-06, |
| "loss": 0.2668, |
| "step": 340 |
| }, |
| { |
| "epoch": 4.3648, |
| "grad_norm": 0.18710726206783124, |
| "learning_rate": 1.8928026470526917e-06, |
| "loss": 0.2626, |
| "step": 341 |
| }, |
| { |
| "epoch": 4.3776, |
| "grad_norm": 0.18070835403401606, |
| "learning_rate": 1.817514003945524e-06, |
| "loss": 0.2539, |
| "step": 342 |
| }, |
| { |
| "epoch": 4.3904, |
| "grad_norm": 0.18040022199428507, |
| "learning_rate": 1.743681945978184e-06, |
| "loss": 0.2683, |
| "step": 343 |
| }, |
| { |
| "epoch": 4.4032, |
| "grad_norm": 0.18024941762685995, |
| "learning_rate": 1.6713123877807413e-06, |
| "loss": 0.2629, |
| "step": 344 |
| }, |
| { |
| "epoch": 4.416, |
| "grad_norm": 0.17679299731428164, |
| "learning_rate": 1.6004111268235156e-06, |
| "loss": 0.2574, |
| "step": 345 |
| }, |
| { |
| "epoch": 4.4288, |
| "grad_norm": 0.17535840405973557, |
| "learning_rate": 1.5309838429526714e-06, |
| "loss": 0.2583, |
| "step": 346 |
| }, |
| { |
| "epoch": 4.4416, |
| "grad_norm": 0.17209557575763554, |
| "learning_rate": 1.4630360979351644e-06, |
| "loss": 0.2538, |
| "step": 347 |
| }, |
| { |
| "epoch": 4.4544, |
| "grad_norm": 0.17269823781217156, |
| "learning_rate": 1.396573335013236e-06, |
| "loss": 0.2573, |
| "step": 348 |
| }, |
| { |
| "epoch": 4.4672, |
| "grad_norm": 0.17569630903609573, |
| "learning_rate": 1.3316008784683265e-06, |
| "loss": 0.2636, |
| "step": 349 |
| }, |
| { |
| "epoch": 4.48, |
| "grad_norm": 0.17222091170740247, |
| "learning_rate": 1.2681239331945695e-06, |
| "loss": 0.2564, |
| "step": 350 |
| }, |
| { |
| "epoch": 4.4928, |
| "grad_norm": 0.17986994282527735, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.2658, |
| "step": 351 |
| }, |
| { |
| "epoch": 4.5056, |
| "grad_norm": 0.17749306159579448, |
| "learning_rate": 1.1456767966083393e-06, |
| "loss": 0.2642, |
| "step": 352 |
| }, |
| { |
| "epoch": 4.5184, |
| "grad_norm": 0.17369779306753141, |
| "learning_rate": 1.086716414442952e-06, |
| "loss": 0.2585, |
| "step": 353 |
| }, |
| { |
| "epoch": 4.5312, |
| "grad_norm": 0.1762932829589361, |
| "learning_rate": 1.0292711610570904e-06, |
| "loss": 0.2676, |
| "step": 354 |
| }, |
| { |
| "epoch": 4.5440000000000005, |
| "grad_norm": 0.17826711105619464, |
| "learning_rate": 9.733456383463658e-07, |
| "loss": 0.2707, |
| "step": 355 |
| }, |
| { |
| "epoch": 4.5568, |
| "grad_norm": 0.17362813852251957, |
| "learning_rate": 9.189443264619102e-07, |
| "loss": 0.2622, |
| "step": 356 |
| }, |
| { |
| "epoch": 4.5696, |
| "grad_norm": 0.17946540544791956, |
| "learning_rate": 8.660715834514977e-07, |
| "loss": 0.262, |
| "step": 357 |
| }, |
| { |
| "epoch": 4.5824, |
| "grad_norm": 0.17382409916390584, |
| "learning_rate": 8.147316449103959e-07, |
| "loss": 0.2547, |
| "step": 358 |
| }, |
| { |
| "epoch": 4.5952, |
| "grad_norm": 0.17625896763947824, |
| "learning_rate": 7.649286236420806e-07, |
| "loss": 0.2623, |
| "step": 359 |
| }, |
| { |
| "epoch": 4.608, |
| "grad_norm": 0.1699245760301564, |
| "learning_rate": 7.166665093287539e-07, |
| "loss": 0.2694, |
| "step": 360 |
| }, |
| { |
| "epoch": 4.6208, |
| "grad_norm": 0.16895611272526004, |
| "learning_rate": 6.69949168211721e-07, |
| "loss": 0.2575, |
| "step": 361 |
| }, |
| { |
| "epoch": 4.6336, |
| "grad_norm": 0.1693241796677865, |
| "learning_rate": 6.247803427816945e-07, |
| "loss": 0.2459, |
| "step": 362 |
| }, |
| { |
| "epoch": 4.6464, |
| "grad_norm": 0.17362449206981356, |
| "learning_rate": 5.811636514789598e-07, |
| "loss": 0.2716, |
| "step": 363 |
| }, |
| { |
| "epoch": 4.6592, |
| "grad_norm": 0.17025045249936588, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.2534, |
| "step": 364 |
| }, |
| { |
| "epoch": 4.672, |
| "grad_norm": 0.16875385878273286, |
| "learning_rate": 4.986005230351954e-07, |
| "loss": 0.2586, |
| "step": 365 |
| }, |
| { |
| "epoch": 4.6848, |
| "grad_norm": 0.17389976665994425, |
| "learning_rate": 4.5966069996365993e-07, |
| "loss": 0.2645, |
| "step": 366 |
| }, |
| { |
| "epoch": 4.6975999999999996, |
| "grad_norm": 0.17023072304351422, |
| "learning_rate": 4.22286238628562e-07, |
| "loss": 0.2569, |
| "step": 367 |
| }, |
| { |
| "epoch": 4.7104, |
| "grad_norm": 0.17186663723394052, |
| "learning_rate": 3.8648013306960664e-07, |
| "loss": 0.2643, |
| "step": 368 |
| }, |
| { |
| "epoch": 4.7232, |
| "grad_norm": 0.1680634909255728, |
| "learning_rate": 3.522452516867048e-07, |
| "loss": 0.2623, |
| "step": 369 |
| }, |
| { |
| "epoch": 4.736, |
| "grad_norm": 0.17120199576002865, |
| "learning_rate": 3.1958433701019697e-07, |
| "loss": 0.2613, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.7488, |
| "grad_norm": 0.16412195443224825, |
| "learning_rate": 2.8850000548115155e-07, |
| "loss": 0.2589, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.7616, |
| "grad_norm": 0.17379655511927136, |
| "learning_rate": 2.5899474724174313e-07, |
| "loss": 0.2632, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.7744, |
| "grad_norm": 0.1656832409615132, |
| "learning_rate": 2.3107092593579905e-07, |
| "loss": 0.2478, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.7872, |
| "grad_norm": 0.16951075627616047, |
| "learning_rate": 2.0473077851942858e-07, |
| "loss": 0.2516, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 0.17537557692366856, |
| "learning_rate": 1.799764150818306e-07, |
| "loss": 0.2676, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.8128, |
| "grad_norm": 0.17374885403540272, |
| "learning_rate": 1.5680981867625566e-07, |
| "loss": 0.2605, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.8256, |
| "grad_norm": 0.16888552813586052, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.2718, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.8384, |
| "grad_norm": 0.17354897645874484, |
| "learning_rate": 1.1524722305144231e-07, |
| "loss": 0.2706, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.8512, |
| "grad_norm": 0.1700437193077696, |
| "learning_rate": 9.685455338016347e-08, |
| "loss": 0.2622, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.864, |
| "grad_norm": 0.16925795308526098, |
| "learning_rate": 8.005630957010014e-08, |
| "loss": 0.2532, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.8768, |
| "grad_norm": 0.17169956156014887, |
| "learning_rate": 6.485383731580142e-08, |
| "loss": 0.2543, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.8896, |
| "grad_norm": 0.16991884248389066, |
| "learning_rate": 5.1248354475768034e-08, |
| "loss": 0.254, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.9024, |
| "grad_norm": 0.17543705217318195, |
| "learning_rate": 3.924095097489922e-08, |
| "loss": 0.2678, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.9152000000000005, |
| "grad_norm": 0.17309881142676284, |
| "learning_rate": 2.8832588717164766e-08, |
| "loss": 0.2528, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.928, |
| "grad_norm": 0.16669028171466665, |
| "learning_rate": 2.0024101508555604e-08, |
| "loss": 0.2568, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.9408, |
| "grad_norm": 0.16908323842167242, |
| "learning_rate": 1.281619499029274e-08, |
| "loss": 0.2625, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.9536, |
| "grad_norm": 0.16654126250710174, |
| "learning_rate": 7.209446582292501e-09, |
| "loss": 0.2552, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.9664, |
| "grad_norm": 0.16486951649258705, |
| "learning_rate": 3.2043054369057523e-09, |
| "loss": 0.2553, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.9792, |
| "grad_norm": 0.16746724674913815, |
| "learning_rate": 8.010924029533406e-10, |
| "loss": 0.2546, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.992, |
| "grad_norm": 0.17103013097653133, |
| "learning_rate": 0.0, |
| "loss": 0.268, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.992, |
| "step": 390, |
| "total_flos": 1.8773457441390592e+18, |
| "train_loss": 0.3770592579474816, |
| "train_runtime": 19616.6723, |
| "train_samples_per_second": 2.549, |
| "train_steps_per_second": 0.02 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 390, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.8773457441390592e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |