{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 209,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004784688995215311,
"grad_norm": 0.14709724485874176,
"learning_rate": 2e-05,
"loss": 1.2491,
"step": 1
},
{
"epoch": 0.009569377990430622,
"grad_norm": 0.17019398510456085,
"learning_rate": 4e-05,
"loss": 1.4909,
"step": 2
},
{
"epoch": 0.014354066985645933,
"grad_norm": 0.17654545605182648,
"learning_rate": 6e-05,
"loss": 1.5363,
"step": 3
},
{
"epoch": 0.019138755980861243,
"grad_norm": 0.1930554211139679,
"learning_rate": 8e-05,
"loss": 1.6395,
"step": 4
},
{
"epoch": 0.023923444976076555,
"grad_norm": 0.2117353081703186,
"learning_rate": 0.0001,
"loss": 1.7391,
"step": 5
},
{
"epoch": 0.028708133971291867,
"grad_norm": 0.20174962282180786,
"learning_rate": 9.999407114490384e-05,
"loss": 1.6012,
"step": 6
},
{
"epoch": 0.03349282296650718,
"grad_norm": 0.21089714765548706,
"learning_rate": 9.99762859856683e-05,
"loss": 1.5543,
"step": 7
},
{
"epoch": 0.03827751196172249,
"grad_norm": 0.21190762519836426,
"learning_rate": 9.994664874011863e-05,
"loss": 1.4511,
"step": 8
},
{
"epoch": 0.0430622009569378,
"grad_norm": 0.22505144774913788,
"learning_rate": 9.990516643685222e-05,
"loss": 1.4619,
"step": 9
},
{
"epoch": 0.04784688995215311,
"grad_norm": 0.23618264496326447,
"learning_rate": 9.985184891357164e-05,
"loss": 1.5113,
"step": 10
},
{
"epoch": 0.05263157894736842,
"grad_norm": 0.23488004505634308,
"learning_rate": 9.978670881475172e-05,
"loss": 1.502,
"step": 11
},
{
"epoch": 0.05741626794258373,
"grad_norm": 0.24843598902225494,
"learning_rate": 9.970976158864073e-05,
"loss": 1.5901,
"step": 12
},
{
"epoch": 0.06220095693779904,
"grad_norm": 0.25334247946739197,
"learning_rate": 9.96210254835968e-05,
"loss": 1.4698,
"step": 13
},
{
"epoch": 0.06698564593301436,
"grad_norm": 0.2642665505409241,
"learning_rate": 9.952052154376026e-05,
"loss": 1.5525,
"step": 14
},
{
"epoch": 0.07177033492822966,
"grad_norm": 0.2637348175048828,
"learning_rate": 9.940827360406297e-05,
"loss": 1.4046,
"step": 15
},
{
"epoch": 0.07655502392344497,
"grad_norm": 0.28210246562957764,
"learning_rate": 9.928430828457572e-05,
"loss": 1.3994,
"step": 16
},
{
"epoch": 0.08133971291866028,
"grad_norm": 0.3060890734195709,
"learning_rate": 9.91486549841951e-05,
"loss": 1.6673,
"step": 17
},
{
"epoch": 0.0861244019138756,
"grad_norm": 0.2893538475036621,
"learning_rate": 9.90013458736716e-05,
"loss": 1.5797,
"step": 18
},
{
"epoch": 0.09090909090909091,
"grad_norm": 0.29651427268981934,
"learning_rate": 9.884241588798005e-05,
"loss": 1.4553,
"step": 19
},
{
"epoch": 0.09569377990430622,
"grad_norm": 0.311667263507843,
"learning_rate": 9.867190271803465e-05,
"loss": 1.4259,
"step": 20
},
{
"epoch": 0.10047846889952153,
"grad_norm": 0.32705116271972656,
"learning_rate": 9.848984680175049e-05,
"loss": 1.5493,
"step": 21
},
{
"epoch": 0.10526315789473684,
"grad_norm": 0.33383113145828247,
"learning_rate": 9.829629131445342e-05,
"loss": 1.5034,
"step": 22
},
{
"epoch": 0.11004784688995216,
"grad_norm": 0.3618900179862976,
"learning_rate": 9.809128215864097e-05,
"loss": 1.7059,
"step": 23
},
{
"epoch": 0.11483253588516747,
"grad_norm": 0.36600548028945923,
"learning_rate": 9.787486795309621e-05,
"loss": 1.7853,
"step": 24
},
{
"epoch": 0.11961722488038277,
"grad_norm": 0.3755670189857483,
"learning_rate": 9.764710002135784e-05,
"loss": 1.5303,
"step": 25
},
{
"epoch": 0.12440191387559808,
"grad_norm": 0.3820343613624573,
"learning_rate": 9.74080323795483e-05,
"loss": 1.687,
"step": 26
},
{
"epoch": 0.1291866028708134,
"grad_norm": 0.3974711000919342,
"learning_rate": 9.715772172356388e-05,
"loss": 1.7679,
"step": 27
},
{
"epoch": 0.1339712918660287,
"grad_norm": 0.4264979064464569,
"learning_rate": 9.689622741562892e-05,
"loss": 1.6876,
"step": 28
},
{
"epoch": 0.13875598086124402,
"grad_norm": 0.4498402178287506,
"learning_rate": 9.662361147021779e-05,
"loss": 1.7944,
"step": 29
},
{
"epoch": 0.14354066985645933,
"grad_norm": 0.4634323716163635,
"learning_rate": 9.633993853934803e-05,
"loss": 2.0416,
"step": 30
},
{
"epoch": 0.14832535885167464,
"grad_norm": 0.467655211687088,
"learning_rate": 9.60452758972477e-05,
"loss": 1.9583,
"step": 31
},
{
"epoch": 0.15311004784688995,
"grad_norm": 0.4708121716976166,
"learning_rate": 9.573969342440106e-05,
"loss": 1.7343,
"step": 32
},
{
"epoch": 0.15789473684210525,
"grad_norm": 0.49835681915283203,
"learning_rate": 9.542326359097619e-05,
"loss": 1.7304,
"step": 33
},
{
"epoch": 0.16267942583732056,
"grad_norm": 0.4945959150791168,
"learning_rate": 9.509606143963832e-05,
"loss": 1.8515,
"step": 34
},
{
"epoch": 0.1674641148325359,
"grad_norm": 0.532650887966156,
"learning_rate": 9.475816456775313e-05,
"loss": 1.955,
"step": 35
},
{
"epoch": 0.1722488038277512,
"grad_norm": 0.5782840847969055,
"learning_rate": 9.440965310898424e-05,
"loss": 1.9727,
"step": 36
},
{
"epoch": 0.17703349282296652,
"grad_norm": 0.6085786819458008,
"learning_rate": 9.405060971428923e-05,
"loss": 1.9417,
"step": 37
},
{
"epoch": 0.18181818181818182,
"grad_norm": 0.5588628053665161,
"learning_rate": 9.368111953231848e-05,
"loss": 1.9105,
"step": 38
},
{
"epoch": 0.18660287081339713,
"grad_norm": 0.6089487671852112,
"learning_rate": 9.330127018922194e-05,
"loss": 1.9613,
"step": 39
},
{
"epoch": 0.19138755980861244,
"grad_norm": 0.6214700937271118,
"learning_rate": 9.291115176786814e-05,
"loss": 2.0116,
"step": 40
},
{
"epoch": 0.19617224880382775,
"grad_norm": 0.6787968277931213,
"learning_rate": 9.251085678648072e-05,
"loss": 1.9211,
"step": 41
},
{
"epoch": 0.20095693779904306,
"grad_norm": 0.8012039065361023,
"learning_rate": 9.210048017669726e-05,
"loss": 2.0415,
"step": 42
},
{
"epoch": 0.20574162679425836,
"grad_norm": 0.7107148766517639,
"learning_rate": 9.168011926105598e-05,
"loss": 1.9067,
"step": 43
},
{
"epoch": 0.21052631578947367,
"grad_norm": 0.8137962818145752,
"learning_rate": 9.124987372991511e-05,
"loss": 1.9049,
"step": 44
},
{
"epoch": 0.215311004784689,
"grad_norm": 0.8808148503303528,
"learning_rate": 9.08098456178111e-05,
"loss": 2.0898,
"step": 45
},
{
"epoch": 0.22009569377990432,
"grad_norm": 1.1123590469360352,
"learning_rate": 9.036013927926048e-05,
"loss": 2.2945,
"step": 46
},
{
"epoch": 0.22488038277511962,
"grad_norm": 1.2723033428192139,
"learning_rate": 8.9900861364012e-05,
"loss": 2.0564,
"step": 47
},
{
"epoch": 0.22966507177033493,
"grad_norm": 1.601427674293518,
"learning_rate": 8.943212079175391e-05,
"loss": 1.8703,
"step": 48
},
{
"epoch": 0.23444976076555024,
"grad_norm": 2.2030014991760254,
"learning_rate": 8.895402872628352e-05,
"loss": 2.0263,
"step": 49
},
{
"epoch": 0.23923444976076555,
"grad_norm": 3.696323871612549,
"learning_rate": 8.846669854914396e-05,
"loss": 2.6416,
"step": 50
},
{
"epoch": 0.24401913875598086,
"grad_norm": 0.3211597800254822,
"learning_rate": 8.797024583273537e-05,
"loss": 1.5625,
"step": 51
},
{
"epoch": 0.24880382775119617,
"grad_norm": 0.36418232321739197,
"learning_rate": 8.746478831290648e-05,
"loss": 1.6543,
"step": 52
},
{
"epoch": 0.2535885167464115,
"grad_norm": 0.4147177040576935,
"learning_rate": 8.695044586103296e-05,
"loss": 1.5142,
"step": 53
},
{
"epoch": 0.2583732057416268,
"grad_norm": 0.40443918108940125,
"learning_rate": 8.642734045558952e-05,
"loss": 1.5673,
"step": 54
},
{
"epoch": 0.2631578947368421,
"grad_norm": 0.39546626806259155,
"learning_rate": 8.58955961532221e-05,
"loss": 1.6391,
"step": 55
},
{
"epoch": 0.2679425837320574,
"grad_norm": 0.3606320321559906,
"learning_rate": 8.535533905932738e-05,
"loss": 1.605,
"step": 56
},
{
"epoch": 0.2727272727272727,
"grad_norm": 0.35251346230506897,
"learning_rate": 8.480669729814635e-05,
"loss": 1.3353,
"step": 57
},
{
"epoch": 0.27751196172248804,
"grad_norm": 0.3432234227657318,
"learning_rate": 8.424980098237903e-05,
"loss": 1.59,
"step": 58
},
{
"epoch": 0.2822966507177033,
"grad_norm": 0.3411220908164978,
"learning_rate": 8.368478218232787e-05,
"loss": 1.5685,
"step": 59
},
{
"epoch": 0.28708133971291866,
"grad_norm": 0.2974628210067749,
"learning_rate": 8.311177489457652e-05,
"loss": 1.6432,
"step": 60
},
{
"epoch": 0.291866028708134,
"grad_norm": 0.32686975598335266,
"learning_rate": 8.25309150102121e-05,
"loss": 1.6631,
"step": 61
},
{
"epoch": 0.2966507177033493,
"grad_norm": 0.28340205550193787,
"learning_rate": 8.194234028259806e-05,
"loss": 1.4545,
"step": 62
},
{
"epoch": 0.3014354066985646,
"grad_norm": 0.2994033098220825,
"learning_rate": 8.134619029470534e-05,
"loss": 1.5692,
"step": 63
},
{
"epoch": 0.3062200956937799,
"grad_norm": 0.31368395686149597,
"learning_rate": 8.074260642600964e-05,
"loss": 1.4963,
"step": 64
},
{
"epoch": 0.31100478468899523,
"grad_norm": 0.298289030790329,
"learning_rate": 8.013173181896283e-05,
"loss": 1.499,
"step": 65
},
{
"epoch": 0.3157894736842105,
"grad_norm": 0.30118173360824585,
"learning_rate": 7.951371134504599e-05,
"loss": 1.3931,
"step": 66
},
{
"epoch": 0.32057416267942584,
"grad_norm": 0.3297058045864105,
"learning_rate": 7.888869157041257e-05,
"loss": 1.6554,
"step": 67
},
{
"epoch": 0.3253588516746411,
"grad_norm": 0.3186734616756439,
"learning_rate": 7.82568207211296e-05,
"loss": 1.4705,
"step": 68
},
{
"epoch": 0.33014354066985646,
"grad_norm": 0.3129718601703644,
"learning_rate": 7.76182486480253e-05,
"loss": 1.4966,
"step": 69
},
{
"epoch": 0.3349282296650718,
"grad_norm": 0.31505700945854187,
"learning_rate": 7.697312679115125e-05,
"loss": 1.5135,
"step": 70
},
{
"epoch": 0.3397129186602871,
"grad_norm": 0.3435811996459961,
"learning_rate": 7.63216081438678e-05,
"loss": 1.4485,
"step": 71
},
{
"epoch": 0.3444976076555024,
"grad_norm": 0.3319874703884125,
"learning_rate": 7.566384721656104e-05,
"loss": 1.5681,
"step": 72
},
{
"epoch": 0.3492822966507177,
"grad_norm": 0.3534000813961029,
"learning_rate": 7.500000000000001e-05,
"loss": 1.5403,
"step": 73
},
{
"epoch": 0.35406698564593303,
"grad_norm": 0.33617714047431946,
"learning_rate": 7.433022392834282e-05,
"loss": 1.4595,
"step": 74
},
{
"epoch": 0.3588516746411483,
"grad_norm": 0.3658270537853241,
"learning_rate": 7.365467784180051e-05,
"loss": 1.6116,
"step": 75
},
{
"epoch": 0.36363636363636365,
"grad_norm": 0.3672615885734558,
"learning_rate": 7.297352194896739e-05,
"loss": 1.5343,
"step": 76
},
{
"epoch": 0.3684210526315789,
"grad_norm": 0.4313615560531616,
"learning_rate": 7.228691778882693e-05,
"loss": 1.6585,
"step": 77
},
{
"epoch": 0.37320574162679426,
"grad_norm": 0.3559150695800781,
"learning_rate": 7.159502819244206e-05,
"loss": 1.5485,
"step": 78
},
{
"epoch": 0.37799043062200954,
"grad_norm": 0.44446811079978943,
"learning_rate": 7.089801724433917e-05,
"loss": 1.8183,
"step": 79
},
{
"epoch": 0.3827751196172249,
"grad_norm": 0.4031125009059906,
"learning_rate": 7.019605024359474e-05,
"loss": 1.7408,
"step": 80
},
{
"epoch": 0.3875598086124402,
"grad_norm": 0.4458540678024292,
"learning_rate": 6.948929366463396e-05,
"loss": 1.8305,
"step": 81
},
{
"epoch": 0.3923444976076555,
"grad_norm": 0.47091686725616455,
"learning_rate": 6.877791511775063e-05,
"loss": 1.6138,
"step": 82
},
{
"epoch": 0.39712918660287083,
"grad_norm": 0.46907365322113037,
"learning_rate": 6.806208330935766e-05,
"loss": 1.7399,
"step": 83
},
{
"epoch": 0.4019138755980861,
"grad_norm": 0.5174430012702942,
"learning_rate": 6.734196800197762e-05,
"loss": 1.8187,
"step": 84
},
{
"epoch": 0.40669856459330145,
"grad_norm": 0.5630610585212708,
"learning_rate": 6.661773997398298e-05,
"loss": 2.0938,
"step": 85
},
{
"epoch": 0.41148325358851673,
"grad_norm": 0.49577152729034424,
"learning_rate": 6.588957097909508e-05,
"loss": 1.8769,
"step": 86
},
{
"epoch": 0.41626794258373206,
"grad_norm": 0.5209452509880066,
"learning_rate": 6.515763370565218e-05,
"loss": 1.8037,
"step": 87
},
{
"epoch": 0.42105263157894735,
"grad_norm": 0.5780702829360962,
"learning_rate": 6.442210173565561e-05,
"loss": 2.012,
"step": 88
},
{
"epoch": 0.4258373205741627,
"grad_norm": 0.5638837814331055,
"learning_rate": 6.368314950360415e-05,
"loss": 1.9684,
"step": 89
},
{
"epoch": 0.430622009569378,
"grad_norm": 0.6348680257797241,
"learning_rate": 6.294095225512603e-05,
"loss": 1.7803,
"step": 90
},
{
"epoch": 0.4354066985645933,
"grad_norm": 0.6502403616905212,
"learning_rate": 6.219568600541886e-05,
"loss": 1.8694,
"step": 91
},
{
"epoch": 0.44019138755980863,
"grad_norm": 0.7062990665435791,
"learning_rate": 6.14475274975067e-05,
"loss": 2.005,
"step": 92
},
{
"epoch": 0.4449760765550239,
"grad_norm": 0.7276941537857056,
"learning_rate": 6.069665416032487e-05,
"loss": 2.0282,
"step": 93
},
{
"epoch": 0.44976076555023925,
"grad_norm": 0.8595307469367981,
"learning_rate": 5.9943244066641834e-05,
"loss": 2.1893,
"step": 94
},
{
"epoch": 0.45454545454545453,
"grad_norm": 0.9870045781135559,
"learning_rate": 5.918747589082853e-05,
"loss": 2.1732,
"step": 95
},
{
"epoch": 0.45933014354066987,
"grad_norm": 1.0758846998214722,
"learning_rate": 5.842952886648496e-05,
"loss": 2.1482,
"step": 96
},
{
"epoch": 0.46411483253588515,
"grad_norm": 1.308895468711853,
"learning_rate": 5.7669582743934284e-05,
"loss": 2.0192,
"step": 97
},
{
"epoch": 0.4688995215311005,
"grad_norm": 1.2809703350067139,
"learning_rate": 5.6907817747594116e-05,
"loss": 1.5592,
"step": 98
},
{
"epoch": 0.47368421052631576,
"grad_norm": 2.426992893218994,
"learning_rate": 5.614441453323571e-05,
"loss": 2.0581,
"step": 99
},
{
"epoch": 0.4784688995215311,
"grad_norm": 4.334367752075195,
"learning_rate": 5.5379554145140574e-05,
"loss": 2.1415,
"step": 100
},
{
"epoch": 0.48325358851674644,
"grad_norm": 0.22294622659683228,
"learning_rate": 5.4613417973165106e-05,
"loss": 1.477,
"step": 101
},
{
"epoch": 0.4880382775119617,
"grad_norm": 0.23113781213760376,
"learning_rate": 5.38461877097232e-05,
"loss": 1.3895,
"step": 102
},
{
"epoch": 0.49282296650717705,
"grad_norm": 0.2687288522720337,
"learning_rate": 5.307804530669716e-05,
"loss": 1.5935,
"step": 103
},
{
"epoch": 0.49760765550239233,
"grad_norm": 0.3015460968017578,
"learning_rate": 5.230917293228699e-05,
"loss": 1.7237,
"step": 104
},
{
"epoch": 0.5023923444976076,
"grad_norm": 0.31039535999298096,
"learning_rate": 5.153975292780853e-05,
"loss": 1.4133,
"step": 105
},
{
"epoch": 0.507177033492823,
"grad_norm": 0.2957954406738281,
"learning_rate": 5.0769967764450345e-05,
"loss": 1.4559,
"step": 106
},
{
"epoch": 0.5119617224880383,
"grad_norm": 0.3313244581222534,
"learning_rate": 5e-05,
"loss": 1.4098,
"step": 107
},
{
"epoch": 0.5167464114832536,
"grad_norm": 0.2960905432701111,
"learning_rate": 4.9230032235549667e-05,
"loss": 1.5134,
"step": 108
},
{
"epoch": 0.5215311004784688,
"grad_norm": 0.32173770666122437,
"learning_rate": 4.8460247072191496e-05,
"loss": 1.5187,
"step": 109
},
{
"epoch": 0.5263157894736842,
"grad_norm": 0.3100273311138153,
"learning_rate": 4.7690827067713035e-05,
"loss": 1.4619,
"step": 110
},
{
"epoch": 0.5311004784688995,
"grad_norm": 0.31192484498023987,
"learning_rate": 4.692195469330286e-05,
"loss": 1.5889,
"step": 111
},
{
"epoch": 0.5358851674641149,
"grad_norm": 0.3112650215625763,
"learning_rate": 4.6153812290276813e-05,
"loss": 1.4634,
"step": 112
},
{
"epoch": 0.5406698564593302,
"grad_norm": 0.31642478704452515,
"learning_rate": 4.5386582026834906e-05,
"loss": 1.6003,
"step": 113
},
{
"epoch": 0.5454545454545454,
"grad_norm": 0.30912068486213684,
"learning_rate": 4.462044585485944e-05,
"loss": 1.5149,
"step": 114
},
{
"epoch": 0.5502392344497608,
"grad_norm": 0.3080631196498871,
"learning_rate": 4.38555854667643e-05,
"loss": 1.3925,
"step": 115
},
{
"epoch": 0.5550239234449761,
"grad_norm": 0.32206517457962036,
"learning_rate": 4.30921822524059e-05,
"loss": 1.549,
"step": 116
},
{
"epoch": 0.5598086124401914,
"grad_norm": 0.31533685326576233,
"learning_rate": 4.233041725606572e-05,
"loss": 1.4718,
"step": 117
},
{
"epoch": 0.5645933014354066,
"grad_norm": 0.3389468491077423,
"learning_rate": 4.157047113351504e-05,
"loss": 1.4761,
"step": 118
},
{
"epoch": 0.569377990430622,
"grad_norm": 0.33409473299980164,
"learning_rate": 4.0812524109171476e-05,
"loss": 1.3456,
"step": 119
},
{
"epoch": 0.5741626794258373,
"grad_norm": 0.345503568649292,
"learning_rate": 4.0056755933358184e-05,
"loss": 1.6427,
"step": 120
},
{
"epoch": 0.5789473684210527,
"grad_norm": 0.34861519932746887,
"learning_rate": 3.930334583967514e-05,
"loss": 1.6905,
"step": 121
},
{
"epoch": 0.583732057416268,
"grad_norm": 0.36137065291404724,
"learning_rate": 3.855247250249331e-05,
"loss": 1.4516,
"step": 122
},
{
"epoch": 0.5885167464114832,
"grad_norm": 0.375118225812912,
"learning_rate": 3.780431399458114e-05,
"loss": 1.7034,
"step": 123
},
{
"epoch": 0.5933014354066986,
"grad_norm": 0.3760150074958801,
"learning_rate": 3.705904774487396e-05,
"loss": 1.5659,
"step": 124
},
{
"epoch": 0.5980861244019139,
"grad_norm": 0.3561256229877472,
"learning_rate": 3.631685049639586e-05,
"loss": 1.6253,
"step": 125
},
{
"epoch": 0.6028708133971292,
"grad_norm": 0.392549604177475,
"learning_rate": 3.557789826434439e-05,
"loss": 1.6744,
"step": 126
},
{
"epoch": 0.6076555023923444,
"grad_norm": 0.3839954733848572,
"learning_rate": 3.484236629434783e-05,
"loss": 1.5695,
"step": 127
},
{
"epoch": 0.6124401913875598,
"grad_norm": 0.39694321155548096,
"learning_rate": 3.411042902090492e-05,
"loss": 1.5518,
"step": 128
},
{
"epoch": 0.6172248803827751,
"grad_norm": 0.39925724267959595,
"learning_rate": 3.338226002601703e-05,
"loss": 1.723,
"step": 129
},
{
"epoch": 0.6220095693779905,
"grad_norm": 0.45537644624710083,
"learning_rate": 3.265803199802237e-05,
"loss": 1.8658,
"step": 130
},
{
"epoch": 0.6267942583732058,
"grad_norm": 0.4865153729915619,
"learning_rate": 3.1937916690642356e-05,
"loss": 1.9033,
"step": 131
},
{
"epoch": 0.631578947368421,
"grad_norm": 0.4416426718235016,
"learning_rate": 3.1222084882249375e-05,
"loss": 1.6994,
"step": 132
},
{
"epoch": 0.6363636363636364,
"grad_norm": 0.4817049503326416,
"learning_rate": 3.0510706335366035e-05,
"loss": 1.8274,
"step": 133
},
{
"epoch": 0.6411483253588517,
"grad_norm": 0.4855473041534424,
"learning_rate": 2.980394975640526e-05,
"loss": 1.7856,
"step": 134
},
{
"epoch": 0.645933014354067,
"grad_norm": 0.5136681199073792,
"learning_rate": 2.910198275566085e-05,
"loss": 1.7244,
"step": 135
},
{
"epoch": 0.6507177033492823,
"grad_norm": 0.5696388483047485,
"learning_rate": 2.8404971807557957e-05,
"loss": 1.6867,
"step": 136
},
{
"epoch": 0.6555023923444976,
"grad_norm": 0.5509561896324158,
"learning_rate": 2.771308221117309e-05,
"loss": 1.9106,
"step": 137
},
{
"epoch": 0.6602870813397129,
"grad_norm": 0.6965003609657288,
"learning_rate": 2.7026478051032623e-05,
"loss": 2.0887,
"step": 138
},
{
"epoch": 0.6650717703349283,
"grad_norm": 0.6352129578590393,
"learning_rate": 2.6345322158199503e-05,
"loss": 1.8677,
"step": 139
},
{
"epoch": 0.6698564593301436,
"grad_norm": 0.7062574625015259,
"learning_rate": 2.5669776071657192e-05,
"loss": 1.94,
"step": 140
},
{
"epoch": 0.6746411483253588,
"grad_norm": 0.6748818159103394,
"learning_rate": 2.500000000000001e-05,
"loss": 1.7784,
"step": 141
},
{
"epoch": 0.6794258373205742,
"grad_norm": 0.9134660959243774,
"learning_rate": 2.4336152783438982e-05,
"loss": 2.0779,
"step": 142
},
{
"epoch": 0.6842105263157895,
"grad_norm": 0.8519887924194336,
"learning_rate": 2.3678391856132204e-05,
"loss": 2.0711,
"step": 143
},
{
"epoch": 0.6889952153110048,
"grad_norm": 0.8988573551177979,
"learning_rate": 2.302687320884876e-05,
"loss": 1.9372,
"step": 144
},
{
"epoch": 0.69377990430622,
"grad_norm": 0.9712769389152527,
"learning_rate": 2.238175135197471e-05,
"loss": 2.0032,
"step": 145
},
{
"epoch": 0.6985645933014354,
"grad_norm": 1.3098163604736328,
"learning_rate": 2.1743179278870407e-05,
"loss": 2.4697,
"step": 146
},
{
"epoch": 0.7033492822966507,
"grad_norm": 1.3078206777572632,
"learning_rate": 2.1111308429587444e-05,
"loss": 1.8509,
"step": 147
},
{
"epoch": 0.7081339712918661,
"grad_norm": 1.5340124368667603,
"learning_rate": 2.0486288654954028e-05,
"loss": 1.687,
"step": 148
},
{
"epoch": 0.7129186602870813,
"grad_norm": 1.5090495347976685,
"learning_rate": 1.9868268181037185e-05,
"loss": 1.4265,
"step": 149
},
{
"epoch": 0.7177033492822966,
"grad_norm": 2.595984697341919,
"learning_rate": 1.925739357399038e-05,
"loss": 1.6117,
"step": 150
},
{
"epoch": 0.722488038277512,
"grad_norm": 0.20171192288398743,
"learning_rate": 1.8653809705294688e-05,
"loss": 1.5114,
"step": 151
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.22950993478298187,
"learning_rate": 1.8057659717401947e-05,
"loss": 1.4048,
"step": 152
},
{
"epoch": 0.7320574162679426,
"grad_norm": 0.22736592590808868,
"learning_rate": 1.746908498978791e-05,
"loss": 1.4318,
"step": 153
},
{
"epoch": 0.7368421052631579,
"grad_norm": 0.22380989789962769,
"learning_rate": 1.6888225105423507e-05,
"loss": 1.3012,
"step": 154
},
{
"epoch": 0.7416267942583732,
"grad_norm": 0.2610418498516083,
"learning_rate": 1.631521781767214e-05,
"loss": 1.5871,
"step": 155
},
{
"epoch": 0.7464114832535885,
"grad_norm": 0.24435089528560638,
"learning_rate": 1.575019901762097e-05,
"loss": 1.3297,
"step": 156
},
{
"epoch": 0.7511961722488039,
"grad_norm": 0.27125418186187744,
"learning_rate": 1.5193302701853673e-05,
"loss": 1.3504,
"step": 157
},
{
"epoch": 0.7559808612440191,
"grad_norm": 0.2774478495121002,
"learning_rate": 1.4644660940672627e-05,
"loss": 1.5265,
"step": 158
},
{
"epoch": 0.7607655502392344,
"grad_norm": 0.28624045848846436,
"learning_rate": 1.4104403846777909e-05,
"loss": 1.4913,
"step": 159
},
{
"epoch": 0.7655502392344498,
"grad_norm": 0.2924661338329315,
"learning_rate": 1.3572659544410494e-05,
"loss": 1.5388,
"step": 160
},
{
"epoch": 0.7703349282296651,
"grad_norm": 0.30911317467689514,
"learning_rate": 1.3049554138967051e-05,
"loss": 1.5633,
"step": 161
},
{
"epoch": 0.7751196172248804,
"grad_norm": 0.31692859530448914,
"learning_rate": 1.2535211687093535e-05,
"loss": 1.5382,
"step": 162
},
{
"epoch": 0.7799043062200957,
"grad_norm": 0.32245802879333496,
"learning_rate": 1.202975416726464e-05,
"loss": 1.5613,
"step": 163
},
{
"epoch": 0.784688995215311,
"grad_norm": 0.3302960991859436,
"learning_rate": 1.1533301450856054e-05,
"loss": 1.4634,
"step": 164
},
{
"epoch": 0.7894736842105263,
"grad_norm": 0.33953121304512024,
"learning_rate": 1.1045971273716477e-05,
"loss": 1.5284,
"step": 165
},
{
"epoch": 0.7942583732057417,
"grad_norm": 0.3454139530658722,
"learning_rate": 1.0567879208246084e-05,
"loss": 1.5508,
"step": 166
},
{
"epoch": 0.7990430622009569,
"grad_norm": 0.3438206613063812,
"learning_rate": 1.0099138635988026e-05,
"loss": 1.5568,
"step": 167
},
{
"epoch": 0.8038277511961722,
"grad_norm": 0.3348008692264557,
"learning_rate": 9.639860720739525e-06,
"loss": 1.3964,
"step": 168
},
{
"epoch": 0.8086124401913876,
"grad_norm": 0.33726564049720764,
"learning_rate": 9.190154382188921e-06,
"loss": 1.4585,
"step": 169
},
{
"epoch": 0.8133971291866029,
"grad_norm": 0.3814249038696289,
"learning_rate": 8.75012627008489e-06,
"loss": 1.4147,
"step": 170
},
{
"epoch": 0.8181818181818182,
"grad_norm": 0.3630184531211853,
"learning_rate": 8.31988073894403e-06,
"loss": 1.539,
"step": 171
},
{
"epoch": 0.8229665071770335,
"grad_norm": 0.39133140444755554,
"learning_rate": 7.899519823302743e-06,
"loss": 1.7378,
"step": 172
},
{
"epoch": 0.8277511961722488,
"grad_norm": 0.36332792043685913,
"learning_rate": 7.489143213519301e-06,
"loss": 1.5765,
"step": 173
},
{
"epoch": 0.8325358851674641,
"grad_norm": 0.39158034324645996,
"learning_rate": 7.088848232131861e-06,
"loss": 1.5155,
"step": 174
},
{
"epoch": 0.8373205741626795,
"grad_norm": 0.40499481558799744,
"learning_rate": 6.698729810778065e-06,
"loss": 1.5312,
"step": 175
},
{
"epoch": 0.8421052631578947,
"grad_norm": 0.41012099385261536,
"learning_rate": 6.318880467681526e-06,
"loss": 1.7301,
"step": 176
},
{
"epoch": 0.84688995215311,
"grad_norm": 0.4328974783420563,
"learning_rate": 5.949390285710776e-06,
"loss": 1.6651,
"step": 177
},
{
"epoch": 0.8516746411483254,
"grad_norm": 0.4051513373851776,
"learning_rate": 5.590346891015758e-06,
"loss": 1.5258,
"step": 178
},
{
"epoch": 0.8564593301435407,
"grad_norm": 0.49103522300720215,
"learning_rate": 5.241835432246889e-06,
"loss": 1.8134,
"step": 179
},
{
"epoch": 0.861244019138756,
"grad_norm": 0.4615950286388397,
"learning_rate": 4.903938560361698e-06,
"loss": 1.5319,
"step": 180
},
{
"epoch": 0.8660287081339713,
"grad_norm": 0.47922465205192566,
"learning_rate": 4.576736409023813e-06,
"loss": 1.7446,
"step": 181
},
{
"epoch": 0.8708133971291866,
"grad_norm": 0.5150140523910522,
"learning_rate": 4.260306575598949e-06,
"loss": 1.6205,
"step": 182
},
{
"epoch": 0.8755980861244019,
"grad_norm": 0.5273977518081665,
"learning_rate": 3.954724102752316e-06,
"loss": 1.7335,
"step": 183
},
{
"epoch": 0.8803827751196173,
"grad_norm": 0.529906690120697,
"learning_rate": 3.660061460651981e-06,
"loss": 1.8156,
"step": 184
},
{
"epoch": 0.8851674641148325,
"grad_norm": 0.5719719529151917,
"learning_rate": 3.376388529782215e-06,
"loss": 1.9336,
"step": 185
},
{
"epoch": 0.8899521531100478,
"grad_norm": 0.5587736964225769,
"learning_rate": 3.1037725843711062e-06,
"loss": 1.7484,
"step": 186
},
{
"epoch": 0.8947368421052632,
"grad_norm": 0.6443825364112854,
"learning_rate": 2.842278276436128e-06,
"loss": 2.0777,
"step": 187
},
{
"epoch": 0.8995215311004785,
"grad_norm": 0.6428802609443665,
"learning_rate": 2.591967620451707e-06,
"loss": 1.8287,
"step": 188
},
{
"epoch": 0.9043062200956937,
"grad_norm": 0.7180678844451904,
"learning_rate": 2.3528999786421756e-06,
"loss": 1.9069,
"step": 189
},
{
"epoch": 0.9090909090909091,
"grad_norm": 0.6900708079338074,
"learning_rate": 2.1251320469037827e-06,
"loss": 1.9678,
"step": 190
},
{
"epoch": 0.9138755980861244,
"grad_norm": 0.6777265667915344,
"learning_rate": 1.908717841359048e-06,
"loss": 1.7879,
"step": 191
},
{
"epoch": 0.9186602870813397,
"grad_norm": 0.760741651058197,
"learning_rate": 1.70370868554659e-06,
"loss": 1.8427,
"step": 192
},
{
"epoch": 0.9234449760765551,
"grad_norm": 0.776866614818573,
"learning_rate": 1.5101531982495308e-06,
"loss": 1.787,
"step": 193
},
{
"epoch": 0.9282296650717703,
"grad_norm": 0.9523407816886902,
"learning_rate": 1.328097281965357e-06,
"loss": 1.978,
"step": 194
},
{
"epoch": 0.9330143540669856,
"grad_norm": 0.9729501605033875,
"learning_rate": 1.157584112019966e-06,
"loss": 2.0224,
"step": 195
},
{
"epoch": 0.937799043062201,
"grad_norm": 1.0963469743728638,
"learning_rate": 9.986541263284077e-07,
"loss": 1.7795,
"step": 196
},
{
"epoch": 0.9425837320574163,
"grad_norm": 1.3530256748199463,
"learning_rate": 8.513450158049108e-07,
"loss": 1.9669,
"step": 197
},
{
"epoch": 0.9473684210526315,
"grad_norm": 1.6831023693084717,
"learning_rate": 7.156917154243048e-07,
"loss": 1.9834,
"step": 198
},
{
"epoch": 0.9521531100478469,
"grad_norm": 1.99193274974823,
"learning_rate": 5.917263959370312e-07,
"loss": 2.0608,
"step": 199
},
{
"epoch": 0.9569377990430622,
"grad_norm": 2.2735681533813477,
"learning_rate": 4.794784562397458e-07,
"loss": 1.5297,
"step": 200
},
{
"epoch": 0.9617224880382775,
"grad_norm": 0.24847882986068726,
"learning_rate": 3.7897451640321323e-07,
"loss": 1.4625,
"step": 201
},
{
"epoch": 0.9665071770334929,
"grad_norm": 0.2849666178226471,
"learning_rate": 2.902384113592782e-07,
"loss": 1.7225,
"step": 202
},
{
"epoch": 0.9712918660287081,
"grad_norm": 0.3164761960506439,
"learning_rate": 2.1329118524827662e-07,
"loss": 1.4438,
"step": 203
},
{
"epoch": 0.9760765550239234,
"grad_norm": 0.34920385479927063,
"learning_rate": 1.481510864283553e-07,
"loss": 1.6457,
"step": 204
},
{
"epoch": 0.9808612440191388,
"grad_norm": 0.42475542426109314,
"learning_rate": 9.483356314779479e-08,
"loss": 1.7947,
"step": 205
},
{
"epoch": 0.9856459330143541,
"grad_norm": 0.5095606446266174,
"learning_rate": 5.3351259881379014e-08,
"loss": 1.8242,
"step": 206
},
{
"epoch": 0.9904306220095693,
"grad_norm": 0.6890068054199219,
"learning_rate": 2.371401433170495e-08,
"loss": 2.0936,
"step": 207
},
{
"epoch": 0.9952153110047847,
"grad_norm": 0.8535192608833313,
"learning_rate": 5.928855096154484e-09,
"loss": 1.5928,
"step": 208
},
{
"epoch": 1.0,
"grad_norm": 1.58793306350708,
"learning_rate": 0.0,
"loss": 1.6603,
"step": 209
}
],
"logging_steps": 1,
"max_steps": 209,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 53,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.769507127708877e+16,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}