SQL-Llama-v0.5 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 9.47867298578199,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 2.9891708745218706,
"learning_rate": 0.0,
"loss": 0.2991,
"step": 1
},
{
"epoch": 0.02,
"grad_norm": 2.628476882946116,
"learning_rate": 4.075900941810124e-06,
"loss": 0.2519,
"step": 2
},
{
"epoch": 0.03,
"grad_norm": 1.952335655925286,
"learning_rate": 6.46015014942309e-06,
"loss": 0.2296,
"step": 3
},
{
"epoch": 0.04,
"grad_norm": 1.1420556071851116,
"learning_rate": 8.151801883620247e-06,
"loss": 0.2095,
"step": 4
},
{
"epoch": 0.05,
"grad_norm": 1.1092935988958605,
"learning_rate": 9.463948908766788e-06,
"loss": 0.1482,
"step": 5
},
{
"epoch": 0.06,
"grad_norm": 1.1226935054620768,
"learning_rate": 1.0536051091233212e-05,
"loss": 0.1426,
"step": 6
},
{
"epoch": 0.07,
"grad_norm": 1.4358456255356538,
"learning_rate": 1.1442500570809876e-05,
"loss": 0.123,
"step": 7
},
{
"epoch": 0.08,
"grad_norm": 1.6670832907973767,
"learning_rate": 1.222770282543037e-05,
"loss": 0.1827,
"step": 8
},
{
"epoch": 0.09,
"grad_norm": 1.4139671504293465,
"learning_rate": 1.292030029884618e-05,
"loss": 0.1326,
"step": 9
},
{
"epoch": 0.09,
"grad_norm": 0.9959671837674485,
"learning_rate": 1.3539849850576912e-05,
"loss": 0.1463,
"step": 10
},
{
"epoch": 0.1,
"grad_norm": 0.9550511331001,
"learning_rate": 1.4100300592531481e-05,
"loss": 0.1467,
"step": 11
},
{
"epoch": 0.11,
"grad_norm": 1.3812862098055738,
"learning_rate": 1.4611952033043337e-05,
"loss": 0.128,
"step": 12
},
{
"epoch": 0.12,
"grad_norm": 0.8924701383280631,
"learning_rate": 1.5082625732282867e-05,
"loss": 0.1396,
"step": 13
},
{
"epoch": 0.13,
"grad_norm": 0.9547565129335883,
"learning_rate": 1.551840151262e-05,
"loss": 0.1344,
"step": 14
},
{
"epoch": 0.14,
"grad_norm": 0.851462122049498,
"learning_rate": 1.5924099058189875e-05,
"loss": 0.1446,
"step": 15
},
{
"epoch": 0.15,
"grad_norm": 0.9253817311894532,
"learning_rate": 1.6303603767240495e-05,
"loss": 0.1284,
"step": 16
},
{
"epoch": 0.16,
"grad_norm": 0.6232417169686899,
"learning_rate": 1.6660093644266146e-05,
"loss": 0.1341,
"step": 17
},
{
"epoch": 0.17,
"grad_norm": 0.9291062982617737,
"learning_rate": 1.6996201240656302e-05,
"loss": 0.141,
"step": 18
},
{
"epoch": 0.18,
"grad_norm": 0.7591782680034003,
"learning_rate": 1.7314131752785847e-05,
"loss": 0.1244,
"step": 19
},
{
"epoch": 0.19,
"grad_norm": 0.7675626330959906,
"learning_rate": 1.7615750792387035e-05,
"loss": 0.1581,
"step": 20
},
{
"epoch": 0.2,
"grad_norm": 0.7871669246563585,
"learning_rate": 1.7902650720232966e-05,
"loss": 0.1608,
"step": 21
},
{
"epoch": 0.21,
"grad_norm": 0.6101055900848477,
"learning_rate": 1.8176201534341607e-05,
"loss": 0.0872,
"step": 22
},
{
"epoch": 0.22,
"grad_norm": 0.6847091729874671,
"learning_rate": 1.8437590437029225e-05,
"loss": 0.1287,
"step": 23
},
{
"epoch": 0.23,
"grad_norm": 0.6979191813257568,
"learning_rate": 1.868785297485346e-05,
"loss": 0.1207,
"step": 24
},
{
"epoch": 0.24,
"grad_norm": 0.6357442032508126,
"learning_rate": 1.8927897817533575e-05,
"loss": 0.1458,
"step": 25
},
{
"epoch": 0.25,
"grad_norm": 0.6349345994671496,
"learning_rate": 1.915852667409299e-05,
"loss": 0.1156,
"step": 26
},
{
"epoch": 0.26,
"grad_norm": 0.5770730681028545,
"learning_rate": 1.9380450448269272e-05,
"loss": 0.1265,
"step": 27
},
{
"epoch": 0.27,
"grad_norm": 0.7646910751751819,
"learning_rate": 1.9594302454430122e-05,
"loss": 0.1752,
"step": 28
},
{
"epoch": 0.27,
"grad_norm": 0.775283037307772,
"learning_rate": 1.9800649313336155e-05,
"loss": 0.1838,
"step": 29
},
{
"epoch": 0.28,
"grad_norm": 0.6990618822813822,
"learning_rate": 2e-05,
"loss": 0.1502,
"step": 30
},
{
"epoch": 0.29,
"grad_norm": 0.4792098167589292,
"learning_rate": 2e-05,
"loss": 0.0842,
"step": 31
},
{
"epoch": 0.3,
"grad_norm": 0.7437266001506447,
"learning_rate": 1.997938144329897e-05,
"loss": 0.197,
"step": 32
},
{
"epoch": 0.31,
"grad_norm": 0.7072993660584064,
"learning_rate": 1.995876288659794e-05,
"loss": 0.1643,
"step": 33
},
{
"epoch": 0.32,
"grad_norm": 0.7433128258167087,
"learning_rate": 1.993814432989691e-05,
"loss": 0.1405,
"step": 34
},
{
"epoch": 0.33,
"grad_norm": 0.5408021250842455,
"learning_rate": 1.9917525773195878e-05,
"loss": 0.1013,
"step": 35
},
{
"epoch": 0.34,
"grad_norm": 0.6861690512756182,
"learning_rate": 1.9896907216494847e-05,
"loss": 0.169,
"step": 36
},
{
"epoch": 0.35,
"grad_norm": 0.541331532924967,
"learning_rate": 1.9876288659793816e-05,
"loss": 0.0984,
"step": 37
},
{
"epoch": 0.36,
"grad_norm": 0.7273271700140183,
"learning_rate": 1.9855670103092785e-05,
"loss": 0.1723,
"step": 38
},
{
"epoch": 0.37,
"grad_norm": 0.5346399587322774,
"learning_rate": 1.9835051546391754e-05,
"loss": 0.0985,
"step": 39
},
{
"epoch": 0.38,
"grad_norm": 0.6520794087333511,
"learning_rate": 1.9814432989690723e-05,
"loss": 0.1885,
"step": 40
},
{
"epoch": 0.39,
"grad_norm": 0.5789181191981833,
"learning_rate": 1.9793814432989692e-05,
"loss": 0.1242,
"step": 41
},
{
"epoch": 0.4,
"grad_norm": 0.5688284285410545,
"learning_rate": 1.977319587628866e-05,
"loss": 0.1401,
"step": 42
},
{
"epoch": 0.41,
"grad_norm": 0.5423895126989404,
"learning_rate": 1.975257731958763e-05,
"loss": 0.1122,
"step": 43
},
{
"epoch": 0.42,
"grad_norm": 0.5506903661320599,
"learning_rate": 1.97319587628866e-05,
"loss": 0.1352,
"step": 44
},
{
"epoch": 0.43,
"grad_norm": 0.5530734690137673,
"learning_rate": 1.9711340206185568e-05,
"loss": 0.0964,
"step": 45
},
{
"epoch": 0.44,
"grad_norm": 0.5310373722240495,
"learning_rate": 1.969072164948454e-05,
"loss": 0.1405,
"step": 46
},
{
"epoch": 0.45,
"grad_norm": 0.5424356381959826,
"learning_rate": 1.9670103092783506e-05,
"loss": 0.1441,
"step": 47
},
{
"epoch": 0.45,
"grad_norm": 0.5273761880575986,
"learning_rate": 1.9649484536082475e-05,
"loss": 0.1516,
"step": 48
},
{
"epoch": 0.46,
"grad_norm": 0.4855824464395224,
"learning_rate": 1.9628865979381444e-05,
"loss": 0.1107,
"step": 49
},
{
"epoch": 0.47,
"grad_norm": 0.538789486511192,
"learning_rate": 1.9608247422680413e-05,
"loss": 0.1278,
"step": 50
},
{
"epoch": 0.48,
"grad_norm": 0.5191270790600404,
"learning_rate": 1.9587628865979382e-05,
"loss": 0.1066,
"step": 51
},
{
"epoch": 0.49,
"grad_norm": 0.5396501130372474,
"learning_rate": 1.956701030927835e-05,
"loss": 0.1377,
"step": 52
},
{
"epoch": 0.5,
"grad_norm": 0.4731574435681649,
"learning_rate": 1.954639175257732e-05,
"loss": 0.0951,
"step": 53
},
{
"epoch": 0.51,
"grad_norm": 0.611677100662539,
"learning_rate": 1.952577319587629e-05,
"loss": 0.1517,
"step": 54
},
{
"epoch": 0.52,
"grad_norm": 0.46006652203317466,
"learning_rate": 1.9505154639175258e-05,
"loss": 0.1024,
"step": 55
},
{
"epoch": 0.53,
"grad_norm": 0.5113730973996482,
"learning_rate": 1.9484536082474227e-05,
"loss": 0.1277,
"step": 56
},
{
"epoch": 0.54,
"grad_norm": 0.48719943941570315,
"learning_rate": 1.9463917525773196e-05,
"loss": 0.1042,
"step": 57
},
{
"epoch": 0.55,
"grad_norm": 0.5634315247940872,
"learning_rate": 1.9443298969072165e-05,
"loss": 0.113,
"step": 58
},
{
"epoch": 0.56,
"grad_norm": 0.5175008176253489,
"learning_rate": 1.9422680412371134e-05,
"loss": 0.1321,
"step": 59
},
{
"epoch": 0.57,
"grad_norm": 0.5003858507987266,
"learning_rate": 1.9402061855670103e-05,
"loss": 0.1181,
"step": 60
},
{
"epoch": 0.58,
"grad_norm": 0.5740662456591682,
"learning_rate": 1.9381443298969072e-05,
"loss": 0.1305,
"step": 61
},
{
"epoch": 0.59,
"grad_norm": 0.5356876491554879,
"learning_rate": 1.936082474226804e-05,
"loss": 0.1166,
"step": 62
},
{
"epoch": 0.6,
"grad_norm": 0.4664669227433975,
"learning_rate": 1.934020618556701e-05,
"loss": 0.0924,
"step": 63
},
{
"epoch": 0.61,
"grad_norm": 0.44420974113139416,
"learning_rate": 1.9319587628865983e-05,
"loss": 0.1006,
"step": 64
},
{
"epoch": 0.62,
"grad_norm": 0.5188205131037862,
"learning_rate": 1.9298969072164952e-05,
"loss": 0.1231,
"step": 65
},
{
"epoch": 0.63,
"grad_norm": 0.6050642569758461,
"learning_rate": 1.927835051546392e-05,
"loss": 0.1786,
"step": 66
},
{
"epoch": 0.64,
"grad_norm": 0.48238849910541576,
"learning_rate": 1.925773195876289e-05,
"loss": 0.1471,
"step": 67
},
{
"epoch": 0.64,
"grad_norm": 0.49789315637710874,
"learning_rate": 1.923711340206186e-05,
"loss": 0.1133,
"step": 68
},
{
"epoch": 0.65,
"grad_norm": 0.49039414070768905,
"learning_rate": 1.9216494845360828e-05,
"loss": 0.1429,
"step": 69
},
{
"epoch": 0.66,
"grad_norm": 0.4827438083411632,
"learning_rate": 1.9195876288659797e-05,
"loss": 0.1061,
"step": 70
},
{
"epoch": 0.67,
"grad_norm": 0.48225039936541547,
"learning_rate": 1.9175257731958766e-05,
"loss": 0.1305,
"step": 71
},
{
"epoch": 0.68,
"grad_norm": 0.4761932693694711,
"learning_rate": 1.9154639175257735e-05,
"loss": 0.0927,
"step": 72
},
{
"epoch": 0.69,
"grad_norm": 0.43140784843947283,
"learning_rate": 1.9134020618556704e-05,
"loss": 0.1342,
"step": 73
},
{
"epoch": 0.7,
"grad_norm": 0.5484585043425267,
"learning_rate": 1.9113402061855673e-05,
"loss": 0.1891,
"step": 74
},
{
"epoch": 0.71,
"grad_norm": 0.4220775541843179,
"learning_rate": 1.9092783505154642e-05,
"loss": 0.0998,
"step": 75
},
{
"epoch": 0.72,
"grad_norm": 0.44409602468867826,
"learning_rate": 1.907216494845361e-05,
"loss": 0.1078,
"step": 76
},
{
"epoch": 0.73,
"grad_norm": 0.5539792334248451,
"learning_rate": 1.9051546391752577e-05,
"loss": 0.1256,
"step": 77
},
{
"epoch": 0.74,
"grad_norm": 0.4573628453476899,
"learning_rate": 1.9030927835051546e-05,
"loss": 0.1037,
"step": 78
},
{
"epoch": 0.75,
"grad_norm": 0.429947081098509,
"learning_rate": 1.9010309278350515e-05,
"loss": 0.1081,
"step": 79
},
{
"epoch": 0.76,
"grad_norm": 0.514407744031743,
"learning_rate": 1.8989690721649484e-05,
"loss": 0.157,
"step": 80
},
{
"epoch": 0.77,
"grad_norm": 0.5311443560406982,
"learning_rate": 1.8969072164948453e-05,
"loss": 0.1039,
"step": 81
},
{
"epoch": 0.78,
"grad_norm": 0.6125785057203883,
"learning_rate": 1.8948453608247422e-05,
"loss": 0.1398,
"step": 82
},
{
"epoch": 0.79,
"grad_norm": 0.43334771814067946,
"learning_rate": 1.8927835051546394e-05,
"loss": 0.0968,
"step": 83
},
{
"epoch": 0.8,
"grad_norm": 0.49487051753793676,
"learning_rate": 1.8907216494845363e-05,
"loss": 0.1333,
"step": 84
},
{
"epoch": 0.81,
"grad_norm": 0.4336280637343098,
"learning_rate": 1.8886597938144332e-05,
"loss": 0.0974,
"step": 85
},
{
"epoch": 0.82,
"grad_norm": 0.49506544937586355,
"learning_rate": 1.88659793814433e-05,
"loss": 0.112,
"step": 86
},
{
"epoch": 0.82,
"grad_norm": 0.4091312680240698,
"learning_rate": 1.884536082474227e-05,
"loss": 0.1143,
"step": 87
},
{
"epoch": 0.83,
"grad_norm": 0.48089070903661946,
"learning_rate": 1.882474226804124e-05,
"loss": 0.1284,
"step": 88
},
{
"epoch": 0.84,
"grad_norm": 0.5614761730702772,
"learning_rate": 1.880412371134021e-05,
"loss": 0.1613,
"step": 89
},
{
"epoch": 0.85,
"grad_norm": 0.43634241092691967,
"learning_rate": 1.8783505154639178e-05,
"loss": 0.1131,
"step": 90
},
{
"epoch": 0.86,
"grad_norm": 0.46095036634171394,
"learning_rate": 1.8762886597938147e-05,
"loss": 0.0948,
"step": 91
},
{
"epoch": 0.87,
"grad_norm": 0.36008710383623416,
"learning_rate": 1.8742268041237116e-05,
"loss": 0.0774,
"step": 92
},
{
"epoch": 0.88,
"grad_norm": 0.5394277302857614,
"learning_rate": 1.8721649484536085e-05,
"loss": 0.15,
"step": 93
},
{
"epoch": 0.89,
"grad_norm": 0.5286664974389561,
"learning_rate": 1.8701030927835054e-05,
"loss": 0.1029,
"step": 94
},
{
"epoch": 0.9,
"grad_norm": 0.44181433524277963,
"learning_rate": 1.8680412371134023e-05,
"loss": 0.1051,
"step": 95
},
{
"epoch": 0.91,
"grad_norm": 0.47802204444761204,
"learning_rate": 1.865979381443299e-05,
"loss": 0.1475,
"step": 96
},
{
"epoch": 0.92,
"grad_norm": 0.40580589522182464,
"learning_rate": 1.863917525773196e-05,
"loss": 0.1205,
"step": 97
},
{
"epoch": 0.93,
"grad_norm": 0.3470193515302502,
"learning_rate": 1.861855670103093e-05,
"loss": 0.0695,
"step": 98
},
{
"epoch": 0.94,
"grad_norm": 0.8370973971565091,
"learning_rate": 1.85979381443299e-05,
"loss": 0.1328,
"step": 99
},
{
"epoch": 0.95,
"grad_norm": 0.7143901365450398,
"learning_rate": 1.8577319587628868e-05,
"loss": 0.1193,
"step": 100
},
{
"epoch": 0.96,
"grad_norm": 0.42967489397457476,
"learning_rate": 1.8556701030927837e-05,
"loss": 0.1019,
"step": 101
},
{
"epoch": 0.97,
"grad_norm": 0.455637101456171,
"learning_rate": 1.8536082474226806e-05,
"loss": 0.1176,
"step": 102
},
{
"epoch": 0.98,
"grad_norm": 0.4124368388053171,
"learning_rate": 1.8515463917525775e-05,
"loss": 0.0868,
"step": 103
},
{
"epoch": 0.99,
"grad_norm": 0.5993952166794445,
"learning_rate": 1.8494845360824744e-05,
"loss": 0.1743,
"step": 104
},
{
"epoch": 1.0,
"grad_norm": 0.42428104090834723,
"learning_rate": 1.8474226804123713e-05,
"loss": 0.1129,
"step": 105
},
{
"epoch": 1.0,
"grad_norm": 0.44428984015154116,
"learning_rate": 1.8453608247422682e-05,
"loss": 0.0745,
"step": 106
},
{
"epoch": 1.01,
"grad_norm": 0.39792334349591185,
"learning_rate": 1.843298969072165e-05,
"loss": 0.0724,
"step": 107
},
{
"epoch": 1.02,
"grad_norm": 0.33478341250084837,
"learning_rate": 1.841237113402062e-05,
"loss": 0.0625,
"step": 108
},
{
"epoch": 1.03,
"grad_norm": 0.35706805867910113,
"learning_rate": 1.839175257731959e-05,
"loss": 0.0764,
"step": 109
},
{
"epoch": 1.04,
"grad_norm": 0.3592115113274458,
"learning_rate": 1.8371134020618558e-05,
"loss": 0.0817,
"step": 110
},
{
"epoch": 1.05,
"grad_norm": 0.42738331362282894,
"learning_rate": 1.8350515463917527e-05,
"loss": 0.0752,
"step": 111
},
{
"epoch": 1.06,
"grad_norm": 0.4023951340242268,
"learning_rate": 1.8329896907216496e-05,
"loss": 0.0822,
"step": 112
},
{
"epoch": 1.07,
"grad_norm": 0.4646984802980572,
"learning_rate": 1.8309278350515465e-05,
"loss": 0.0811,
"step": 113
},
{
"epoch": 1.08,
"grad_norm": 0.44504271248702093,
"learning_rate": 1.8288659793814434e-05,
"loss": 0.0564,
"step": 114
},
{
"epoch": 1.09,
"grad_norm": 0.33807535579171555,
"learning_rate": 1.8268041237113403e-05,
"loss": 0.0615,
"step": 115
},
{
"epoch": 1.1,
"grad_norm": 0.43496401665380907,
"learning_rate": 1.8247422680412372e-05,
"loss": 0.1046,
"step": 116
},
{
"epoch": 1.11,
"grad_norm": 0.48325092877921866,
"learning_rate": 1.822680412371134e-05,
"loss": 0.08,
"step": 117
},
{
"epoch": 1.12,
"grad_norm": 0.8768604458982987,
"learning_rate": 1.820618556701031e-05,
"loss": 0.0909,
"step": 118
},
{
"epoch": 1.13,
"grad_norm": 0.3569991733824619,
"learning_rate": 1.818556701030928e-05,
"loss": 0.0608,
"step": 119
},
{
"epoch": 1.14,
"grad_norm": 0.4656594436502199,
"learning_rate": 1.8164948453608248e-05,
"loss": 0.086,
"step": 120
},
{
"epoch": 1.15,
"grad_norm": 0.35277327685267473,
"learning_rate": 1.8144329896907217e-05,
"loss": 0.0638,
"step": 121
},
{
"epoch": 1.16,
"grad_norm": 0.448631001636137,
"learning_rate": 1.8123711340206186e-05,
"loss": 0.0779,
"step": 122
},
{
"epoch": 1.17,
"grad_norm": 0.4365549438849367,
"learning_rate": 1.8103092783505155e-05,
"loss": 0.0951,
"step": 123
},
{
"epoch": 1.18,
"grad_norm": 0.47401540189076946,
"learning_rate": 1.8082474226804124e-05,
"loss": 0.0982,
"step": 124
},
{
"epoch": 1.18,
"grad_norm": 0.4158031117835676,
"learning_rate": 1.8061855670103093e-05,
"loss": 0.088,
"step": 125
},
{
"epoch": 1.19,
"grad_norm": 0.4481248057802596,
"learning_rate": 1.8041237113402062e-05,
"loss": 0.0874,
"step": 126
},
{
"epoch": 1.2,
"grad_norm": 0.4179778588273936,
"learning_rate": 1.802061855670103e-05,
"loss": 0.082,
"step": 127
},
{
"epoch": 1.21,
"grad_norm": 0.3473637454051514,
"learning_rate": 1.8e-05,
"loss": 0.0627,
"step": 128
},
{
"epoch": 1.22,
"grad_norm": 0.3776966135269121,
"learning_rate": 1.797938144329897e-05,
"loss": 0.0709,
"step": 129
},
{
"epoch": 1.23,
"grad_norm": 0.4415996094246706,
"learning_rate": 1.7958762886597942e-05,
"loss": 0.0824,
"step": 130
},
{
"epoch": 1.24,
"grad_norm": 0.37802367310847074,
"learning_rate": 1.793814432989691e-05,
"loss": 0.0663,
"step": 131
},
{
"epoch": 1.25,
"grad_norm": 0.2953516626376782,
"learning_rate": 1.791752577319588e-05,
"loss": 0.0501,
"step": 132
},
{
"epoch": 1.26,
"grad_norm": 0.40263067588370255,
"learning_rate": 1.789690721649485e-05,
"loss": 0.0887,
"step": 133
},
{
"epoch": 1.27,
"grad_norm": 0.35922391452204516,
"learning_rate": 1.7876288659793818e-05,
"loss": 0.0653,
"step": 134
},
{
"epoch": 1.28,
"grad_norm": 0.39393795840614804,
"learning_rate": 1.7855670103092787e-05,
"loss": 0.0741,
"step": 135
},
{
"epoch": 1.29,
"grad_norm": 0.4026290844719299,
"learning_rate": 1.7835051546391756e-05,
"loss": 0.0966,
"step": 136
},
{
"epoch": 1.3,
"grad_norm": 0.33584409346668986,
"learning_rate": 1.7814432989690725e-05,
"loss": 0.0585,
"step": 137
},
{
"epoch": 1.31,
"grad_norm": 0.3414251431403049,
"learning_rate": 1.779381443298969e-05,
"loss": 0.0682,
"step": 138
},
{
"epoch": 1.32,
"grad_norm": 0.3434593532506542,
"learning_rate": 1.777319587628866e-05,
"loss": 0.0637,
"step": 139
},
{
"epoch": 1.33,
"grad_norm": 0.39753609961205566,
"learning_rate": 1.775257731958763e-05,
"loss": 0.0611,
"step": 140
},
{
"epoch": 1.34,
"grad_norm": 0.42998781979853173,
"learning_rate": 1.7731958762886598e-05,
"loss": 0.0625,
"step": 141
},
{
"epoch": 1.35,
"grad_norm": 0.34154494674126434,
"learning_rate": 1.7711340206185567e-05,
"loss": 0.0667,
"step": 142
},
{
"epoch": 1.36,
"grad_norm": 0.3676085999652895,
"learning_rate": 1.7690721649484536e-05,
"loss": 0.0701,
"step": 143
},
{
"epoch": 1.36,
"grad_norm": 0.2761776142522199,
"learning_rate": 1.7670103092783505e-05,
"loss": 0.0538,
"step": 144
},
{
"epoch": 1.37,
"grad_norm": 0.3784576985822394,
"learning_rate": 1.7649484536082474e-05,
"loss": 0.0799,
"step": 145
},
{
"epoch": 1.38,
"grad_norm": 0.35329272160892033,
"learning_rate": 1.7628865979381443e-05,
"loss": 0.0665,
"step": 146
},
{
"epoch": 1.39,
"grad_norm": 0.4139007216329521,
"learning_rate": 1.7608247422680412e-05,
"loss": 0.0796,
"step": 147
},
{
"epoch": 1.4,
"grad_norm": 0.3336389662855423,
"learning_rate": 1.758762886597938e-05,
"loss": 0.0653,
"step": 148
},
{
"epoch": 1.41,
"grad_norm": 0.5268985988721392,
"learning_rate": 1.7567010309278353e-05,
"loss": 0.0801,
"step": 149
},
{
"epoch": 1.42,
"grad_norm": 0.37674074663317386,
"learning_rate": 1.7546391752577322e-05,
"loss": 0.0884,
"step": 150
},
{
"epoch": 1.43,
"grad_norm": 0.35273202703123274,
"learning_rate": 1.752577319587629e-05,
"loss": 0.0727,
"step": 151
},
{
"epoch": 1.44,
"grad_norm": 0.3824657309832461,
"learning_rate": 1.750515463917526e-05,
"loss": 0.0769,
"step": 152
},
{
"epoch": 1.45,
"grad_norm": 0.42634471741485275,
"learning_rate": 1.748453608247423e-05,
"loss": 0.0922,
"step": 153
},
{
"epoch": 1.46,
"grad_norm": 0.3445619941367275,
"learning_rate": 1.74639175257732e-05,
"loss": 0.073,
"step": 154
},
{
"epoch": 1.47,
"grad_norm": 0.35323195905302596,
"learning_rate": 1.7443298969072168e-05,
"loss": 0.0812,
"step": 155
},
{
"epoch": 1.48,
"grad_norm": 0.3427297993674751,
"learning_rate": 1.7422680412371137e-05,
"loss": 0.0863,
"step": 156
},
{
"epoch": 1.49,
"grad_norm": 0.40044573893019114,
"learning_rate": 1.7402061855670106e-05,
"loss": 0.0891,
"step": 157
},
{
"epoch": 1.5,
"grad_norm": 0.43246751870030287,
"learning_rate": 1.7381443298969075e-05,
"loss": 0.1018,
"step": 158
},
{
"epoch": 1.51,
"grad_norm": 0.4032172748024563,
"learning_rate": 1.7360824742268044e-05,
"loss": 0.0861,
"step": 159
},
{
"epoch": 1.52,
"grad_norm": 0.42850936286961694,
"learning_rate": 1.7340206185567013e-05,
"loss": 0.0874,
"step": 160
},
{
"epoch": 1.53,
"grad_norm": 0.4861621190360696,
"learning_rate": 1.731958762886598e-05,
"loss": 0.0973,
"step": 161
},
{
"epoch": 1.54,
"grad_norm": 0.3963801242216849,
"learning_rate": 1.729896907216495e-05,
"loss": 0.0731,
"step": 162
},
{
"epoch": 1.55,
"grad_norm": 0.35697454590706096,
"learning_rate": 1.727835051546392e-05,
"loss": 0.0805,
"step": 163
},
{
"epoch": 1.55,
"grad_norm": 0.344319619383359,
"learning_rate": 1.725773195876289e-05,
"loss": 0.0826,
"step": 164
},
{
"epoch": 1.56,
"grad_norm": 0.3997725391367782,
"learning_rate": 1.7237113402061858e-05,
"loss": 0.0899,
"step": 165
},
{
"epoch": 1.57,
"grad_norm": 0.3373739227823287,
"learning_rate": 1.7216494845360827e-05,
"loss": 0.0681,
"step": 166
},
{
"epoch": 1.58,
"grad_norm": 0.29045827349093645,
"learning_rate": 1.7195876288659796e-05,
"loss": 0.0564,
"step": 167
},
{
"epoch": 1.59,
"grad_norm": 0.351362213727217,
"learning_rate": 1.7175257731958765e-05,
"loss": 0.0735,
"step": 168
},
{
"epoch": 1.6,
"grad_norm": 0.3413459419041117,
"learning_rate": 1.7154639175257734e-05,
"loss": 0.077,
"step": 169
},
{
"epoch": 1.61,
"grad_norm": 0.3495399576670488,
"learning_rate": 1.7134020618556703e-05,
"loss": 0.069,
"step": 170
},
{
"epoch": 1.62,
"grad_norm": 0.3450464919974795,
"learning_rate": 1.7113402061855672e-05,
"loss": 0.0598,
"step": 171
},
{
"epoch": 1.63,
"grad_norm": 0.3000530369884767,
"learning_rate": 1.709278350515464e-05,
"loss": 0.064,
"step": 172
},
{
"epoch": 1.64,
"grad_norm": 0.3837715513782022,
"learning_rate": 1.707216494845361e-05,
"loss": 0.0685,
"step": 173
},
{
"epoch": 1.65,
"grad_norm": 0.48290916882482465,
"learning_rate": 1.705154639175258e-05,
"loss": 0.0859,
"step": 174
},
{
"epoch": 1.66,
"grad_norm": 0.36559227120396875,
"learning_rate": 1.7030927835051548e-05,
"loss": 0.065,
"step": 175
},
{
"epoch": 1.67,
"grad_norm": 0.3994686956745055,
"learning_rate": 1.7010309278350517e-05,
"loss": 0.0889,
"step": 176
},
{
"epoch": 1.68,
"grad_norm": 0.42878969203405465,
"learning_rate": 1.6989690721649486e-05,
"loss": 0.0958,
"step": 177
},
{
"epoch": 1.69,
"grad_norm": 0.3722570156130593,
"learning_rate": 1.6969072164948455e-05,
"loss": 0.0767,
"step": 178
},
{
"epoch": 1.7,
"grad_norm": 0.3933025906289077,
"learning_rate": 1.6948453608247424e-05,
"loss": 0.0847,
"step": 179
},
{
"epoch": 1.71,
"grad_norm": 0.34309150740792804,
"learning_rate": 1.6927835051546393e-05,
"loss": 0.0741,
"step": 180
},
{
"epoch": 1.72,
"grad_norm": 0.4399189324095103,
"learning_rate": 1.6907216494845362e-05,
"loss": 0.0908,
"step": 181
},
{
"epoch": 1.73,
"grad_norm": 0.4649681598006581,
"learning_rate": 1.688659793814433e-05,
"loss": 0.1088,
"step": 182
},
{
"epoch": 1.73,
"grad_norm": 0.3630771268583078,
"learning_rate": 1.68659793814433e-05,
"loss": 0.0847,
"step": 183
},
{
"epoch": 1.74,
"grad_norm": 0.5928441465836051,
"learning_rate": 1.684536082474227e-05,
"loss": 0.0711,
"step": 184
},
{
"epoch": 1.75,
"grad_norm": 0.3226019404889507,
"learning_rate": 1.6824742268041238e-05,
"loss": 0.0709,
"step": 185
},
{
"epoch": 1.76,
"grad_norm": 0.37376434675002645,
"learning_rate": 1.6804123711340207e-05,
"loss": 0.0824,
"step": 186
},
{
"epoch": 1.77,
"grad_norm": 0.3738343679769799,
"learning_rate": 1.6783505154639176e-05,
"loss": 0.0768,
"step": 187
},
{
"epoch": 1.78,
"grad_norm": 0.3403615076772027,
"learning_rate": 1.6762886597938145e-05,
"loss": 0.0694,
"step": 188
},
{
"epoch": 1.79,
"grad_norm": 0.3708718690601821,
"learning_rate": 1.6742268041237114e-05,
"loss": 0.0837,
"step": 189
},
{
"epoch": 1.8,
"grad_norm": 0.3776559355753804,
"learning_rate": 1.6721649484536083e-05,
"loss": 0.0862,
"step": 190
},
{
"epoch": 1.81,
"grad_norm": 0.31976177974712416,
"learning_rate": 1.6701030927835052e-05,
"loss": 0.0554,
"step": 191
},
{
"epoch": 1.82,
"grad_norm": 0.42017789621086404,
"learning_rate": 1.668041237113402e-05,
"loss": 0.0873,
"step": 192
},
{
"epoch": 1.83,
"grad_norm": 0.39305634100685377,
"learning_rate": 1.665979381443299e-05,
"loss": 0.076,
"step": 193
},
{
"epoch": 1.84,
"grad_norm": 0.4109573533516631,
"learning_rate": 1.663917525773196e-05,
"loss": 0.0979,
"step": 194
},
{
"epoch": 1.85,
"grad_norm": 0.44730451989447667,
"learning_rate": 1.661855670103093e-05,
"loss": 0.0853,
"step": 195
},
{
"epoch": 1.86,
"grad_norm": 0.353454668891172,
"learning_rate": 1.65979381443299e-05,
"loss": 0.0643,
"step": 196
},
{
"epoch": 1.87,
"grad_norm": 0.3993816968866783,
"learning_rate": 1.657731958762887e-05,
"loss": 0.0672,
"step": 197
},
{
"epoch": 1.88,
"grad_norm": 0.3651932473919443,
"learning_rate": 1.6556701030927836e-05,
"loss": 0.0854,
"step": 198
},
{
"epoch": 1.89,
"grad_norm": 0.40273286465825053,
"learning_rate": 1.6536082474226805e-05,
"loss": 0.0958,
"step": 199
},
{
"epoch": 1.9,
"grad_norm": 0.4077931403172478,
"learning_rate": 1.6515463917525774e-05,
"loss": 0.0914,
"step": 200
},
{
"epoch": 1.91,
"grad_norm": 0.37528853442361415,
"learning_rate": 1.6494845360824743e-05,
"loss": 0.0694,
"step": 201
},
{
"epoch": 1.91,
"grad_norm": 0.3698188162853022,
"learning_rate": 1.6474226804123712e-05,
"loss": 0.0658,
"step": 202
},
{
"epoch": 1.92,
"grad_norm": 0.4166757244873981,
"learning_rate": 1.645360824742268e-05,
"loss": 0.1108,
"step": 203
},
{
"epoch": 1.93,
"grad_norm": 0.4230675090854494,
"learning_rate": 1.643298969072165e-05,
"loss": 0.0824,
"step": 204
},
{
"epoch": 1.94,
"grad_norm": 0.38053714129679106,
"learning_rate": 1.641237113402062e-05,
"loss": 0.0729,
"step": 205
},
{
"epoch": 1.95,
"grad_norm": 0.3446081893025308,
"learning_rate": 1.6391752577319588e-05,
"loss": 0.0628,
"step": 206
},
{
"epoch": 1.96,
"grad_norm": 0.36932648919505784,
"learning_rate": 1.6371134020618557e-05,
"loss": 0.0684,
"step": 207
},
{
"epoch": 1.97,
"grad_norm": 0.357176232481463,
"learning_rate": 1.6350515463917526e-05,
"loss": 0.0832,
"step": 208
},
{
"epoch": 1.98,
"grad_norm": 0.4689409820598192,
"learning_rate": 1.6329896907216495e-05,
"loss": 0.0777,
"step": 209
},
{
"epoch": 1.99,
"grad_norm": 0.395944519071077,
"learning_rate": 1.6309278350515464e-05,
"loss": 0.0947,
"step": 210
},
{
"epoch": 2.0,
"grad_norm": 0.40622234250336314,
"learning_rate": 1.6288659793814433e-05,
"loss": 0.0627,
"step": 211
},
{
"epoch": 2.01,
"grad_norm": 0.2539631706405056,
"learning_rate": 1.6268041237113402e-05,
"loss": 0.0427,
"step": 212
},
{
"epoch": 2.02,
"grad_norm": 0.22459173547890357,
"learning_rate": 1.624742268041237e-05,
"loss": 0.0362,
"step": 213
},
{
"epoch": 2.03,
"grad_norm": 0.23482759010212903,
"learning_rate": 1.622680412371134e-05,
"loss": 0.0509,
"step": 214
},
{
"epoch": 2.04,
"grad_norm": 0.30937542481826746,
"learning_rate": 1.6206185567010312e-05,
"loss": 0.0476,
"step": 215
},
{
"epoch": 2.05,
"grad_norm": 0.2913540274331382,
"learning_rate": 1.618556701030928e-05,
"loss": 0.047,
"step": 216
},
{
"epoch": 2.06,
"grad_norm": 0.3187881928357141,
"learning_rate": 1.616494845360825e-05,
"loss": 0.0407,
"step": 217
},
{
"epoch": 2.07,
"grad_norm": 0.38096875307403705,
"learning_rate": 1.614432989690722e-05,
"loss": 0.0517,
"step": 218
},
{
"epoch": 2.08,
"grad_norm": 0.26618588358363005,
"learning_rate": 1.612371134020619e-05,
"loss": 0.0393,
"step": 219
},
{
"epoch": 2.09,
"grad_norm": 0.2966677921364006,
"learning_rate": 1.6103092783505158e-05,
"loss": 0.0443,
"step": 220
},
{
"epoch": 2.09,
"grad_norm": 0.2643265543845527,
"learning_rate": 1.6082474226804127e-05,
"loss": 0.0381,
"step": 221
},
{
"epoch": 2.1,
"grad_norm": 0.3494937748231802,
"learning_rate": 1.6061855670103096e-05,
"loss": 0.0473,
"step": 222
},
{
"epoch": 2.11,
"grad_norm": 0.3938727217874404,
"learning_rate": 1.6041237113402065e-05,
"loss": 0.0449,
"step": 223
},
{
"epoch": 2.12,
"grad_norm": 0.36976282472918287,
"learning_rate": 1.6020618556701034e-05,
"loss": 0.0371,
"step": 224
},
{
"epoch": 2.13,
"grad_norm": 0.5213171301410763,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.0475,
"step": 225
},
{
"epoch": 2.14,
"grad_norm": 0.3425345389848002,
"learning_rate": 1.597938144329897e-05,
"loss": 0.049,
"step": 226
},
{
"epoch": 2.15,
"grad_norm": 0.40706532531559037,
"learning_rate": 1.595876288659794e-05,
"loss": 0.0418,
"step": 227
},
{
"epoch": 2.16,
"grad_norm": 0.5042215292481345,
"learning_rate": 1.593814432989691e-05,
"loss": 0.0424,
"step": 228
},
{
"epoch": 2.17,
"grad_norm": 0.4570412267916464,
"learning_rate": 1.5917525773195875e-05,
"loss": 0.0506,
"step": 229
},
{
"epoch": 2.18,
"grad_norm": 0.5034229119221877,
"learning_rate": 1.5896907216494844e-05,
"loss": 0.0566,
"step": 230
},
{
"epoch": 2.19,
"grad_norm": 0.42136631426690235,
"learning_rate": 1.5876288659793813e-05,
"loss": 0.0442,
"step": 231
},
{
"epoch": 2.2,
"grad_norm": 0.40409499578362834,
"learning_rate": 1.5855670103092782e-05,
"loss": 0.0335,
"step": 232
},
{
"epoch": 2.21,
"grad_norm": 0.32539652421785425,
"learning_rate": 1.583505154639175e-05,
"loss": 0.0464,
"step": 233
},
{
"epoch": 2.22,
"grad_norm": 0.6100877359487853,
"learning_rate": 1.5814432989690724e-05,
"loss": 0.04,
"step": 234
},
{
"epoch": 2.23,
"grad_norm": 0.42379157616320506,
"learning_rate": 1.5793814432989693e-05,
"loss": 0.0542,
"step": 235
},
{
"epoch": 2.24,
"grad_norm": 0.44993961379408737,
"learning_rate": 1.5773195876288662e-05,
"loss": 0.0579,
"step": 236
},
{
"epoch": 2.25,
"grad_norm": 0.3872831252997736,
"learning_rate": 1.575257731958763e-05,
"loss": 0.0477,
"step": 237
},
{
"epoch": 2.26,
"grad_norm": 0.29852934467622494,
"learning_rate": 1.57319587628866e-05,
"loss": 0.0311,
"step": 238
},
{
"epoch": 2.27,
"grad_norm": 0.4096859188034772,
"learning_rate": 1.571134020618557e-05,
"loss": 0.0477,
"step": 239
},
{
"epoch": 2.27,
"grad_norm": 0.38511792624817015,
"learning_rate": 1.5690721649484538e-05,
"loss": 0.0456,
"step": 240
},
{
"epoch": 2.28,
"grad_norm": 0.4350761298724512,
"learning_rate": 1.5670103092783507e-05,
"loss": 0.0568,
"step": 241
},
{
"epoch": 2.29,
"grad_norm": 0.380441348207281,
"learning_rate": 1.5649484536082476e-05,
"loss": 0.0479,
"step": 242
},
{
"epoch": 2.3,
"grad_norm": 0.4126996337548848,
"learning_rate": 1.5628865979381445e-05,
"loss": 0.0573,
"step": 243
},
{
"epoch": 2.31,
"grad_norm": 0.38219571542214703,
"learning_rate": 1.5608247422680414e-05,
"loss": 0.0445,
"step": 244
},
{
"epoch": 2.32,
"grad_norm": 0.4206724865877809,
"learning_rate": 1.5587628865979383e-05,
"loss": 0.0439,
"step": 245
},
{
"epoch": 2.33,
"grad_norm": 0.33839226017923396,
"learning_rate": 1.5567010309278352e-05,
"loss": 0.0452,
"step": 246
},
{
"epoch": 2.34,
"grad_norm": 0.4421136976463177,
"learning_rate": 1.554639175257732e-05,
"loss": 0.0412,
"step": 247
},
{
"epoch": 2.35,
"grad_norm": 0.31243316412502314,
"learning_rate": 1.552577319587629e-05,
"loss": 0.0433,
"step": 248
},
{
"epoch": 2.36,
"grad_norm": 0.47482175745343197,
"learning_rate": 1.550515463917526e-05,
"loss": 0.056,
"step": 249
},
{
"epoch": 2.37,
"grad_norm": 0.4731354609231214,
"learning_rate": 1.5484536082474228e-05,
"loss": 0.0454,
"step": 250
},
{
"epoch": 2.38,
"grad_norm": 0.411636913353054,
"learning_rate": 1.5463917525773197e-05,
"loss": 0.0592,
"step": 251
},
{
"epoch": 2.39,
"grad_norm": 0.4216044229371355,
"learning_rate": 1.5443298969072166e-05,
"loss": 0.0574,
"step": 252
},
{
"epoch": 2.4,
"grad_norm": 0.42413925142569947,
"learning_rate": 1.5422680412371135e-05,
"loss": 0.0493,
"step": 253
},
{
"epoch": 2.41,
"grad_norm": 0.3361715122711189,
"learning_rate": 1.5402061855670104e-05,
"loss": 0.0353,
"step": 254
},
{
"epoch": 2.42,
"grad_norm": 0.39136734043814736,
"learning_rate": 1.5381443298969073e-05,
"loss": 0.0501,
"step": 255
},
{
"epoch": 2.43,
"grad_norm": 0.3294190366865447,
"learning_rate": 1.5360824742268042e-05,
"loss": 0.0543,
"step": 256
},
{
"epoch": 2.44,
"grad_norm": 0.3194270106179261,
"learning_rate": 1.534020618556701e-05,
"loss": 0.0554,
"step": 257
},
{
"epoch": 2.45,
"grad_norm": 0.3562230292367001,
"learning_rate": 1.531958762886598e-05,
"loss": 0.051,
"step": 258
},
{
"epoch": 2.45,
"grad_norm": 0.32243682475425084,
"learning_rate": 1.529896907216495e-05,
"loss": 0.0505,
"step": 259
},
{
"epoch": 2.46,
"grad_norm": 0.36024004029795603,
"learning_rate": 1.527835051546392e-05,
"loss": 0.0577,
"step": 260
},
{
"epoch": 2.47,
"grad_norm": 0.40265269593582487,
"learning_rate": 1.5257731958762888e-05,
"loss": 0.048,
"step": 261
},
{
"epoch": 2.48,
"grad_norm": 0.3469798977646712,
"learning_rate": 1.5237113402061857e-05,
"loss": 0.0579,
"step": 262
},
{
"epoch": 2.49,
"grad_norm": 0.3423329645091883,
"learning_rate": 1.5216494845360826e-05,
"loss": 0.0412,
"step": 263
},
{
"epoch": 2.5,
"grad_norm": 0.43161057242467094,
"learning_rate": 1.5195876288659795e-05,
"loss": 0.0605,
"step": 264
},
{
"epoch": 2.51,
"grad_norm": 0.405644681097467,
"learning_rate": 1.5175257731958764e-05,
"loss": 0.0495,
"step": 265
},
{
"epoch": 2.52,
"grad_norm": 0.37620517666493297,
"learning_rate": 1.5154639175257733e-05,
"loss": 0.0431,
"step": 266
},
{
"epoch": 2.53,
"grad_norm": 0.46121084868962603,
"learning_rate": 1.5134020618556702e-05,
"loss": 0.0396,
"step": 267
},
{
"epoch": 2.54,
"grad_norm": 0.371855906789752,
"learning_rate": 1.511340206185567e-05,
"loss": 0.0505,
"step": 268
},
{
"epoch": 2.55,
"grad_norm": 0.6676032867957563,
"learning_rate": 1.509278350515464e-05,
"loss": 0.0537,
"step": 269
},
{
"epoch": 2.56,
"grad_norm": 0.3837774338064626,
"learning_rate": 1.5072164948453609e-05,
"loss": 0.0557,
"step": 270
},
{
"epoch": 2.57,
"grad_norm": 0.3258598799919947,
"learning_rate": 1.5051546391752578e-05,
"loss": 0.0454,
"step": 271
},
{
"epoch": 2.58,
"grad_norm": 0.3996456752484399,
"learning_rate": 1.5030927835051547e-05,
"loss": 0.0459,
"step": 272
},
{
"epoch": 2.59,
"grad_norm": 0.3299441996994548,
"learning_rate": 1.5010309278350516e-05,
"loss": 0.0497,
"step": 273
},
{
"epoch": 2.6,
"grad_norm": 0.36221639374753245,
"learning_rate": 1.4989690721649487e-05,
"loss": 0.0541,
"step": 274
},
{
"epoch": 2.61,
"grad_norm": 0.2792964215041527,
"learning_rate": 1.4969072164948456e-05,
"loss": 0.0429,
"step": 275
},
{
"epoch": 2.62,
"grad_norm": 0.5203819480576923,
"learning_rate": 1.4948453608247425e-05,
"loss": 0.0675,
"step": 276
},
{
"epoch": 2.63,
"grad_norm": 0.318549865648741,
"learning_rate": 1.4927835051546394e-05,
"loss": 0.0525,
"step": 277
},
{
"epoch": 2.64,
"grad_norm": 0.3289777596451074,
"learning_rate": 1.4907216494845363e-05,
"loss": 0.0462,
"step": 278
},
{
"epoch": 2.64,
"grad_norm": 0.34871010230806093,
"learning_rate": 1.4886597938144332e-05,
"loss": 0.0542,
"step": 279
},
{
"epoch": 2.65,
"grad_norm": 0.4369056274527498,
"learning_rate": 1.48659793814433e-05,
"loss": 0.0626,
"step": 280
},
{
"epoch": 2.66,
"grad_norm": 0.2625050476133873,
"learning_rate": 1.484536082474227e-05,
"loss": 0.028,
"step": 281
},
{
"epoch": 2.67,
"grad_norm": 0.27595875352549787,
"learning_rate": 1.4824742268041239e-05,
"loss": 0.0367,
"step": 282
},
{
"epoch": 2.68,
"grad_norm": 0.35959997598687243,
"learning_rate": 1.4804123711340208e-05,
"loss": 0.0461,
"step": 283
},
{
"epoch": 2.69,
"grad_norm": 0.3599849997150387,
"learning_rate": 1.4783505154639177e-05,
"loss": 0.0496,
"step": 284
},
{
"epoch": 2.7,
"grad_norm": 0.3843506060977493,
"learning_rate": 1.4762886597938146e-05,
"loss": 0.055,
"step": 285
},
{
"epoch": 2.71,
"grad_norm": 0.3376272310517259,
"learning_rate": 1.4742268041237115e-05,
"loss": 0.0478,
"step": 286
},
{
"epoch": 2.72,
"grad_norm": 0.3522914957684885,
"learning_rate": 1.4721649484536084e-05,
"loss": 0.0387,
"step": 287
},
{
"epoch": 2.73,
"grad_norm": 0.3225808422651381,
"learning_rate": 1.4701030927835055e-05,
"loss": 0.0444,
"step": 288
},
{
"epoch": 2.74,
"grad_norm": 0.49053438285259304,
"learning_rate": 1.468041237113402e-05,
"loss": 0.073,
"step": 289
},
{
"epoch": 2.75,
"grad_norm": 0.46606621159920686,
"learning_rate": 1.465979381443299e-05,
"loss": 0.0619,
"step": 290
},
{
"epoch": 2.76,
"grad_norm": 0.43465211934093306,
"learning_rate": 1.4639175257731958e-05,
"loss": 0.0678,
"step": 291
},
{
"epoch": 2.77,
"grad_norm": 0.4874636251377174,
"learning_rate": 1.4618556701030927e-05,
"loss": 0.0574,
"step": 292
},
{
"epoch": 2.78,
"grad_norm": 0.3357847221198546,
"learning_rate": 1.4597938144329898e-05,
"loss": 0.0492,
"step": 293
},
{
"epoch": 2.79,
"grad_norm": 0.42201195365915817,
"learning_rate": 1.4577319587628867e-05,
"loss": 0.0588,
"step": 294
},
{
"epoch": 2.8,
"grad_norm": 0.34583487936425006,
"learning_rate": 1.4556701030927836e-05,
"loss": 0.0497,
"step": 295
},
{
"epoch": 2.81,
"grad_norm": 0.3602057060990167,
"learning_rate": 1.4536082474226805e-05,
"loss": 0.0489,
"step": 296
},
{
"epoch": 2.82,
"grad_norm": 0.3802425181902414,
"learning_rate": 1.4515463917525774e-05,
"loss": 0.0602,
"step": 297
},
{
"epoch": 2.82,
"grad_norm": 0.664119897493606,
"learning_rate": 1.4494845360824743e-05,
"loss": 0.0655,
"step": 298
},
{
"epoch": 2.83,
"grad_norm": 0.28990184222340354,
"learning_rate": 1.4474226804123712e-05,
"loss": 0.0411,
"step": 299
},
{
"epoch": 2.84,
"grad_norm": 0.2691393920028635,
"learning_rate": 1.4453608247422681e-05,
"loss": 0.0353,
"step": 300
},
{
"epoch": 2.85,
"grad_norm": 0.3151619663769757,
"learning_rate": 1.443298969072165e-05,
"loss": 0.0415,
"step": 301
},
{
"epoch": 2.86,
"grad_norm": 0.37371722004265706,
"learning_rate": 1.441237113402062e-05,
"loss": 0.0524,
"step": 302
},
{
"epoch": 2.87,
"grad_norm": 0.36164289630762564,
"learning_rate": 1.4391752577319588e-05,
"loss": 0.0471,
"step": 303
},
{
"epoch": 2.88,
"grad_norm": 0.3107728435900313,
"learning_rate": 1.4371134020618557e-05,
"loss": 0.0562,
"step": 304
},
{
"epoch": 2.89,
"grad_norm": 0.34122302247401665,
"learning_rate": 1.4350515463917526e-05,
"loss": 0.0511,
"step": 305
},
{
"epoch": 2.9,
"grad_norm": 0.3443913112869095,
"learning_rate": 1.4329896907216495e-05,
"loss": 0.0568,
"step": 306
},
{
"epoch": 2.91,
"grad_norm": 0.2543497320695684,
"learning_rate": 1.4309278350515466e-05,
"loss": 0.0448,
"step": 307
},
{
"epoch": 2.92,
"grad_norm": 0.3778386166477604,
"learning_rate": 1.4288659793814435e-05,
"loss": 0.0508,
"step": 308
},
{
"epoch": 2.93,
"grad_norm": 0.4320774715869598,
"learning_rate": 1.4268041237113404e-05,
"loss": 0.0616,
"step": 309
},
{
"epoch": 2.94,
"grad_norm": 0.335037078211093,
"learning_rate": 1.4247422680412373e-05,
"loss": 0.0533,
"step": 310
},
{
"epoch": 2.95,
"grad_norm": 0.32569482603821204,
"learning_rate": 1.4226804123711342e-05,
"loss": 0.0405,
"step": 311
},
{
"epoch": 2.96,
"grad_norm": 0.326452214723922,
"learning_rate": 1.4206185567010311e-05,
"loss": 0.0511,
"step": 312
},
{
"epoch": 2.97,
"grad_norm": 0.4642396575882938,
"learning_rate": 1.418556701030928e-05,
"loss": 0.0517,
"step": 313
},
{
"epoch": 2.98,
"grad_norm": 0.4804421549313872,
"learning_rate": 1.416494845360825e-05,
"loss": 0.0502,
"step": 314
},
{
"epoch": 2.99,
"grad_norm": 0.2622694939176555,
"learning_rate": 1.4144329896907218e-05,
"loss": 0.0359,
"step": 315
},
{
"epoch": 3.0,
"grad_norm": 0.30568190452494537,
"learning_rate": 1.4123711340206187e-05,
"loss": 0.0441,
"step": 316
},
{
"epoch": 3.0,
"grad_norm": 0.3255233828126888,
"learning_rate": 1.4103092783505156e-05,
"loss": 0.0415,
"step": 317
},
{
"epoch": 3.01,
"grad_norm": 0.30117947362276853,
"learning_rate": 1.4082474226804125e-05,
"loss": 0.0319,
"step": 318
},
{
"epoch": 3.02,
"grad_norm": 0.25501640881776183,
"learning_rate": 1.4061855670103093e-05,
"loss": 0.0269,
"step": 319
},
{
"epoch": 3.03,
"grad_norm": 0.2793349926953507,
"learning_rate": 1.4041237113402062e-05,
"loss": 0.0304,
"step": 320
},
{
"epoch": 3.04,
"grad_norm": 0.2728743749105482,
"learning_rate": 1.402061855670103e-05,
"loss": 0.0296,
"step": 321
},
{
"epoch": 3.05,
"grad_norm": 0.32384766173473445,
"learning_rate": 1.4e-05,
"loss": 0.036,
"step": 322
},
{
"epoch": 3.06,
"grad_norm": 0.2871499324879541,
"learning_rate": 1.3979381443298969e-05,
"loss": 0.0272,
"step": 323
},
{
"epoch": 3.07,
"grad_norm": 0.3048952690282634,
"learning_rate": 1.3958762886597938e-05,
"loss": 0.0304,
"step": 324
},
{
"epoch": 3.08,
"grad_norm": 0.29499381495312016,
"learning_rate": 1.3938144329896907e-05,
"loss": 0.0375,
"step": 325
},
{
"epoch": 3.09,
"grad_norm": 0.33960760615879576,
"learning_rate": 1.3917525773195878e-05,
"loss": 0.0281,
"step": 326
},
{
"epoch": 3.1,
"grad_norm": 0.2801784394258787,
"learning_rate": 1.3896907216494847e-05,
"loss": 0.0275,
"step": 327
},
{
"epoch": 3.11,
"grad_norm": 0.28641353025616123,
"learning_rate": 1.3876288659793816e-05,
"loss": 0.0291,
"step": 328
},
{
"epoch": 3.12,
"grad_norm": 0.40741675895777785,
"learning_rate": 1.3855670103092785e-05,
"loss": 0.0245,
"step": 329
},
{
"epoch": 3.13,
"grad_norm": 0.37558851156875184,
"learning_rate": 1.3835051546391754e-05,
"loss": 0.027,
"step": 330
},
{
"epoch": 3.14,
"grad_norm": 0.31454384721220896,
"learning_rate": 1.3814432989690723e-05,
"loss": 0.0267,
"step": 331
},
{
"epoch": 3.15,
"grad_norm": 0.3686256853784791,
"learning_rate": 1.3793814432989692e-05,
"loss": 0.0291,
"step": 332
},
{
"epoch": 3.16,
"grad_norm": 0.52624963357147,
"learning_rate": 1.377319587628866e-05,
"loss": 0.0351,
"step": 333
},
{
"epoch": 3.17,
"grad_norm": 0.38793787826881015,
"learning_rate": 1.375257731958763e-05,
"loss": 0.0293,
"step": 334
},
{
"epoch": 3.18,
"grad_norm": 0.3191838033315306,
"learning_rate": 1.3731958762886599e-05,
"loss": 0.0244,
"step": 335
},
{
"epoch": 3.18,
"grad_norm": 0.3206252223008484,
"learning_rate": 1.3711340206185568e-05,
"loss": 0.0225,
"step": 336
},
{
"epoch": 3.19,
"grad_norm": 0.3930825745473696,
"learning_rate": 1.3690721649484537e-05,
"loss": 0.0285,
"step": 337
},
{
"epoch": 3.2,
"grad_norm": 0.37868170622948444,
"learning_rate": 1.3670103092783506e-05,
"loss": 0.0261,
"step": 338
},
{
"epoch": 3.21,
"grad_norm": 0.2881883471519314,
"learning_rate": 1.3649484536082475e-05,
"loss": 0.0228,
"step": 339
},
{
"epoch": 3.22,
"grad_norm": 0.41491656122412524,
"learning_rate": 1.3628865979381446e-05,
"loss": 0.031,
"step": 340
},
{
"epoch": 3.23,
"grad_norm": 0.24539058378037693,
"learning_rate": 1.3608247422680415e-05,
"loss": 0.0166,
"step": 341
},
{
"epoch": 3.24,
"grad_norm": 0.38072088609171495,
"learning_rate": 1.3587628865979384e-05,
"loss": 0.0289,
"step": 342
},
{
"epoch": 3.25,
"grad_norm": 0.39038013412856726,
"learning_rate": 1.3567010309278353e-05,
"loss": 0.0284,
"step": 343
},
{
"epoch": 3.26,
"grad_norm": 0.32586383550385545,
"learning_rate": 1.3546391752577322e-05,
"loss": 0.0299,
"step": 344
},
{
"epoch": 3.27,
"grad_norm": 0.3241675347931097,
"learning_rate": 1.352577319587629e-05,
"loss": 0.0266,
"step": 345
},
{
"epoch": 3.28,
"grad_norm": 0.2767862304010282,
"learning_rate": 1.350515463917526e-05,
"loss": 0.0227,
"step": 346
},
{
"epoch": 3.29,
"grad_norm": 0.45667070040116314,
"learning_rate": 1.3484536082474229e-05,
"loss": 0.0424,
"step": 347
},
{
"epoch": 3.3,
"grad_norm": 0.3441475173658359,
"learning_rate": 1.3463917525773198e-05,
"loss": 0.0374,
"step": 348
},
{
"epoch": 3.31,
"grad_norm": 0.3274123877805381,
"learning_rate": 1.3443298969072167e-05,
"loss": 0.0314,
"step": 349
},
{
"epoch": 3.32,
"grad_norm": 0.33042439237456334,
"learning_rate": 1.3422680412371134e-05,
"loss": 0.0341,
"step": 350
},
{
"epoch": 3.33,
"grad_norm": 0.3803341496244581,
"learning_rate": 1.3402061855670103e-05,
"loss": 0.0323,
"step": 351
},
{
"epoch": 3.34,
"grad_norm": 0.404763767611467,
"learning_rate": 1.3381443298969072e-05,
"loss": 0.037,
"step": 352
},
{
"epoch": 3.35,
"grad_norm": 0.3289567532580943,
"learning_rate": 1.3360824742268041e-05,
"loss": 0.0278,
"step": 353
},
{
"epoch": 3.36,
"grad_norm": 0.8999941401820684,
"learning_rate": 1.334020618556701e-05,
"loss": 0.039,
"step": 354
},
{
"epoch": 3.36,
"grad_norm": 0.3420570691062843,
"learning_rate": 1.331958762886598e-05,
"loss": 0.0238,
"step": 355
},
{
"epoch": 3.37,
"grad_norm": 0.3608104399597999,
"learning_rate": 1.3298969072164948e-05,
"loss": 0.0364,
"step": 356
},
{
"epoch": 3.38,
"grad_norm": 0.5031741834969633,
"learning_rate": 1.3278350515463917e-05,
"loss": 0.0233,
"step": 357
},
{
"epoch": 3.39,
"grad_norm": 0.40651629963181135,
"learning_rate": 1.3257731958762886e-05,
"loss": 0.0355,
"step": 358
},
{
"epoch": 3.4,
"grad_norm": 0.30066439600456807,
"learning_rate": 1.3237113402061857e-05,
"loss": 0.0292,
"step": 359
},
{
"epoch": 3.41,
"grad_norm": 0.30514482264845005,
"learning_rate": 1.3216494845360826e-05,
"loss": 0.031,
"step": 360
},
{
"epoch": 3.42,
"grad_norm": 0.2914563368637351,
"learning_rate": 1.3195876288659795e-05,
"loss": 0.0292,
"step": 361
},
{
"epoch": 3.43,
"grad_norm": 0.287425509422324,
"learning_rate": 1.3175257731958764e-05,
"loss": 0.0278,
"step": 362
},
{
"epoch": 3.44,
"grad_norm": 0.3345751528771148,
"learning_rate": 1.3154639175257733e-05,
"loss": 0.0301,
"step": 363
},
{
"epoch": 3.45,
"grad_norm": 0.3529380273809963,
"learning_rate": 1.3134020618556702e-05,
"loss": 0.0304,
"step": 364
},
{
"epoch": 3.46,
"grad_norm": 0.4242742976287536,
"learning_rate": 1.3113402061855671e-05,
"loss": 0.0309,
"step": 365
},
{
"epoch": 3.47,
"grad_norm": 0.3627828234644092,
"learning_rate": 1.309278350515464e-05,
"loss": 0.038,
"step": 366
},
{
"epoch": 3.48,
"grad_norm": 0.2697725321760682,
"learning_rate": 1.307216494845361e-05,
"loss": 0.025,
"step": 367
},
{
"epoch": 3.49,
"grad_norm": 0.3233785658508157,
"learning_rate": 1.3051546391752578e-05,
"loss": 0.0301,
"step": 368
},
{
"epoch": 3.5,
"grad_norm": 0.449798409024347,
"learning_rate": 1.3030927835051547e-05,
"loss": 0.0321,
"step": 369
},
{
"epoch": 3.51,
"grad_norm": 0.36225000261816176,
"learning_rate": 1.3010309278350516e-05,
"loss": 0.0241,
"step": 370
},
{
"epoch": 3.52,
"grad_norm": 0.3544203420087151,
"learning_rate": 1.2989690721649485e-05,
"loss": 0.0333,
"step": 371
},
{
"epoch": 3.53,
"grad_norm": 0.36459194127774713,
"learning_rate": 1.2969072164948454e-05,
"loss": 0.029,
"step": 372
},
{
"epoch": 3.54,
"grad_norm": 0.32888228585115675,
"learning_rate": 1.2948453608247425e-05,
"loss": 0.0302,
"step": 373
},
{
"epoch": 3.55,
"grad_norm": 0.35520313106243706,
"learning_rate": 1.2927835051546394e-05,
"loss": 0.0337,
"step": 374
},
{
"epoch": 3.55,
"grad_norm": 0.5554458377367298,
"learning_rate": 1.2907216494845363e-05,
"loss": 0.0337,
"step": 375
},
{
"epoch": 3.56,
"grad_norm": 0.3411589855347537,
"learning_rate": 1.2886597938144332e-05,
"loss": 0.0304,
"step": 376
},
{
"epoch": 3.57,
"grad_norm": 0.30338468537904195,
"learning_rate": 1.2865979381443301e-05,
"loss": 0.0263,
"step": 377
},
{
"epoch": 3.58,
"grad_norm": 0.374971905291679,
"learning_rate": 1.284536082474227e-05,
"loss": 0.028,
"step": 378
},
{
"epoch": 3.59,
"grad_norm": 0.3266597456993636,
"learning_rate": 1.282474226804124e-05,
"loss": 0.0307,
"step": 379
},
{
"epoch": 3.6,
"grad_norm": 0.3549155027609738,
"learning_rate": 1.2804123711340207e-05,
"loss": 0.0267,
"step": 380
},
{
"epoch": 3.61,
"grad_norm": 0.34253925899013193,
"learning_rate": 1.2783505154639176e-05,
"loss": 0.033,
"step": 381
},
{
"epoch": 3.62,
"grad_norm": 0.26567401153328163,
"learning_rate": 1.2762886597938145e-05,
"loss": 0.0281,
"step": 382
},
{
"epoch": 3.63,
"grad_norm": 0.229385813957085,
"learning_rate": 1.2742268041237114e-05,
"loss": 0.0255,
"step": 383
},
{
"epoch": 3.64,
"grad_norm": 0.3137644696413612,
"learning_rate": 1.2721649484536083e-05,
"loss": 0.0324,
"step": 384
},
{
"epoch": 3.65,
"grad_norm": 0.29790461333551643,
"learning_rate": 1.2701030927835052e-05,
"loss": 0.0334,
"step": 385
},
{
"epoch": 3.66,
"grad_norm": 0.3367489949425905,
"learning_rate": 1.268041237113402e-05,
"loss": 0.0333,
"step": 386
},
{
"epoch": 3.67,
"grad_norm": 0.2833106264430702,
"learning_rate": 1.265979381443299e-05,
"loss": 0.0295,
"step": 387
},
{
"epoch": 3.68,
"grad_norm": 0.3960602093402055,
"learning_rate": 1.2639175257731959e-05,
"loss": 0.0407,
"step": 388
},
{
"epoch": 3.69,
"grad_norm": 0.31969845461231755,
"learning_rate": 1.2618556701030928e-05,
"loss": 0.0277,
"step": 389
},
{
"epoch": 3.7,
"grad_norm": 0.3290785829575204,
"learning_rate": 1.2597938144329897e-05,
"loss": 0.0265,
"step": 390
},
{
"epoch": 3.71,
"grad_norm": 0.3404336720078591,
"learning_rate": 1.2577319587628866e-05,
"loss": 0.0315,
"step": 391
},
{
"epoch": 3.72,
"grad_norm": 0.5503181512684359,
"learning_rate": 1.2556701030927837e-05,
"loss": 0.038,
"step": 392
},
{
"epoch": 3.73,
"grad_norm": 0.3164052256814836,
"learning_rate": 1.2536082474226806e-05,
"loss": 0.0303,
"step": 393
},
{
"epoch": 3.73,
"grad_norm": 0.3537884497218655,
"learning_rate": 1.2515463917525775e-05,
"loss": 0.0346,
"step": 394
},
{
"epoch": 3.74,
"grad_norm": 0.30473674474026663,
"learning_rate": 1.2494845360824744e-05,
"loss": 0.0202,
"step": 395
},
{
"epoch": 3.75,
"grad_norm": 0.2871227650536729,
"learning_rate": 1.2474226804123713e-05,
"loss": 0.0265,
"step": 396
},
{
"epoch": 3.76,
"grad_norm": 0.3309988126611014,
"learning_rate": 1.2453608247422682e-05,
"loss": 0.0279,
"step": 397
},
{
"epoch": 3.77,
"grad_norm": 0.9733441842358322,
"learning_rate": 1.243298969072165e-05,
"loss": 0.0423,
"step": 398
},
{
"epoch": 3.78,
"grad_norm": 0.3592067200121845,
"learning_rate": 1.241237113402062e-05,
"loss": 0.0289,
"step": 399
},
{
"epoch": 3.79,
"grad_norm": 0.3675779133876147,
"learning_rate": 1.2391752577319589e-05,
"loss": 0.0367,
"step": 400
},
{
"epoch": 3.8,
"grad_norm": 0.32275002804733144,
"learning_rate": 1.2371134020618558e-05,
"loss": 0.0335,
"step": 401
},
{
"epoch": 3.81,
"grad_norm": 0.3539181940955811,
"learning_rate": 1.2350515463917527e-05,
"loss": 0.0339,
"step": 402
},
{
"epoch": 3.82,
"grad_norm": 0.35591839029860434,
"learning_rate": 1.2329896907216496e-05,
"loss": 0.0257,
"step": 403
},
{
"epoch": 3.83,
"grad_norm": 0.3939505543168573,
"learning_rate": 1.2309278350515465e-05,
"loss": 0.0358,
"step": 404
},
{
"epoch": 3.84,
"grad_norm": 0.3601549434017552,
"learning_rate": 1.2288659793814434e-05,
"loss": 0.0306,
"step": 405
},
{
"epoch": 3.85,
"grad_norm": 0.32211406496174017,
"learning_rate": 1.2268041237113405e-05,
"loss": 0.032,
"step": 406
},
{
"epoch": 3.86,
"grad_norm": 0.4629340332538,
"learning_rate": 1.2247422680412374e-05,
"loss": 0.0462,
"step": 407
},
{
"epoch": 3.87,
"grad_norm": 0.3778768694733899,
"learning_rate": 1.2226804123711343e-05,
"loss": 0.0313,
"step": 408
},
{
"epoch": 3.88,
"grad_norm": 0.3175992257215685,
"learning_rate": 1.2206185567010312e-05,
"loss": 0.0308,
"step": 409
},
{
"epoch": 3.89,
"grad_norm": 0.3404146748141718,
"learning_rate": 1.2185567010309279e-05,
"loss": 0.0349,
"step": 410
},
{
"epoch": 3.9,
"grad_norm": 0.3238537583829642,
"learning_rate": 1.2164948453608248e-05,
"loss": 0.0309,
"step": 411
},
{
"epoch": 3.91,
"grad_norm": 0.33221994535840943,
"learning_rate": 1.2144329896907217e-05,
"loss": 0.0386,
"step": 412
},
{
"epoch": 3.91,
"grad_norm": 0.6196418201733107,
"learning_rate": 1.2123711340206186e-05,
"loss": 0.0339,
"step": 413
},
{
"epoch": 3.92,
"grad_norm": 0.36582501230718867,
"learning_rate": 1.2103092783505155e-05,
"loss": 0.0335,
"step": 414
},
{
"epoch": 3.93,
"grad_norm": 0.38861827384892444,
"learning_rate": 1.2082474226804124e-05,
"loss": 0.0389,
"step": 415
},
{
"epoch": 3.94,
"grad_norm": 0.4074480657306203,
"learning_rate": 1.2061855670103093e-05,
"loss": 0.04,
"step": 416
},
{
"epoch": 3.95,
"grad_norm": 0.4413396734481435,
"learning_rate": 1.2041237113402062e-05,
"loss": 0.0389,
"step": 417
},
{
"epoch": 3.96,
"grad_norm": 0.28851564332459445,
"learning_rate": 1.2020618556701031e-05,
"loss": 0.0265,
"step": 418
},
{
"epoch": 3.97,
"grad_norm": 0.2424457157962498,
"learning_rate": 1.2e-05,
"loss": 0.0254,
"step": 419
},
{
"epoch": 3.98,
"grad_norm": 0.4272297534542686,
"learning_rate": 1.197938144329897e-05,
"loss": 0.0383,
"step": 420
},
{
"epoch": 3.99,
"grad_norm": 0.3165573418696913,
"learning_rate": 1.1958762886597938e-05,
"loss": 0.0308,
"step": 421
},
{
"epoch": 4.0,
"grad_norm": 0.3494903212566013,
"learning_rate": 1.1938144329896907e-05,
"loss": 0.0288,
"step": 422
},
{
"epoch": 4.01,
"grad_norm": 0.3039325138893434,
"learning_rate": 1.1917525773195876e-05,
"loss": 0.0209,
"step": 423
},
{
"epoch": 4.02,
"grad_norm": 0.25310916497891606,
"learning_rate": 1.1896907216494845e-05,
"loss": 0.0191,
"step": 424
},
{
"epoch": 4.03,
"grad_norm": 0.21576813459763575,
"learning_rate": 1.1876288659793816e-05,
"loss": 0.0153,
"step": 425
},
{
"epoch": 4.04,
"grad_norm": 0.23819113418761503,
"learning_rate": 1.1855670103092785e-05,
"loss": 0.0217,
"step": 426
},
{
"epoch": 4.05,
"grad_norm": 0.20964070228563295,
"learning_rate": 1.1835051546391754e-05,
"loss": 0.0176,
"step": 427
},
{
"epoch": 4.06,
"grad_norm": 0.29672768350885953,
"learning_rate": 1.1814432989690723e-05,
"loss": 0.0139,
"step": 428
},
{
"epoch": 4.07,
"grad_norm": 0.23294951548189244,
"learning_rate": 1.1793814432989692e-05,
"loss": 0.0139,
"step": 429
},
{
"epoch": 4.08,
"grad_norm": 0.2060021066696814,
"learning_rate": 1.1773195876288661e-05,
"loss": 0.0133,
"step": 430
},
{
"epoch": 4.09,
"grad_norm": 0.375387726454009,
"learning_rate": 1.175257731958763e-05,
"loss": 0.0178,
"step": 431
},
{
"epoch": 4.09,
"grad_norm": 0.2230402245674164,
"learning_rate": 1.17319587628866e-05,
"loss": 0.0156,
"step": 432
},
{
"epoch": 4.1,
"grad_norm": 0.292594199763992,
"learning_rate": 1.1711340206185568e-05,
"loss": 0.0198,
"step": 433
},
{
"epoch": 4.11,
"grad_norm": 0.2713815645357825,
"learning_rate": 1.1690721649484537e-05,
"loss": 0.0133,
"step": 434
},
{
"epoch": 4.12,
"grad_norm": 0.3042696630922716,
"learning_rate": 1.1670103092783506e-05,
"loss": 0.0179,
"step": 435
},
{
"epoch": 4.13,
"grad_norm": 0.3060397681888689,
"learning_rate": 1.1649484536082475e-05,
"loss": 0.0202,
"step": 436
},
{
"epoch": 4.14,
"grad_norm": 0.5892571064581607,
"learning_rate": 1.1628865979381444e-05,
"loss": 0.018,
"step": 437
},
{
"epoch": 4.15,
"grad_norm": 0.34992625617665546,
"learning_rate": 1.1608247422680413e-05,
"loss": 0.0174,
"step": 438
},
{
"epoch": 4.16,
"grad_norm": 0.4359154787962013,
"learning_rate": 1.1587628865979384e-05,
"loss": 0.0176,
"step": 439
},
{
"epoch": 4.17,
"grad_norm": 0.3071734543167,
"learning_rate": 1.1567010309278353e-05,
"loss": 0.0153,
"step": 440
},
{
"epoch": 4.18,
"grad_norm": 0.42215911693589253,
"learning_rate": 1.1546391752577319e-05,
"loss": 0.0199,
"step": 441
},
{
"epoch": 4.19,
"grad_norm": 0.24853746454332362,
"learning_rate": 1.1525773195876288e-05,
"loss": 0.0156,
"step": 442
},
{
"epoch": 4.2,
"grad_norm": 0.33464780801869937,
"learning_rate": 1.1505154639175259e-05,
"loss": 0.0187,
"step": 443
},
{
"epoch": 4.21,
"grad_norm": 0.25430822098085926,
"learning_rate": 1.1484536082474228e-05,
"loss": 0.0123,
"step": 444
},
{
"epoch": 4.22,
"grad_norm": 0.4668939082594934,
"learning_rate": 1.1463917525773197e-05,
"loss": 0.0259,
"step": 445
},
{
"epoch": 4.23,
"grad_norm": 0.36654497298203265,
"learning_rate": 1.1443298969072166e-05,
"loss": 0.022,
"step": 446
},
{
"epoch": 4.24,
"grad_norm": 0.3537296258632948,
"learning_rate": 1.1422680412371135e-05,
"loss": 0.0204,
"step": 447
},
{
"epoch": 4.25,
"grad_norm": 0.3977196341251711,
"learning_rate": 1.1402061855670104e-05,
"loss": 0.0206,
"step": 448
},
{
"epoch": 4.26,
"grad_norm": 0.5008794084302111,
"learning_rate": 1.1381443298969073e-05,
"loss": 0.0214,
"step": 449
},
{
"epoch": 4.27,
"grad_norm": 0.2571833331626884,
"learning_rate": 1.1360824742268042e-05,
"loss": 0.0173,
"step": 450
},
{
"epoch": 4.27,
"grad_norm": 0.298842323813605,
"learning_rate": 1.134020618556701e-05,
"loss": 0.0206,
"step": 451
},
{
"epoch": 4.28,
"grad_norm": 0.33104383962857126,
"learning_rate": 1.131958762886598e-05,
"loss": 0.0124,
"step": 452
},
{
"epoch": 4.29,
"grad_norm": 0.39539612499995247,
"learning_rate": 1.1298969072164949e-05,
"loss": 0.0225,
"step": 453
},
{
"epoch": 4.3,
"grad_norm": 0.28518556418045077,
"learning_rate": 1.1278350515463918e-05,
"loss": 0.02,
"step": 454
},
{
"epoch": 4.31,
"grad_norm": 0.31398138355691413,
"learning_rate": 1.1257731958762887e-05,
"loss": 0.0185,
"step": 455
},
{
"epoch": 4.32,
"grad_norm": 0.20929347557733147,
"learning_rate": 1.1237113402061856e-05,
"loss": 0.0136,
"step": 456
},
{
"epoch": 4.33,
"grad_norm": 0.2018044165879369,
"learning_rate": 1.1216494845360825e-05,
"loss": 0.0134,
"step": 457
},
{
"epoch": 4.34,
"grad_norm": 0.3165303093038224,
"learning_rate": 1.1195876288659796e-05,
"loss": 0.0167,
"step": 458
},
{
"epoch": 4.35,
"grad_norm": 0.23349534899737623,
"learning_rate": 1.1175257731958765e-05,
"loss": 0.0117,
"step": 459
},
{
"epoch": 4.36,
"grad_norm": 0.2411509914272223,
"learning_rate": 1.1154639175257734e-05,
"loss": 0.0142,
"step": 460
},
{
"epoch": 4.37,
"grad_norm": 0.3475794117912949,
"learning_rate": 1.1134020618556703e-05,
"loss": 0.0185,
"step": 461
},
{
"epoch": 4.38,
"grad_norm": 0.3374764765382937,
"learning_rate": 1.1113402061855672e-05,
"loss": 0.0244,
"step": 462
},
{
"epoch": 4.39,
"grad_norm": 0.3315291479092438,
"learning_rate": 1.109278350515464e-05,
"loss": 0.0267,
"step": 463
},
{
"epoch": 4.4,
"grad_norm": 0.31495684208857333,
"learning_rate": 1.107216494845361e-05,
"loss": 0.0156,
"step": 464
},
{
"epoch": 4.41,
"grad_norm": 0.20727858074775188,
"learning_rate": 1.1051546391752579e-05,
"loss": 0.0113,
"step": 465
},
{
"epoch": 4.42,
"grad_norm": 0.3886581878552454,
"learning_rate": 1.1030927835051548e-05,
"loss": 0.0183,
"step": 466
},
{
"epoch": 4.43,
"grad_norm": 0.3327295253639389,
"learning_rate": 1.1010309278350517e-05,
"loss": 0.0209,
"step": 467
},
{
"epoch": 4.44,
"grad_norm": 0.3603857174118914,
"learning_rate": 1.0989690721649486e-05,
"loss": 0.02,
"step": 468
},
{
"epoch": 4.45,
"grad_norm": 0.5104364825962914,
"learning_rate": 1.0969072164948455e-05,
"loss": 0.0237,
"step": 469
},
{
"epoch": 4.45,
"grad_norm": 0.3132748296369924,
"learning_rate": 1.0948453608247424e-05,
"loss": 0.02,
"step": 470
},
{
"epoch": 4.46,
"grad_norm": 0.26804751970641105,
"learning_rate": 1.0927835051546391e-05,
"loss": 0.0184,
"step": 471
},
{
"epoch": 4.47,
"grad_norm": 0.3186486690869349,
"learning_rate": 1.090721649484536e-05,
"loss": 0.0192,
"step": 472
},
{
"epoch": 4.48,
"grad_norm": 0.19257480703594548,
"learning_rate": 1.088659793814433e-05,
"loss": 0.0123,
"step": 473
},
{
"epoch": 4.49,
"grad_norm": 0.3731683263751716,
"learning_rate": 1.0865979381443298e-05,
"loss": 0.0191,
"step": 474
},
{
"epoch": 4.5,
"grad_norm": 0.26369876018080096,
"learning_rate": 1.0845360824742267e-05,
"loss": 0.0174,
"step": 475
},
{
"epoch": 4.51,
"grad_norm": 0.33480039257997307,
"learning_rate": 1.0824742268041238e-05,
"loss": 0.0214,
"step": 476
},
{
"epoch": 4.52,
"grad_norm": 0.29677370501613215,
"learning_rate": 1.0804123711340207e-05,
"loss": 0.0195,
"step": 477
},
{
"epoch": 4.53,
"grad_norm": 0.539422730315377,
"learning_rate": 1.0783505154639176e-05,
"loss": 0.017,
"step": 478
},
{
"epoch": 4.54,
"grad_norm": 0.2543066828604527,
"learning_rate": 1.0762886597938145e-05,
"loss": 0.0163,
"step": 479
},
{
"epoch": 4.55,
"grad_norm": 0.21269139758273636,
"learning_rate": 1.0742268041237114e-05,
"loss": 0.0111,
"step": 480
},
{
"epoch": 4.56,
"grad_norm": 0.31407610877999054,
"learning_rate": 1.0721649484536083e-05,
"loss": 0.0157,
"step": 481
},
{
"epoch": 4.57,
"grad_norm": 0.4367632793728854,
"learning_rate": 1.0701030927835052e-05,
"loss": 0.0221,
"step": 482
},
{
"epoch": 4.58,
"grad_norm": 0.28996133244397765,
"learning_rate": 1.0680412371134021e-05,
"loss": 0.0164,
"step": 483
},
{
"epoch": 4.59,
"grad_norm": 0.4943179539876271,
"learning_rate": 1.065979381443299e-05,
"loss": 0.0199,
"step": 484
},
{
"epoch": 4.6,
"grad_norm": 0.42846872704226036,
"learning_rate": 1.063917525773196e-05,
"loss": 0.0272,
"step": 485
},
{
"epoch": 4.61,
"grad_norm": 0.3753926288023311,
"learning_rate": 1.0618556701030928e-05,
"loss": 0.0174,
"step": 486
},
{
"epoch": 4.62,
"grad_norm": 0.37712136896981857,
"learning_rate": 1.0597938144329897e-05,
"loss": 0.0244,
"step": 487
},
{
"epoch": 4.63,
"grad_norm": 0.32428324873639963,
"learning_rate": 1.0577319587628866e-05,
"loss": 0.0171,
"step": 488
},
{
"epoch": 4.64,
"grad_norm": 0.2662380800802627,
"learning_rate": 1.0556701030927835e-05,
"loss": 0.0159,
"step": 489
},
{
"epoch": 4.64,
"grad_norm": 0.44805335761039156,
"learning_rate": 1.0536082474226804e-05,
"loss": 0.0195,
"step": 490
},
{
"epoch": 4.65,
"grad_norm": 0.31275170441413874,
"learning_rate": 1.0515463917525775e-05,
"loss": 0.0202,
"step": 491
},
{
"epoch": 4.66,
"grad_norm": 0.23831022586682868,
"learning_rate": 1.0494845360824744e-05,
"loss": 0.0112,
"step": 492
},
{
"epoch": 4.67,
"grad_norm": 0.33762787100790437,
"learning_rate": 1.0474226804123713e-05,
"loss": 0.016,
"step": 493
},
{
"epoch": 4.68,
"grad_norm": 0.3189168390574777,
"learning_rate": 1.0453608247422682e-05,
"loss": 0.0205,
"step": 494
},
{
"epoch": 4.69,
"grad_norm": 0.3433216091560776,
"learning_rate": 1.0432989690721651e-05,
"loss": 0.0225,
"step": 495
},
{
"epoch": 4.7,
"grad_norm": 0.42246985000612747,
"learning_rate": 1.041237113402062e-05,
"loss": 0.0213,
"step": 496
},
{
"epoch": 4.71,
"grad_norm": 0.31305034575550633,
"learning_rate": 1.039175257731959e-05,
"loss": 0.019,
"step": 497
},
{
"epoch": 4.72,
"grad_norm": 0.3586053900398176,
"learning_rate": 1.0371134020618558e-05,
"loss": 0.019,
"step": 498
},
{
"epoch": 4.73,
"grad_norm": 0.4826492206312301,
"learning_rate": 1.0350515463917527e-05,
"loss": 0.0201,
"step": 499
},
{
"epoch": 4.74,
"grad_norm": 0.522039253975035,
"learning_rate": 1.0329896907216496e-05,
"loss": 0.0228,
"step": 500
},
{
"epoch": 4.75,
"grad_norm": 0.30662264437106923,
"learning_rate": 1.0309278350515464e-05,
"loss": 0.0188,
"step": 501
},
{
"epoch": 4.76,
"grad_norm": 0.23584262380284332,
"learning_rate": 1.0288659793814433e-05,
"loss": 0.0171,
"step": 502
},
{
"epoch": 4.77,
"grad_norm": 0.35066882033050856,
"learning_rate": 1.0268041237113402e-05,
"loss": 0.0215,
"step": 503
},
{
"epoch": 4.78,
"grad_norm": 0.3072576328870572,
"learning_rate": 1.024742268041237e-05,
"loss": 0.0179,
"step": 504
},
{
"epoch": 4.79,
"grad_norm": 0.3946617024101158,
"learning_rate": 1.022680412371134e-05,
"loss": 0.0254,
"step": 505
},
{
"epoch": 4.8,
"grad_norm": 0.3044961303886306,
"learning_rate": 1.0206185567010309e-05,
"loss": 0.0152,
"step": 506
},
{
"epoch": 4.81,
"grad_norm": 0.3913434669257833,
"learning_rate": 1.0185567010309278e-05,
"loss": 0.0183,
"step": 507
},
{
"epoch": 4.82,
"grad_norm": 0.2969867596324075,
"learning_rate": 1.0164948453608247e-05,
"loss": 0.0181,
"step": 508
},
{
"epoch": 4.82,
"grad_norm": 0.2861695249785404,
"learning_rate": 1.0144329896907218e-05,
"loss": 0.023,
"step": 509
},
{
"epoch": 4.83,
"grad_norm": 0.29063587886178854,
"learning_rate": 1.0123711340206187e-05,
"loss": 0.0145,
"step": 510
},
{
"epoch": 4.84,
"grad_norm": 0.3620662675895615,
"learning_rate": 1.0103092783505156e-05,
"loss": 0.0165,
"step": 511
},
{
"epoch": 4.85,
"grad_norm": 0.3710298985209495,
"learning_rate": 1.0082474226804125e-05,
"loss": 0.0197,
"step": 512
},
{
"epoch": 4.86,
"grad_norm": 0.28875593734598454,
"learning_rate": 1.0061855670103094e-05,
"loss": 0.0208,
"step": 513
},
{
"epoch": 4.87,
"grad_norm": 0.3703020585274093,
"learning_rate": 1.0041237113402063e-05,
"loss": 0.02,
"step": 514
},
{
"epoch": 4.88,
"grad_norm": 0.37636878622494474,
"learning_rate": 1.0020618556701032e-05,
"loss": 0.0178,
"step": 515
},
{
"epoch": 4.89,
"grad_norm": 0.38458338647940543,
"learning_rate": 1e-05,
"loss": 0.0196,
"step": 516
},
{
"epoch": 4.9,
"grad_norm": 0.3733502697118733,
"learning_rate": 9.97938144329897e-06,
"loss": 0.025,
"step": 517
},
{
"epoch": 4.91,
"grad_norm": 0.36714452126763575,
"learning_rate": 9.958762886597939e-06,
"loss": 0.0227,
"step": 518
},
{
"epoch": 4.92,
"grad_norm": 0.5885657809108235,
"learning_rate": 9.938144329896908e-06,
"loss": 0.0262,
"step": 519
},
{
"epoch": 4.93,
"grad_norm": 0.2032633090477059,
"learning_rate": 9.917525773195877e-06,
"loss": 0.0162,
"step": 520
},
{
"epoch": 4.94,
"grad_norm": 0.3096844435694019,
"learning_rate": 9.896907216494846e-06,
"loss": 0.0203,
"step": 521
},
{
"epoch": 4.95,
"grad_norm": 0.3393284409552235,
"learning_rate": 9.876288659793815e-06,
"loss": 0.0207,
"step": 522
},
{
"epoch": 4.96,
"grad_norm": 0.2741715795478744,
"learning_rate": 9.855670103092784e-06,
"loss": 0.0221,
"step": 523
},
{
"epoch": 4.97,
"grad_norm": 0.37463814442956445,
"learning_rate": 9.835051546391753e-06,
"loss": 0.0234,
"step": 524
},
{
"epoch": 4.98,
"grad_norm": 0.4738136811346728,
"learning_rate": 9.814432989690722e-06,
"loss": 0.0278,
"step": 525
},
{
"epoch": 4.99,
"grad_norm": 0.3489230788153202,
"learning_rate": 9.793814432989691e-06,
"loss": 0.0243,
"step": 526
},
{
"epoch": 5.0,
"grad_norm": 0.2857214147666074,
"learning_rate": 9.77319587628866e-06,
"loss": 0.0144,
"step": 527
},
{
"epoch": 5.0,
"grad_norm": 0.2092384861247451,
"learning_rate": 9.752577319587629e-06,
"loss": 0.0149,
"step": 528
},
{
"epoch": 5.01,
"grad_norm": 0.14883241071562817,
"learning_rate": 9.731958762886598e-06,
"loss": 0.0097,
"step": 529
},
{
"epoch": 5.02,
"grad_norm": 0.19005300974461106,
"learning_rate": 9.711340206185567e-06,
"loss": 0.0112,
"step": 530
},
{
"epoch": 5.03,
"grad_norm": 0.28962130274777387,
"learning_rate": 9.690721649484536e-06,
"loss": 0.0116,
"step": 531
},
{
"epoch": 5.04,
"grad_norm": 0.19553844733981432,
"learning_rate": 9.670103092783505e-06,
"loss": 0.0103,
"step": 532
},
{
"epoch": 5.05,
"grad_norm": 0.25040245444790443,
"learning_rate": 9.649484536082476e-06,
"loss": 0.0098,
"step": 533
},
{
"epoch": 5.06,
"grad_norm": 0.2774859050659358,
"learning_rate": 9.628865979381445e-06,
"loss": 0.0128,
"step": 534
},
{
"epoch": 5.07,
"grad_norm": 0.26098388727829175,
"learning_rate": 9.608247422680414e-06,
"loss": 0.0139,
"step": 535
},
{
"epoch": 5.08,
"grad_norm": 0.9832390179635483,
"learning_rate": 9.587628865979383e-06,
"loss": 0.0128,
"step": 536
},
{
"epoch": 5.09,
"grad_norm": 0.28903859271599475,
"learning_rate": 9.567010309278352e-06,
"loss": 0.0186,
"step": 537
},
{
"epoch": 5.1,
"grad_norm": 0.21464310727083336,
"learning_rate": 9.546391752577321e-06,
"loss": 0.0136,
"step": 538
},
{
"epoch": 5.11,
"grad_norm": 0.7129637380236128,
"learning_rate": 9.525773195876288e-06,
"loss": 0.013,
"step": 539
},
{
"epoch": 5.12,
"grad_norm": 0.20889981065182758,
"learning_rate": 9.505154639175257e-06,
"loss": 0.01,
"step": 540
},
{
"epoch": 5.13,
"grad_norm": 0.2772459878682804,
"learning_rate": 9.484536082474226e-06,
"loss": 0.0128,
"step": 541
},
{
"epoch": 5.14,
"grad_norm": 0.4406782747081322,
"learning_rate": 9.463917525773197e-06,
"loss": 0.015,
"step": 542
},
{
"epoch": 5.15,
"grad_norm": 0.4487916657701603,
"learning_rate": 9.443298969072166e-06,
"loss": 0.0151,
"step": 543
},
{
"epoch": 5.16,
"grad_norm": 0.41889595861431955,
"learning_rate": 9.422680412371135e-06,
"loss": 0.0078,
"step": 544
},
{
"epoch": 5.17,
"grad_norm": 0.3881633249425354,
"learning_rate": 9.402061855670104e-06,
"loss": 0.0098,
"step": 545
},
{
"epoch": 5.18,
"grad_norm": 0.2606728279358393,
"learning_rate": 9.381443298969073e-06,
"loss": 0.0101,
"step": 546
},
{
"epoch": 5.18,
"grad_norm": 1.2817249348263469,
"learning_rate": 9.360824742268042e-06,
"loss": 0.0128,
"step": 547
},
{
"epoch": 5.19,
"grad_norm": 0.21307853839730945,
"learning_rate": 9.340206185567011e-06,
"loss": 0.0082,
"step": 548
},
{
"epoch": 5.2,
"grad_norm": 0.45497726174014624,
"learning_rate": 9.31958762886598e-06,
"loss": 0.0163,
"step": 549
},
{
"epoch": 5.21,
"grad_norm": 0.19973279977956332,
"learning_rate": 9.29896907216495e-06,
"loss": 0.0082,
"step": 550
},
{
"epoch": 5.22,
"grad_norm": 0.3383610488120908,
"learning_rate": 9.278350515463918e-06,
"loss": 0.0114,
"step": 551
},
{
"epoch": 5.23,
"grad_norm": 0.7428320244456705,
"learning_rate": 9.257731958762887e-06,
"loss": 0.012,
"step": 552
},
{
"epoch": 5.24,
"grad_norm": 0.40515618523988156,
"learning_rate": 9.237113402061856e-06,
"loss": 0.0133,
"step": 553
},
{
"epoch": 5.25,
"grad_norm": 0.9595889731995229,
"learning_rate": 9.216494845360825e-06,
"loss": 0.0126,
"step": 554
},
{
"epoch": 5.26,
"grad_norm": 0.2555796426297848,
"learning_rate": 9.195876288659794e-06,
"loss": 0.0088,
"step": 555
},
{
"epoch": 5.27,
"grad_norm": 1.1734842567931907,
"learning_rate": 9.175257731958764e-06,
"loss": 0.0124,
"step": 556
},
{
"epoch": 5.28,
"grad_norm": 0.639853338372346,
"learning_rate": 9.154639175257733e-06,
"loss": 0.0128,
"step": 557
},
{
"epoch": 5.29,
"grad_norm": 0.2967246201764664,
"learning_rate": 9.134020618556702e-06,
"loss": 0.0116,
"step": 558
},
{
"epoch": 5.3,
"grad_norm": 0.29006278457989304,
"learning_rate": 9.11340206185567e-06,
"loss": 0.0109,
"step": 559
},
{
"epoch": 5.31,
"grad_norm": 0.3194667186722855,
"learning_rate": 9.09278350515464e-06,
"loss": 0.0086,
"step": 560
},
{
"epoch": 5.32,
"grad_norm": 0.22538365959895024,
"learning_rate": 9.072164948453609e-06,
"loss": 0.0096,
"step": 561
},
{
"epoch": 5.33,
"grad_norm": 0.41653884079317305,
"learning_rate": 9.051546391752578e-06,
"loss": 0.0164,
"step": 562
},
{
"epoch": 5.34,
"grad_norm": 0.232625127123573,
"learning_rate": 9.030927835051547e-06,
"loss": 0.0101,
"step": 563
},
{
"epoch": 5.35,
"grad_norm": 0.25382934285884684,
"learning_rate": 9.010309278350516e-06,
"loss": 0.0063,
"step": 564
},
{
"epoch": 5.36,
"grad_norm": 0.4534152022439007,
"learning_rate": 8.989690721649485e-06,
"loss": 0.0148,
"step": 565
},
{
"epoch": 5.36,
"grad_norm": 0.2907059320951449,
"learning_rate": 8.969072164948455e-06,
"loss": 0.0158,
"step": 566
},
{
"epoch": 5.37,
"grad_norm": 0.3146631593770584,
"learning_rate": 8.948453608247424e-06,
"loss": 0.0113,
"step": 567
},
{
"epoch": 5.38,
"grad_norm": 0.27253269631538185,
"learning_rate": 8.927835051546394e-06,
"loss": 0.0163,
"step": 568
},
{
"epoch": 5.39,
"grad_norm": 0.30106908476464594,
"learning_rate": 8.907216494845363e-06,
"loss": 0.0139,
"step": 569
},
{
"epoch": 5.4,
"grad_norm": 0.3136667643091521,
"learning_rate": 8.88659793814433e-06,
"loss": 0.013,
"step": 570
},
{
"epoch": 5.41,
"grad_norm": 0.3163081476220768,
"learning_rate": 8.865979381443299e-06,
"loss": 0.0116,
"step": 571
},
{
"epoch": 5.42,
"grad_norm": 0.34427973720830696,
"learning_rate": 8.845360824742268e-06,
"loss": 0.0155,
"step": 572
},
{
"epoch": 5.43,
"grad_norm": 0.32199708558401496,
"learning_rate": 8.824742268041237e-06,
"loss": 0.0134,
"step": 573
},
{
"epoch": 5.44,
"grad_norm": 0.24951925098374725,
"learning_rate": 8.804123711340206e-06,
"loss": 0.0077,
"step": 574
},
{
"epoch": 5.45,
"grad_norm": 0.3027392971500056,
"learning_rate": 8.783505154639177e-06,
"loss": 0.0146,
"step": 575
},
{
"epoch": 5.46,
"grad_norm": 0.3429491076568905,
"learning_rate": 8.762886597938146e-06,
"loss": 0.0095,
"step": 576
},
{
"epoch": 5.47,
"grad_norm": 0.18400102446882485,
"learning_rate": 8.742268041237115e-06,
"loss": 0.0076,
"step": 577
},
{
"epoch": 5.48,
"grad_norm": 0.18674845878125962,
"learning_rate": 8.721649484536084e-06,
"loss": 0.0095,
"step": 578
},
{
"epoch": 5.49,
"grad_norm": 0.2578319195456068,
"learning_rate": 8.701030927835053e-06,
"loss": 0.0117,
"step": 579
},
{
"epoch": 5.5,
"grad_norm": 0.24779914890946345,
"learning_rate": 8.680412371134022e-06,
"loss": 0.0082,
"step": 580
},
{
"epoch": 5.51,
"grad_norm": 0.20523808916301142,
"learning_rate": 8.65979381443299e-06,
"loss": 0.0095,
"step": 581
},
{
"epoch": 5.52,
"grad_norm": 0.27638558639910105,
"learning_rate": 8.63917525773196e-06,
"loss": 0.0115,
"step": 582
},
{
"epoch": 5.53,
"grad_norm": 0.4417053829473975,
"learning_rate": 8.618556701030929e-06,
"loss": 0.0114,
"step": 583
},
{
"epoch": 5.54,
"grad_norm": 0.32912561572097343,
"learning_rate": 8.597938144329898e-06,
"loss": 0.0119,
"step": 584
},
{
"epoch": 5.55,
"grad_norm": 0.3840875023385024,
"learning_rate": 8.577319587628867e-06,
"loss": 0.015,
"step": 585
},
{
"epoch": 5.55,
"grad_norm": 0.25579070123305014,
"learning_rate": 8.556701030927836e-06,
"loss": 0.0137,
"step": 586
},
{
"epoch": 5.56,
"grad_norm": 0.304203443666268,
"learning_rate": 8.536082474226805e-06,
"loss": 0.0092,
"step": 587
},
{
"epoch": 5.57,
"grad_norm": 0.40200939915235306,
"learning_rate": 8.515463917525774e-06,
"loss": 0.0143,
"step": 588
},
{
"epoch": 5.58,
"grad_norm": 0.43548078848162036,
"learning_rate": 8.494845360824743e-06,
"loss": 0.0129,
"step": 589
},
{
"epoch": 5.59,
"grad_norm": 0.24074196109392795,
"learning_rate": 8.474226804123712e-06,
"loss": 0.0119,
"step": 590
},
{
"epoch": 5.6,
"grad_norm": 0.2634007932446454,
"learning_rate": 8.453608247422681e-06,
"loss": 0.0101,
"step": 591
},
{
"epoch": 5.61,
"grad_norm": 0.29981716266733627,
"learning_rate": 8.43298969072165e-06,
"loss": 0.0133,
"step": 592
},
{
"epoch": 5.62,
"grad_norm": 0.26188159260611216,
"learning_rate": 8.412371134020619e-06,
"loss": 0.0133,
"step": 593
},
{
"epoch": 5.63,
"grad_norm": 0.205717153842302,
"learning_rate": 8.391752577319588e-06,
"loss": 0.0097,
"step": 594
},
{
"epoch": 5.64,
"grad_norm": 0.4413493296964626,
"learning_rate": 8.371134020618557e-06,
"loss": 0.0146,
"step": 595
},
{
"epoch": 5.65,
"grad_norm": 0.3072426104855869,
"learning_rate": 8.350515463917526e-06,
"loss": 0.0122,
"step": 596
},
{
"epoch": 5.66,
"grad_norm": 0.254124576895681,
"learning_rate": 8.329896907216495e-06,
"loss": 0.012,
"step": 597
},
{
"epoch": 5.67,
"grad_norm": 0.3673105236686386,
"learning_rate": 8.309278350515464e-06,
"loss": 0.0105,
"step": 598
},
{
"epoch": 5.68,
"grad_norm": 0.3370925430569057,
"learning_rate": 8.288659793814435e-06,
"loss": 0.0121,
"step": 599
},
{
"epoch": 5.69,
"grad_norm": 0.28742987720893526,
"learning_rate": 8.268041237113402e-06,
"loss": 0.0127,
"step": 600
},
{
"epoch": 5.7,
"grad_norm": 0.20662692913394118,
"learning_rate": 8.247422680412371e-06,
"loss": 0.0106,
"step": 601
},
{
"epoch": 5.71,
"grad_norm": 0.34304236054409176,
"learning_rate": 8.22680412371134e-06,
"loss": 0.013,
"step": 602
},
{
"epoch": 5.72,
"grad_norm": 0.3498192482222944,
"learning_rate": 8.20618556701031e-06,
"loss": 0.0206,
"step": 603
},
{
"epoch": 5.73,
"grad_norm": 0.23410345877813962,
"learning_rate": 8.185567010309278e-06,
"loss": 0.0081,
"step": 604
},
{
"epoch": 5.73,
"grad_norm": 0.25643622911785885,
"learning_rate": 8.164948453608247e-06,
"loss": 0.0133,
"step": 605
},
{
"epoch": 5.74,
"grad_norm": 0.3351476276951425,
"learning_rate": 8.144329896907216e-06,
"loss": 0.011,
"step": 606
},
{
"epoch": 5.75,
"grad_norm": 0.4857353673194612,
"learning_rate": 8.123711340206185e-06,
"loss": 0.0142,
"step": 607
},
{
"epoch": 5.76,
"grad_norm": 0.24036930140903176,
"learning_rate": 8.103092783505156e-06,
"loss": 0.0098,
"step": 608
},
{
"epoch": 5.77,
"grad_norm": 0.31527822064318667,
"learning_rate": 8.082474226804125e-06,
"loss": 0.014,
"step": 609
},
{
"epoch": 5.78,
"grad_norm": 0.5123007573141696,
"learning_rate": 8.061855670103094e-06,
"loss": 0.0174,
"step": 610
},
{
"epoch": 5.79,
"grad_norm": 0.38746757448567115,
"learning_rate": 8.041237113402063e-06,
"loss": 0.0137,
"step": 611
},
{
"epoch": 5.8,
"grad_norm": 0.4343711825415732,
"learning_rate": 8.020618556701032e-06,
"loss": 0.019,
"step": 612
},
{
"epoch": 5.81,
"grad_norm": 0.2951941464936056,
"learning_rate": 8.000000000000001e-06,
"loss": 0.017,
"step": 613
},
{
"epoch": 5.82,
"grad_norm": 0.7031783613513517,
"learning_rate": 7.97938144329897e-06,
"loss": 0.0121,
"step": 614
},
{
"epoch": 5.83,
"grad_norm": 0.2683574298860335,
"learning_rate": 7.958762886597938e-06,
"loss": 0.0129,
"step": 615
},
{
"epoch": 5.84,
"grad_norm": 0.2914367547317128,
"learning_rate": 7.938144329896907e-06,
"loss": 0.0118,
"step": 616
},
{
"epoch": 5.85,
"grad_norm": 0.2616221690965675,
"learning_rate": 7.917525773195876e-06,
"loss": 0.0129,
"step": 617
},
{
"epoch": 5.86,
"grad_norm": 0.28306497295408684,
"learning_rate": 7.896907216494846e-06,
"loss": 0.0124,
"step": 618
},
{
"epoch": 5.87,
"grad_norm": 0.1838057997862805,
"learning_rate": 7.876288659793815e-06,
"loss": 0.0078,
"step": 619
},
{
"epoch": 5.88,
"grad_norm": 0.4198522465665641,
"learning_rate": 7.855670103092785e-06,
"loss": 0.0132,
"step": 620
},
{
"epoch": 5.89,
"grad_norm": 0.2251248208823119,
"learning_rate": 7.835051546391754e-06,
"loss": 0.0102,
"step": 621
},
{
"epoch": 5.9,
"grad_norm": 0.4121537026524667,
"learning_rate": 7.814432989690723e-06,
"loss": 0.0153,
"step": 622
},
{
"epoch": 5.91,
"grad_norm": 0.25137229623767915,
"learning_rate": 7.793814432989692e-06,
"loss": 0.0124,
"step": 623
},
{
"epoch": 5.91,
"grad_norm": 0.330048418683357,
"learning_rate": 7.77319587628866e-06,
"loss": 0.0121,
"step": 624
},
{
"epoch": 5.92,
"grad_norm": 0.26928445325355976,
"learning_rate": 7.75257731958763e-06,
"loss": 0.013,
"step": 625
},
{
"epoch": 5.93,
"grad_norm": 0.6750950719849301,
"learning_rate": 7.731958762886599e-06,
"loss": 0.0195,
"step": 626
},
{
"epoch": 5.94,
"grad_norm": 0.21912040175146663,
"learning_rate": 7.711340206185568e-06,
"loss": 0.0103,
"step": 627
},
{
"epoch": 5.95,
"grad_norm": 0.5192309207046593,
"learning_rate": 7.690721649484537e-06,
"loss": 0.0103,
"step": 628
},
{
"epoch": 5.96,
"grad_norm": 0.2499389350341481,
"learning_rate": 7.670103092783506e-06,
"loss": 0.0104,
"step": 629
},
{
"epoch": 5.97,
"grad_norm": 0.45889351128005573,
"learning_rate": 7.649484536082475e-06,
"loss": 0.0084,
"step": 630
},
{
"epoch": 5.98,
"grad_norm": 0.2545214901827422,
"learning_rate": 7.628865979381444e-06,
"loss": 0.0153,
"step": 631
},
{
"epoch": 5.99,
"grad_norm": 0.25279438420709727,
"learning_rate": 7.608247422680413e-06,
"loss": 0.0098,
"step": 632
},
{
"epoch": 6.0,
"grad_norm": 0.43737702685003976,
"learning_rate": 7.587628865979382e-06,
"loss": 0.0122,
"step": 633
},
{
"epoch": 6.01,
"grad_norm": 0.22934656595803204,
"learning_rate": 7.567010309278351e-06,
"loss": 0.0087,
"step": 634
},
{
"epoch": 6.02,
"grad_norm": 0.15231865896327362,
"learning_rate": 7.54639175257732e-06,
"loss": 0.0098,
"step": 635
},
{
"epoch": 6.03,
"grad_norm": 0.20717841442031606,
"learning_rate": 7.525773195876289e-06,
"loss": 0.0067,
"step": 636
},
{
"epoch": 6.04,
"grad_norm": 0.25764199600905263,
"learning_rate": 7.505154639175258e-06,
"loss": 0.007,
"step": 637
},
{
"epoch": 6.05,
"grad_norm": 0.1323193834272981,
"learning_rate": 7.484536082474228e-06,
"loss": 0.0057,
"step": 638
},
{
"epoch": 6.06,
"grad_norm": 0.3948955176186245,
"learning_rate": 7.463917525773197e-06,
"loss": 0.0053,
"step": 639
},
{
"epoch": 6.07,
"grad_norm": 0.4238329416871496,
"learning_rate": 7.443298969072166e-06,
"loss": 0.0081,
"step": 640
},
{
"epoch": 6.08,
"grad_norm": 0.19649838915193749,
"learning_rate": 7.422680412371135e-06,
"loss": 0.0053,
"step": 641
},
{
"epoch": 6.09,
"grad_norm": 0.2908240846449479,
"learning_rate": 7.402061855670104e-06,
"loss": 0.0096,
"step": 642
},
{
"epoch": 6.09,
"grad_norm": 0.2259739433011496,
"learning_rate": 7.381443298969073e-06,
"loss": 0.005,
"step": 643
},
{
"epoch": 6.1,
"grad_norm": 0.23409348906610827,
"learning_rate": 7.360824742268042e-06,
"loss": 0.0106,
"step": 644
},
{
"epoch": 6.11,
"grad_norm": 0.17073700500980396,
"learning_rate": 7.34020618556701e-06,
"loss": 0.0047,
"step": 645
},
{
"epoch": 6.12,
"grad_norm": 0.28854786967187007,
"learning_rate": 7.319587628865979e-06,
"loss": 0.0094,
"step": 646
},
{
"epoch": 6.13,
"grad_norm": 0.6234812881274385,
"learning_rate": 7.298969072164949e-06,
"loss": 0.0083,
"step": 647
},
{
"epoch": 6.14,
"grad_norm": 0.4916702762029449,
"learning_rate": 7.278350515463918e-06,
"loss": 0.0083,
"step": 648
},
{
"epoch": 6.15,
"grad_norm": 0.6537917279553066,
"learning_rate": 7.257731958762887e-06,
"loss": 0.0072,
"step": 649
},
{
"epoch": 6.16,
"grad_norm": 0.45766983087614604,
"learning_rate": 7.237113402061856e-06,
"loss": 0.005,
"step": 650
},
{
"epoch": 6.17,
"grad_norm": 0.338662913202777,
"learning_rate": 7.216494845360825e-06,
"loss": 0.0077,
"step": 651
},
{
"epoch": 6.18,
"grad_norm": 0.44542608987435417,
"learning_rate": 7.195876288659794e-06,
"loss": 0.0077,
"step": 652
},
{
"epoch": 6.19,
"grad_norm": 0.21085384264115978,
"learning_rate": 7.175257731958763e-06,
"loss": 0.0047,
"step": 653
},
{
"epoch": 6.2,
"grad_norm": 0.43108200588178025,
"learning_rate": 7.154639175257733e-06,
"loss": 0.0082,
"step": 654
},
{
"epoch": 6.21,
"grad_norm": 0.27079959653147445,
"learning_rate": 7.134020618556702e-06,
"loss": 0.0065,
"step": 655
},
{
"epoch": 6.22,
"grad_norm": 0.18635187697421032,
"learning_rate": 7.113402061855671e-06,
"loss": 0.0046,
"step": 656
},
{
"epoch": 6.23,
"grad_norm": 0.1937725776930435,
"learning_rate": 7.09278350515464e-06,
"loss": 0.0043,
"step": 657
},
{
"epoch": 6.24,
"grad_norm": 0.25968599174419577,
"learning_rate": 7.072164948453609e-06,
"loss": 0.0061,
"step": 658
},
{
"epoch": 6.25,
"grad_norm": 0.35569005930879194,
"learning_rate": 7.051546391752578e-06,
"loss": 0.0112,
"step": 659
},
{
"epoch": 6.26,
"grad_norm": 0.24992727175457866,
"learning_rate": 7.030927835051546e-06,
"loss": 0.0052,
"step": 660
},
{
"epoch": 6.27,
"grad_norm": 0.20240529645065294,
"learning_rate": 7.010309278350515e-06,
"loss": 0.0042,
"step": 661
},
{
"epoch": 6.27,
"grad_norm": 0.4155360937037759,
"learning_rate": 6.989690721649484e-06,
"loss": 0.0078,
"step": 662
},
{
"epoch": 6.28,
"grad_norm": 0.17432727299005332,
"learning_rate": 6.9690721649484534e-06,
"loss": 0.0054,
"step": 663
},
{
"epoch": 6.29,
"grad_norm": 0.3722985035966386,
"learning_rate": 6.948453608247423e-06,
"loss": 0.0092,
"step": 664
},
{
"epoch": 6.3,
"grad_norm": 0.43975229887933437,
"learning_rate": 6.927835051546392e-06,
"loss": 0.0103,
"step": 665
},
{
"epoch": 6.31,
"grad_norm": 0.4940771129939558,
"learning_rate": 6.907216494845361e-06,
"loss": 0.0122,
"step": 666
},
{
"epoch": 6.32,
"grad_norm": 0.5740542533003908,
"learning_rate": 6.88659793814433e-06,
"loss": 0.0082,
"step": 667
},
{
"epoch": 6.33,
"grad_norm": 0.2058111984948537,
"learning_rate": 6.865979381443299e-06,
"loss": 0.0041,
"step": 668
},
{
"epoch": 6.34,
"grad_norm": 0.2769802828064851,
"learning_rate": 6.8453608247422684e-06,
"loss": 0.0043,
"step": 669
},
{
"epoch": 6.35,
"grad_norm": 0.17711607312500582,
"learning_rate": 6.8247422680412375e-06,
"loss": 0.005,
"step": 670
},
{
"epoch": 6.36,
"grad_norm": 0.2643012835038824,
"learning_rate": 6.804123711340207e-06,
"loss": 0.007,
"step": 671
},
{
"epoch": 6.37,
"grad_norm": 0.15711871292498947,
"learning_rate": 6.783505154639176e-06,
"loss": 0.0047,
"step": 672
},
{
"epoch": 6.38,
"grad_norm": 0.24961908945393735,
"learning_rate": 6.762886597938145e-06,
"loss": 0.0072,
"step": 673
},
{
"epoch": 6.39,
"grad_norm": 0.30169143687465855,
"learning_rate": 6.742268041237114e-06,
"loss": 0.0052,
"step": 674
},
{
"epoch": 6.4,
"grad_norm": 0.3934678580621137,
"learning_rate": 6.7216494845360834e-06,
"loss": 0.0113,
"step": 675
},
{
"epoch": 6.41,
"grad_norm": 0.14868452197760967,
"learning_rate": 6.701030927835052e-06,
"loss": 0.0033,
"step": 676
},
{
"epoch": 6.42,
"grad_norm": 0.15768272852397436,
"learning_rate": 6.680412371134021e-06,
"loss": 0.0049,
"step": 677
},
{
"epoch": 6.43,
"grad_norm": 0.18987586844687035,
"learning_rate": 6.65979381443299e-06,
"loss": 0.0058,
"step": 678
},
{
"epoch": 6.44,
"grad_norm": 0.32365525926963373,
"learning_rate": 6.639175257731959e-06,
"loss": 0.0113,
"step": 679
},
{
"epoch": 6.45,
"grad_norm": 0.30112921416349264,
"learning_rate": 6.6185567010309286e-06,
"loss": 0.0059,
"step": 680
},
{
"epoch": 6.45,
"grad_norm": 0.27227559502406745,
"learning_rate": 6.597938144329898e-06,
"loss": 0.0044,
"step": 681
},
{
"epoch": 6.46,
"grad_norm": 0.2332633357697481,
"learning_rate": 6.577319587628867e-06,
"loss": 0.005,
"step": 682
},
{
"epoch": 6.47,
"grad_norm": 0.20390945044742648,
"learning_rate": 6.556701030927836e-06,
"loss": 0.0102,
"step": 683
},
{
"epoch": 6.48,
"grad_norm": 0.15200979357777994,
"learning_rate": 6.536082474226805e-06,
"loss": 0.0039,
"step": 684
},
{
"epoch": 6.49,
"grad_norm": 0.5223834969714265,
"learning_rate": 6.515463917525774e-06,
"loss": 0.0111,
"step": 685
},
{
"epoch": 6.5,
"grad_norm": 0.3265798495671334,
"learning_rate": 6.494845360824743e-06,
"loss": 0.0078,
"step": 686
},
{
"epoch": 6.51,
"grad_norm": 0.232083338532599,
"learning_rate": 6.4742268041237126e-06,
"loss": 0.0055,
"step": 687
},
{
"epoch": 6.52,
"grad_norm": 0.34450412522366625,
"learning_rate": 6.453608247422682e-06,
"loss": 0.0072,
"step": 688
},
{
"epoch": 6.53,
"grad_norm": 0.2940178208253681,
"learning_rate": 6.432989690721651e-06,
"loss": 0.0096,
"step": 689
},
{
"epoch": 6.54,
"grad_norm": 0.22739277995901097,
"learning_rate": 6.41237113402062e-06,
"loss": 0.0086,
"step": 690
},
{
"epoch": 6.55,
"grad_norm": 0.18501928510863955,
"learning_rate": 6.391752577319588e-06,
"loss": 0.0054,
"step": 691
},
{
"epoch": 6.56,
"grad_norm": 0.2495674698089042,
"learning_rate": 6.371134020618557e-06,
"loss": 0.0055,
"step": 692
},
{
"epoch": 6.57,
"grad_norm": 0.3115858655220306,
"learning_rate": 6.350515463917526e-06,
"loss": 0.0099,
"step": 693
},
{
"epoch": 6.58,
"grad_norm": 0.22112881277968816,
"learning_rate": 6.329896907216495e-06,
"loss": 0.0066,
"step": 694
},
{
"epoch": 6.59,
"grad_norm": 0.20243049146834055,
"learning_rate": 6.309278350515464e-06,
"loss": 0.0064,
"step": 695
},
{
"epoch": 6.6,
"grad_norm": 0.22200307910304581,
"learning_rate": 6.288659793814433e-06,
"loss": 0.0061,
"step": 696
},
{
"epoch": 6.61,
"grad_norm": 0.18114997882790249,
"learning_rate": 6.268041237113403e-06,
"loss": 0.0048,
"step": 697
},
{
"epoch": 6.62,
"grad_norm": 0.18321139514207627,
"learning_rate": 6.247422680412372e-06,
"loss": 0.0045,
"step": 698
},
{
"epoch": 6.63,
"grad_norm": 0.23312915446744617,
"learning_rate": 6.226804123711341e-06,
"loss": 0.0058,
"step": 699
},
{
"epoch": 6.64,
"grad_norm": 0.29021254723103196,
"learning_rate": 6.20618556701031e-06,
"loss": 0.0043,
"step": 700
},
{
"epoch": 6.64,
"grad_norm": 0.2228934881305218,
"learning_rate": 6.185567010309279e-06,
"loss": 0.0055,
"step": 701
},
{
"epoch": 6.65,
"grad_norm": 0.19568048144400402,
"learning_rate": 6.164948453608248e-06,
"loss": 0.0042,
"step": 702
},
{
"epoch": 6.66,
"grad_norm": 0.36502903760734834,
"learning_rate": 6.144329896907217e-06,
"loss": 0.0083,
"step": 703
},
{
"epoch": 6.67,
"grad_norm": 0.2105959847384471,
"learning_rate": 6.123711340206187e-06,
"loss": 0.0041,
"step": 704
},
{
"epoch": 6.68,
"grad_norm": 0.28583998554670637,
"learning_rate": 6.103092783505156e-06,
"loss": 0.008,
"step": 705
},
{
"epoch": 6.69,
"grad_norm": 0.2877234166151061,
"learning_rate": 6.082474226804124e-06,
"loss": 0.0045,
"step": 706
},
{
"epoch": 6.7,
"grad_norm": 0.2595387378602294,
"learning_rate": 6.061855670103093e-06,
"loss": 0.0068,
"step": 707
},
{
"epoch": 6.71,
"grad_norm": 0.3666200514263358,
"learning_rate": 6.041237113402062e-06,
"loss": 0.0076,
"step": 708
},
{
"epoch": 6.72,
"grad_norm": 0.32836599809355366,
"learning_rate": 6.020618556701031e-06,
"loss": 0.0094,
"step": 709
},
{
"epoch": 6.73,
"grad_norm": 0.1502521298292464,
"learning_rate": 6e-06,
"loss": 0.0036,
"step": 710
},
{
"epoch": 6.74,
"grad_norm": 0.3099705969225408,
"learning_rate": 5.979381443298969e-06,
"loss": 0.007,
"step": 711
},
{
"epoch": 6.75,
"grad_norm": 0.2671350583818879,
"learning_rate": 5.958762886597938e-06,
"loss": 0.0055,
"step": 712
},
{
"epoch": 6.76,
"grad_norm": 0.17973191292917046,
"learning_rate": 5.938144329896908e-06,
"loss": 0.0035,
"step": 713
},
{
"epoch": 6.77,
"grad_norm": 0.23976800200824433,
"learning_rate": 5.917525773195877e-06,
"loss": 0.0094,
"step": 714
},
{
"epoch": 6.78,
"grad_norm": 0.22438468475214163,
"learning_rate": 5.896907216494846e-06,
"loss": 0.0052,
"step": 715
},
{
"epoch": 6.79,
"grad_norm": 0.27719335104735204,
"learning_rate": 5.876288659793815e-06,
"loss": 0.0082,
"step": 716
},
{
"epoch": 6.8,
"grad_norm": 0.34399483371337203,
"learning_rate": 5.855670103092784e-06,
"loss": 0.0064,
"step": 717
},
{
"epoch": 6.81,
"grad_norm": 0.23526144082943856,
"learning_rate": 5.835051546391753e-06,
"loss": 0.0053,
"step": 718
},
{
"epoch": 6.82,
"grad_norm": 0.24070642969870953,
"learning_rate": 5.814432989690722e-06,
"loss": 0.0063,
"step": 719
},
{
"epoch": 6.82,
"grad_norm": 0.3700428588633788,
"learning_rate": 5.793814432989692e-06,
"loss": 0.0048,
"step": 720
},
{
"epoch": 6.83,
"grad_norm": 0.2950260534136937,
"learning_rate": 5.7731958762886594e-06,
"loss": 0.0066,
"step": 721
},
{
"epoch": 6.84,
"grad_norm": 0.2567961484308644,
"learning_rate": 5.752577319587629e-06,
"loss": 0.007,
"step": 722
},
{
"epoch": 6.85,
"grad_norm": 0.38641545896743845,
"learning_rate": 5.731958762886598e-06,
"loss": 0.0091,
"step": 723
},
{
"epoch": 6.86,
"grad_norm": 0.6894868188681116,
"learning_rate": 5.711340206185567e-06,
"loss": 0.0059,
"step": 724
},
{
"epoch": 6.87,
"grad_norm": 0.2086778424808283,
"learning_rate": 5.690721649484536e-06,
"loss": 0.0049,
"step": 725
},
{
"epoch": 6.88,
"grad_norm": 0.5003155963999651,
"learning_rate": 5.670103092783505e-06,
"loss": 0.0056,
"step": 726
},
{
"epoch": 6.89,
"grad_norm": 0.3749979734366017,
"learning_rate": 5.6494845360824744e-06,
"loss": 0.0107,
"step": 727
},
{
"epoch": 6.9,
"grad_norm": 0.3579697500786867,
"learning_rate": 5.6288659793814435e-06,
"loss": 0.0111,
"step": 728
},
{
"epoch": 6.91,
"grad_norm": 0.15577346135663542,
"learning_rate": 5.6082474226804125e-06,
"loss": 0.0052,
"step": 729
},
{
"epoch": 6.92,
"grad_norm": 0.24229129751187709,
"learning_rate": 5.587628865979382e-06,
"loss": 0.0061,
"step": 730
},
{
"epoch": 6.93,
"grad_norm": 0.272248489519313,
"learning_rate": 5.567010309278351e-06,
"loss": 0.0056,
"step": 731
},
{
"epoch": 6.94,
"grad_norm": 0.2643050890894097,
"learning_rate": 5.54639175257732e-06,
"loss": 0.0057,
"step": 732
},
{
"epoch": 6.95,
"grad_norm": 0.1879692365238936,
"learning_rate": 5.525773195876289e-06,
"loss": 0.0053,
"step": 733
},
{
"epoch": 6.96,
"grad_norm": 0.19340918121052053,
"learning_rate": 5.5051546391752584e-06,
"loss": 0.0062,
"step": 734
},
{
"epoch": 6.97,
"grad_norm": 0.1529525975962478,
"learning_rate": 5.4845360824742275e-06,
"loss": 0.0056,
"step": 735
},
{
"epoch": 6.98,
"grad_norm": 0.2736569341555376,
"learning_rate": 5.463917525773196e-06,
"loss": 0.0084,
"step": 736
},
{
"epoch": 6.99,
"grad_norm": 0.110938591179384,
"learning_rate": 5.443298969072165e-06,
"loss": 0.0034,
"step": 737
},
{
"epoch": 7.0,
"grad_norm": 0.11849309886356285,
"learning_rate": 5.422680412371134e-06,
"loss": 0.0038,
"step": 738
},
{
"epoch": 7.0,
"grad_norm": 0.14387416323452448,
"learning_rate": 5.4020618556701036e-06,
"loss": 0.004,
"step": 739
},
{
"epoch": 7.01,
"grad_norm": 0.13134460729300612,
"learning_rate": 5.381443298969073e-06,
"loss": 0.0017,
"step": 740
},
{
"epoch": 7.02,
"grad_norm": 0.07788721196206765,
"learning_rate": 5.360824742268042e-06,
"loss": 0.002,
"step": 741
},
{
"epoch": 7.03,
"grad_norm": 0.15260318526862712,
"learning_rate": 5.340206185567011e-06,
"loss": 0.0034,
"step": 742
},
{
"epoch": 7.04,
"grad_norm": 0.1089060941487077,
"learning_rate": 5.31958762886598e-06,
"loss": 0.0027,
"step": 743
},
{
"epoch": 7.05,
"grad_norm": 0.5144936258164009,
"learning_rate": 5.298969072164949e-06,
"loss": 0.0084,
"step": 744
},
{
"epoch": 7.06,
"grad_norm": 0.10415072666834733,
"learning_rate": 5.278350515463918e-06,
"loss": 0.0031,
"step": 745
},
{
"epoch": 7.07,
"grad_norm": 0.23251966615021696,
"learning_rate": 5.257731958762888e-06,
"loss": 0.0043,
"step": 746
},
{
"epoch": 7.08,
"grad_norm": 0.3238685163977462,
"learning_rate": 5.237113402061857e-06,
"loss": 0.0024,
"step": 747
},
{
"epoch": 7.09,
"grad_norm": 0.09957366190949146,
"learning_rate": 5.216494845360826e-06,
"loss": 0.0034,
"step": 748
},
{
"epoch": 7.1,
"grad_norm": 0.16441110064955763,
"learning_rate": 5.195876288659795e-06,
"loss": 0.0033,
"step": 749
},
{
"epoch": 7.11,
"grad_norm": 0.11502838716698602,
"learning_rate": 5.175257731958764e-06,
"loss": 0.0019,
"step": 750
},
{
"epoch": 7.12,
"grad_norm": 0.2059822859277287,
"learning_rate": 5.154639175257732e-06,
"loss": 0.0033,
"step": 751
},
{
"epoch": 7.13,
"grad_norm": 0.27175574153425236,
"learning_rate": 5.134020618556701e-06,
"loss": 0.0059,
"step": 752
},
{
"epoch": 7.14,
"grad_norm": 0.20687016932388036,
"learning_rate": 5.11340206185567e-06,
"loss": 0.0034,
"step": 753
},
{
"epoch": 7.15,
"grad_norm": 0.07446575471966888,
"learning_rate": 5.092783505154639e-06,
"loss": 0.0017,
"step": 754
},
{
"epoch": 7.16,
"grad_norm": 0.08134197831230383,
"learning_rate": 5.072164948453609e-06,
"loss": 0.0018,
"step": 755
},
{
"epoch": 7.17,
"grad_norm": 0.4431797957874383,
"learning_rate": 5.051546391752578e-06,
"loss": 0.0035,
"step": 756
},
{
"epoch": 7.18,
"grad_norm": 0.08978970602215328,
"learning_rate": 5.030927835051547e-06,
"loss": 0.0016,
"step": 757
},
{
"epoch": 7.18,
"grad_norm": 0.08201681589111187,
"learning_rate": 5.010309278350516e-06,
"loss": 0.0018,
"step": 758
},
{
"epoch": 7.19,
"grad_norm": 0.19255657384965863,
"learning_rate": 4.989690721649485e-06,
"loss": 0.0046,
"step": 759
},
{
"epoch": 7.2,
"grad_norm": 0.16057946531020928,
"learning_rate": 4.969072164948454e-06,
"loss": 0.0035,
"step": 760
},
{
"epoch": 7.21,
"grad_norm": 0.13631749535248597,
"learning_rate": 4.948453608247423e-06,
"loss": 0.0021,
"step": 761
},
{
"epoch": 7.22,
"grad_norm": 0.7153825657411309,
"learning_rate": 4.927835051546392e-06,
"loss": 0.0052,
"step": 762
},
{
"epoch": 7.23,
"grad_norm": 0.16228089967333942,
"learning_rate": 4.907216494845361e-06,
"loss": 0.0021,
"step": 763
},
{
"epoch": 7.24,
"grad_norm": 0.24502597494763348,
"learning_rate": 4.88659793814433e-06,
"loss": 0.0022,
"step": 764
},
{
"epoch": 7.25,
"grad_norm": 0.2654605805395672,
"learning_rate": 4.865979381443299e-06,
"loss": 0.0026,
"step": 765
},
{
"epoch": 7.26,
"grad_norm": 0.5389825789320348,
"learning_rate": 4.845360824742268e-06,
"loss": 0.0071,
"step": 766
},
{
"epoch": 7.27,
"grad_norm": 0.17119480049176156,
"learning_rate": 4.824742268041238e-06,
"loss": 0.0015,
"step": 767
},
{
"epoch": 7.28,
"grad_norm": 0.104553489885139,
"learning_rate": 4.804123711340207e-06,
"loss": 0.0012,
"step": 768
},
{
"epoch": 7.29,
"grad_norm": 0.12259079407573792,
"learning_rate": 4.783505154639176e-06,
"loss": 0.0026,
"step": 769
},
{
"epoch": 7.3,
"grad_norm": 0.12028525774793548,
"learning_rate": 4.762886597938144e-06,
"loss": 0.0018,
"step": 770
},
{
"epoch": 7.31,
"grad_norm": 0.25021515469036354,
"learning_rate": 4.742268041237113e-06,
"loss": 0.0042,
"step": 771
},
{
"epoch": 7.32,
"grad_norm": 0.24941873030749326,
"learning_rate": 4.721649484536083e-06,
"loss": 0.0026,
"step": 772
},
{
"epoch": 7.33,
"grad_norm": 0.16119100502451406,
"learning_rate": 4.701030927835052e-06,
"loss": 0.0019,
"step": 773
},
{
"epoch": 7.34,
"grad_norm": 0.18735636733059363,
"learning_rate": 4.680412371134021e-06,
"loss": 0.0034,
"step": 774
},
{
"epoch": 7.35,
"grad_norm": 0.3405583858973068,
"learning_rate": 4.65979381443299e-06,
"loss": 0.0045,
"step": 775
},
{
"epoch": 7.36,
"grad_norm": 0.18205119461685462,
"learning_rate": 4.639175257731959e-06,
"loss": 0.0022,
"step": 776
},
{
"epoch": 7.36,
"grad_norm": 0.1260694412756396,
"learning_rate": 4.618556701030928e-06,
"loss": 0.0013,
"step": 777
},
{
"epoch": 7.37,
"grad_norm": 0.1680960837920412,
"learning_rate": 4.597938144329897e-06,
"loss": 0.002,
"step": 778
},
{
"epoch": 7.38,
"grad_norm": 0.15630853271864512,
"learning_rate": 4.577319587628866e-06,
"loss": 0.0034,
"step": 779
},
{
"epoch": 7.39,
"grad_norm": 0.5056257260650967,
"learning_rate": 4.556701030927835e-06,
"loss": 0.0044,
"step": 780
},
{
"epoch": 7.4,
"grad_norm": 0.3261608875731138,
"learning_rate": 4.536082474226804e-06,
"loss": 0.0024,
"step": 781
},
{
"epoch": 7.41,
"grad_norm": 0.11774930596045845,
"learning_rate": 4.515463917525773e-06,
"loss": 0.0027,
"step": 782
},
{
"epoch": 7.42,
"grad_norm": 0.20014053153468342,
"learning_rate": 4.494845360824742e-06,
"loss": 0.0033,
"step": 783
},
{
"epoch": 7.43,
"grad_norm": 0.13662801183781947,
"learning_rate": 4.474226804123712e-06,
"loss": 0.0022,
"step": 784
},
{
"epoch": 7.44,
"grad_norm": 0.1367035652992447,
"learning_rate": 4.453608247422681e-06,
"loss": 0.0015,
"step": 785
},
{
"epoch": 7.45,
"grad_norm": 0.14959126604407402,
"learning_rate": 4.4329896907216494e-06,
"loss": 0.0016,
"step": 786
},
{
"epoch": 7.46,
"grad_norm": 0.40227617233297097,
"learning_rate": 4.4123711340206185e-06,
"loss": 0.0069,
"step": 787
},
{
"epoch": 7.47,
"grad_norm": 0.1439579211526795,
"learning_rate": 4.391752577319588e-06,
"loss": 0.0019,
"step": 788
},
{
"epoch": 7.48,
"grad_norm": 0.2423599613554704,
"learning_rate": 4.371134020618557e-06,
"loss": 0.002,
"step": 789
},
{
"epoch": 7.49,
"grad_norm": 0.061898449078300846,
"learning_rate": 4.350515463917526e-06,
"loss": 0.0007,
"step": 790
},
{
"epoch": 7.5,
"grad_norm": 0.21962886757720973,
"learning_rate": 4.329896907216495e-06,
"loss": 0.0023,
"step": 791
},
{
"epoch": 7.51,
"grad_norm": 0.25843588283083585,
"learning_rate": 4.3092783505154644e-06,
"loss": 0.0039,
"step": 792
},
{
"epoch": 7.52,
"grad_norm": 0.15596379651783276,
"learning_rate": 4.2886597938144335e-06,
"loss": 0.0022,
"step": 793
},
{
"epoch": 7.53,
"grad_norm": 0.28093297628000347,
"learning_rate": 4.2680412371134025e-06,
"loss": 0.0023,
"step": 794
},
{
"epoch": 7.54,
"grad_norm": 0.10389743423350419,
"learning_rate": 4.2474226804123715e-06,
"loss": 0.0016,
"step": 795
},
{
"epoch": 7.55,
"grad_norm": 0.1906894184142329,
"learning_rate": 4.2268041237113405e-06,
"loss": 0.0041,
"step": 796
},
{
"epoch": 7.55,
"grad_norm": 0.351454008104445,
"learning_rate": 4.2061855670103096e-06,
"loss": 0.0028,
"step": 797
},
{
"epoch": 7.56,
"grad_norm": 0.28491723481759745,
"learning_rate": 4.185567010309279e-06,
"loss": 0.0029,
"step": 798
},
{
"epoch": 7.57,
"grad_norm": 0.32800973275401546,
"learning_rate": 4.164948453608248e-06,
"loss": 0.0031,
"step": 799
},
{
"epoch": 7.58,
"grad_norm": 0.16568805128723144,
"learning_rate": 4.1443298969072175e-06,
"loss": 0.0025,
"step": 800
},
{
"epoch": 7.59,
"grad_norm": 0.13805636962121431,
"learning_rate": 4.123711340206186e-06,
"loss": 0.004,
"step": 801
},
{
"epoch": 7.6,
"grad_norm": 0.22470442377904398,
"learning_rate": 4.103092783505155e-06,
"loss": 0.0036,
"step": 802
},
{
"epoch": 7.61,
"grad_norm": 0.13920068652558185,
"learning_rate": 4.082474226804124e-06,
"loss": 0.0016,
"step": 803
},
{
"epoch": 7.62,
"grad_norm": 0.12487615470333269,
"learning_rate": 4.061855670103093e-06,
"loss": 0.002,
"step": 804
},
{
"epoch": 7.63,
"grad_norm": 0.13024795088464938,
"learning_rate": 4.041237113402063e-06,
"loss": 0.0021,
"step": 805
},
{
"epoch": 7.64,
"grad_norm": 0.4559559030982543,
"learning_rate": 4.020618556701032e-06,
"loss": 0.0049,
"step": 806
},
{
"epoch": 7.65,
"grad_norm": 0.31572235041331265,
"learning_rate": 4.000000000000001e-06,
"loss": 0.0031,
"step": 807
},
{
"epoch": 7.66,
"grad_norm": 0.10781685433371178,
"learning_rate": 3.979381443298969e-06,
"loss": 0.0012,
"step": 808
},
{
"epoch": 7.67,
"grad_norm": 0.2707219292080708,
"learning_rate": 3.958762886597938e-06,
"loss": 0.0046,
"step": 809
},
{
"epoch": 7.68,
"grad_norm": 0.18722702062922045,
"learning_rate": 3.938144329896908e-06,
"loss": 0.0022,
"step": 810
},
{
"epoch": 7.69,
"grad_norm": 0.19212406204692095,
"learning_rate": 3.917525773195877e-06,
"loss": 0.0021,
"step": 811
},
{
"epoch": 7.7,
"grad_norm": 0.38087322503362697,
"learning_rate": 3.896907216494846e-06,
"loss": 0.0037,
"step": 812
},
{
"epoch": 7.71,
"grad_norm": 0.26518756245475933,
"learning_rate": 3.876288659793815e-06,
"loss": 0.0032,
"step": 813
},
{
"epoch": 7.72,
"grad_norm": 0.2553887734939709,
"learning_rate": 3.855670103092784e-06,
"loss": 0.0031,
"step": 814
},
{
"epoch": 7.73,
"grad_norm": 0.1343025738681451,
"learning_rate": 3.835051546391753e-06,
"loss": 0.0031,
"step": 815
},
{
"epoch": 7.73,
"grad_norm": 0.17024687128126006,
"learning_rate": 3.814432989690722e-06,
"loss": 0.0022,
"step": 816
},
{
"epoch": 7.74,
"grad_norm": 0.18223575801892042,
"learning_rate": 3.793814432989691e-06,
"loss": 0.0037,
"step": 817
},
{
"epoch": 7.75,
"grad_norm": 0.27174237568099197,
"learning_rate": 3.77319587628866e-06,
"loss": 0.0035,
"step": 818
},
{
"epoch": 7.76,
"grad_norm": 0.14450601408992675,
"learning_rate": 3.752577319587629e-06,
"loss": 0.0013,
"step": 819
},
{
"epoch": 7.77,
"grad_norm": 0.18897491743653955,
"learning_rate": 3.7319587628865984e-06,
"loss": 0.0025,
"step": 820
},
{
"epoch": 7.78,
"grad_norm": 0.07385960694848187,
"learning_rate": 3.7113402061855674e-06,
"loss": 0.001,
"step": 821
},
{
"epoch": 7.79,
"grad_norm": 0.15435878501514447,
"learning_rate": 3.6907216494845365e-06,
"loss": 0.002,
"step": 822
},
{
"epoch": 7.8,
"grad_norm": 0.08634587004116331,
"learning_rate": 3.670103092783505e-06,
"loss": 0.0012,
"step": 823
},
{
"epoch": 7.81,
"grad_norm": 0.2058857410491722,
"learning_rate": 3.6494845360824745e-06,
"loss": 0.0053,
"step": 824
},
{
"epoch": 7.82,
"grad_norm": 0.1858026147497508,
"learning_rate": 3.6288659793814435e-06,
"loss": 0.0041,
"step": 825
},
{
"epoch": 7.83,
"grad_norm": 0.1011331957444775,
"learning_rate": 3.6082474226804126e-06,
"loss": 0.0018,
"step": 826
},
{
"epoch": 7.84,
"grad_norm": 0.23666951451188797,
"learning_rate": 3.5876288659793816e-06,
"loss": 0.0059,
"step": 827
},
{
"epoch": 7.85,
"grad_norm": 0.2603758621036727,
"learning_rate": 3.567010309278351e-06,
"loss": 0.0062,
"step": 828
},
{
"epoch": 7.86,
"grad_norm": 0.16480165821012008,
"learning_rate": 3.54639175257732e-06,
"loss": 0.004,
"step": 829
},
{
"epoch": 7.87,
"grad_norm": 0.28086898744817096,
"learning_rate": 3.525773195876289e-06,
"loss": 0.0026,
"step": 830
},
{
"epoch": 7.88,
"grad_norm": 0.12322710099051527,
"learning_rate": 3.5051546391752577e-06,
"loss": 0.0032,
"step": 831
},
{
"epoch": 7.89,
"grad_norm": 0.242219730509079,
"learning_rate": 3.4845360824742267e-06,
"loss": 0.0026,
"step": 832
},
{
"epoch": 7.9,
"grad_norm": 0.14529332731776778,
"learning_rate": 3.463917525773196e-06,
"loss": 0.0027,
"step": 833
},
{
"epoch": 7.91,
"grad_norm": 0.2138874096650693,
"learning_rate": 3.443298969072165e-06,
"loss": 0.0034,
"step": 834
},
{
"epoch": 7.91,
"grad_norm": 0.19749759028268452,
"learning_rate": 3.4226804123711342e-06,
"loss": 0.0037,
"step": 835
},
{
"epoch": 7.92,
"grad_norm": 0.15476553398449305,
"learning_rate": 3.4020618556701037e-06,
"loss": 0.0019,
"step": 836
},
{
"epoch": 7.93,
"grad_norm": 0.15960147768108288,
"learning_rate": 3.3814432989690727e-06,
"loss": 0.0033,
"step": 837
},
{
"epoch": 7.94,
"grad_norm": 0.624338061755059,
"learning_rate": 3.3608247422680417e-06,
"loss": 0.0015,
"step": 838
},
{
"epoch": 7.95,
"grad_norm": 0.13702122063292282,
"learning_rate": 3.3402061855670103e-06,
"loss": 0.0023,
"step": 839
},
{
"epoch": 7.96,
"grad_norm": 0.08365808633780984,
"learning_rate": 3.3195876288659793e-06,
"loss": 0.0012,
"step": 840
},
{
"epoch": 7.97,
"grad_norm": 0.0899836549718512,
"learning_rate": 3.298969072164949e-06,
"loss": 0.0011,
"step": 841
},
{
"epoch": 7.98,
"grad_norm": 0.4829138899331933,
"learning_rate": 3.278350515463918e-06,
"loss": 0.008,
"step": 842
},
{
"epoch": 7.99,
"grad_norm": 0.1919209896415992,
"learning_rate": 3.257731958762887e-06,
"loss": 0.004,
"step": 843
},
{
"epoch": 8.0,
"grad_norm": 0.16899372796264808,
"learning_rate": 3.2371134020618563e-06,
"loss": 0.0029,
"step": 844
},
{
"epoch": 8.01,
"grad_norm": 0.05509708497550106,
"learning_rate": 3.2164948453608253e-06,
"loss": 0.0008,
"step": 845
},
{
"epoch": 8.02,
"grad_norm": 0.043054987243414666,
"learning_rate": 3.195876288659794e-06,
"loss": 0.0009,
"step": 846
},
{
"epoch": 8.03,
"grad_norm": 0.07564136188229313,
"learning_rate": 3.175257731958763e-06,
"loss": 0.0008,
"step": 847
},
{
"epoch": 8.04,
"grad_norm": 0.06691193876219953,
"learning_rate": 3.154639175257732e-06,
"loss": 0.0014,
"step": 848
},
{
"epoch": 8.05,
"grad_norm": 0.0603253152399713,
"learning_rate": 3.1340206185567014e-06,
"loss": 0.0012,
"step": 849
},
{
"epoch": 8.06,
"grad_norm": 0.10812760361667872,
"learning_rate": 3.1134020618556704e-06,
"loss": 0.001,
"step": 850
},
{
"epoch": 8.07,
"grad_norm": 0.08892218491395185,
"learning_rate": 3.0927835051546395e-06,
"loss": 0.0024,
"step": 851
},
{
"epoch": 8.08,
"grad_norm": 0.04677768629502165,
"learning_rate": 3.0721649484536085e-06,
"loss": 0.0006,
"step": 852
},
{
"epoch": 8.09,
"grad_norm": 0.04430331529830851,
"learning_rate": 3.051546391752578e-06,
"loss": 0.0006,
"step": 853
},
{
"epoch": 8.09,
"grad_norm": 0.09346449113687558,
"learning_rate": 3.0309278350515465e-06,
"loss": 0.0012,
"step": 854
},
{
"epoch": 8.1,
"grad_norm": 0.2160454348367651,
"learning_rate": 3.0103092783505156e-06,
"loss": 0.0028,
"step": 855
},
{
"epoch": 8.11,
"grad_norm": 0.05155936018458442,
"learning_rate": 2.9896907216494846e-06,
"loss": 0.0008,
"step": 856
},
{
"epoch": 8.12,
"grad_norm": 0.07141585375187036,
"learning_rate": 2.969072164948454e-06,
"loss": 0.0013,
"step": 857
},
{
"epoch": 8.13,
"grad_norm": 0.0687070418387174,
"learning_rate": 2.948453608247423e-06,
"loss": 0.0009,
"step": 858
},
{
"epoch": 8.14,
"grad_norm": 0.07669199142811815,
"learning_rate": 2.927835051546392e-06,
"loss": 0.0009,
"step": 859
},
{
"epoch": 8.15,
"grad_norm": 0.04465730126907368,
"learning_rate": 2.907216494845361e-06,
"loss": 0.0007,
"step": 860
},
{
"epoch": 8.16,
"grad_norm": 0.16895614434947578,
"learning_rate": 2.8865979381443297e-06,
"loss": 0.0015,
"step": 861
},
{
"epoch": 8.17,
"grad_norm": 0.030537970405421798,
"learning_rate": 2.865979381443299e-06,
"loss": 0.0004,
"step": 862
},
{
"epoch": 8.18,
"grad_norm": 0.06522562102441246,
"learning_rate": 2.845360824742268e-06,
"loss": 0.0011,
"step": 863
},
{
"epoch": 8.19,
"grad_norm": 0.05635364358518778,
"learning_rate": 2.8247422680412372e-06,
"loss": 0.0008,
"step": 864
},
{
"epoch": 8.2,
"grad_norm": 0.27006905568911943,
"learning_rate": 2.8041237113402062e-06,
"loss": 0.0028,
"step": 865
},
{
"epoch": 8.21,
"grad_norm": 0.06011103263626429,
"learning_rate": 2.7835051546391757e-06,
"loss": 0.001,
"step": 866
},
{
"epoch": 8.22,
"grad_norm": 0.0483751457086483,
"learning_rate": 2.7628865979381447e-06,
"loss": 0.001,
"step": 867
},
{
"epoch": 8.23,
"grad_norm": 0.19500311188782363,
"learning_rate": 2.7422680412371137e-06,
"loss": 0.0016,
"step": 868
},
{
"epoch": 8.24,
"grad_norm": 0.08213703963458831,
"learning_rate": 2.7216494845360823e-06,
"loss": 0.0008,
"step": 869
},
{
"epoch": 8.25,
"grad_norm": 0.027020412786747442,
"learning_rate": 2.7010309278350518e-06,
"loss": 0.0003,
"step": 870
},
{
"epoch": 8.26,
"grad_norm": 0.20242259650799319,
"learning_rate": 2.680412371134021e-06,
"loss": 0.0032,
"step": 871
},
{
"epoch": 8.27,
"grad_norm": 0.08711446471377081,
"learning_rate": 2.65979381443299e-06,
"loss": 0.0014,
"step": 872
},
{
"epoch": 8.27,
"grad_norm": 0.07389239032061967,
"learning_rate": 2.639175257731959e-06,
"loss": 0.0009,
"step": 873
},
{
"epoch": 8.28,
"grad_norm": 0.1371163852779683,
"learning_rate": 2.6185567010309283e-06,
"loss": 0.0013,
"step": 874
},
{
"epoch": 8.29,
"grad_norm": 0.07524174953179151,
"learning_rate": 2.5979381443298973e-06,
"loss": 0.0006,
"step": 875
},
{
"epoch": 8.3,
"grad_norm": 0.058236929098366835,
"learning_rate": 2.577319587628866e-06,
"loss": 0.0005,
"step": 876
},
{
"epoch": 8.31,
"grad_norm": 0.14363459873619006,
"learning_rate": 2.556701030927835e-06,
"loss": 0.0018,
"step": 877
},
{
"epoch": 8.32,
"grad_norm": 0.09599514123515855,
"learning_rate": 2.5360824742268044e-06,
"loss": 0.0011,
"step": 878
},
{
"epoch": 8.33,
"grad_norm": 0.1042080965702281,
"learning_rate": 2.5154639175257734e-06,
"loss": 0.0008,
"step": 879
},
{
"epoch": 8.34,
"grad_norm": 0.0805476860492103,
"learning_rate": 2.4948453608247425e-06,
"loss": 0.0015,
"step": 880
},
{
"epoch": 8.35,
"grad_norm": 0.19711701300084405,
"learning_rate": 2.4742268041237115e-06,
"loss": 0.0025,
"step": 881
},
{
"epoch": 8.36,
"grad_norm": 0.055123808951037204,
"learning_rate": 2.4536082474226805e-06,
"loss": 0.0007,
"step": 882
},
{
"epoch": 8.37,
"grad_norm": 0.04149342152050289,
"learning_rate": 2.4329896907216495e-06,
"loss": 0.0004,
"step": 883
},
{
"epoch": 8.38,
"grad_norm": 0.06270351282009798,
"learning_rate": 2.412371134020619e-06,
"loss": 0.0007,
"step": 884
},
{
"epoch": 8.39,
"grad_norm": 0.05393996609636815,
"learning_rate": 2.391752577319588e-06,
"loss": 0.0008,
"step": 885
},
{
"epoch": 8.4,
"grad_norm": 0.03400127645574442,
"learning_rate": 2.3711340206185566e-06,
"loss": 0.0005,
"step": 886
},
{
"epoch": 8.41,
"grad_norm": 0.22733022249751159,
"learning_rate": 2.350515463917526e-06,
"loss": 0.0014,
"step": 887
},
{
"epoch": 8.42,
"grad_norm": 0.05866248351197364,
"learning_rate": 2.329896907216495e-06,
"loss": 0.0009,
"step": 888
},
{
"epoch": 8.43,
"grad_norm": 0.0855931813183556,
"learning_rate": 2.309278350515464e-06,
"loss": 0.0016,
"step": 889
},
{
"epoch": 8.44,
"grad_norm": 0.12637047947600505,
"learning_rate": 2.288659793814433e-06,
"loss": 0.0016,
"step": 890
},
{
"epoch": 8.45,
"grad_norm": 0.04727536575972343,
"learning_rate": 2.268041237113402e-06,
"loss": 0.0007,
"step": 891
},
{
"epoch": 8.45,
"grad_norm": 0.07846292190621403,
"learning_rate": 2.247422680412371e-06,
"loss": 0.0008,
"step": 892
},
{
"epoch": 8.46,
"grad_norm": 0.06736962908164983,
"learning_rate": 2.2268041237113406e-06,
"loss": 0.0015,
"step": 893
},
{
"epoch": 8.47,
"grad_norm": 0.05270371384592827,
"learning_rate": 2.2061855670103092e-06,
"loss": 0.001,
"step": 894
},
{
"epoch": 8.48,
"grad_norm": 0.09516249464713478,
"learning_rate": 2.1855670103092787e-06,
"loss": 0.0006,
"step": 895
},
{
"epoch": 8.49,
"grad_norm": 0.06091978231251835,
"learning_rate": 2.1649484536082477e-06,
"loss": 0.0013,
"step": 896
},
{
"epoch": 8.5,
"grad_norm": 0.019466262224842438,
"learning_rate": 2.1443298969072167e-06,
"loss": 0.0003,
"step": 897
},
{
"epoch": 8.51,
"grad_norm": 0.24243963868008434,
"learning_rate": 2.1237113402061858e-06,
"loss": 0.0009,
"step": 898
},
{
"epoch": 8.52,
"grad_norm": 0.0945701438322458,
"learning_rate": 2.1030927835051548e-06,
"loss": 0.0004,
"step": 899
},
{
"epoch": 8.53,
"grad_norm": 0.20953662491346972,
"learning_rate": 2.082474226804124e-06,
"loss": 0.002,
"step": 900
},
{
"epoch": 8.54,
"grad_norm": 0.061779322657336,
"learning_rate": 2.061855670103093e-06,
"loss": 0.0006,
"step": 901
},
{
"epoch": 8.55,
"grad_norm": 0.3308993399847209,
"learning_rate": 2.041237113402062e-06,
"loss": 0.005,
"step": 902
},
{
"epoch": 8.56,
"grad_norm": 0.1975396773766454,
"learning_rate": 2.0206185567010313e-06,
"loss": 0.0015,
"step": 903
},
{
"epoch": 8.57,
"grad_norm": 0.045236846439595156,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.0005,
"step": 904
},
{
"epoch": 8.58,
"grad_norm": 0.13398233084050726,
"learning_rate": 1.979381443298969e-06,
"loss": 0.002,
"step": 905
},
{
"epoch": 8.59,
"grad_norm": 0.0622461666714804,
"learning_rate": 1.9587628865979384e-06,
"loss": 0.0007,
"step": 906
},
{
"epoch": 8.6,
"grad_norm": 0.06781721034478837,
"learning_rate": 1.9381443298969074e-06,
"loss": 0.0011,
"step": 907
},
{
"epoch": 8.61,
"grad_norm": 0.13977337963368935,
"learning_rate": 1.9175257731958764e-06,
"loss": 0.001,
"step": 908
},
{
"epoch": 8.62,
"grad_norm": 0.04013178863550451,
"learning_rate": 1.8969072164948455e-06,
"loss": 0.0004,
"step": 909
},
{
"epoch": 8.63,
"grad_norm": 0.11598113924981095,
"learning_rate": 1.8762886597938145e-06,
"loss": 0.0018,
"step": 910
},
{
"epoch": 8.64,
"grad_norm": 0.08230482757730247,
"learning_rate": 1.8556701030927837e-06,
"loss": 0.0011,
"step": 911
},
{
"epoch": 8.64,
"grad_norm": 0.2257864420204737,
"learning_rate": 1.8350515463917525e-06,
"loss": 0.0016,
"step": 912
},
{
"epoch": 8.65,
"grad_norm": 0.09910986042730756,
"learning_rate": 1.8144329896907218e-06,
"loss": 0.001,
"step": 913
},
{
"epoch": 8.66,
"grad_norm": 0.0759242569105602,
"learning_rate": 1.7938144329896908e-06,
"loss": 0.0013,
"step": 914
},
{
"epoch": 8.67,
"grad_norm": 0.029364530605982583,
"learning_rate": 1.77319587628866e-06,
"loss": 0.0004,
"step": 915
},
{
"epoch": 8.68,
"grad_norm": 0.444311422153807,
"learning_rate": 1.7525773195876288e-06,
"loss": 0.0013,
"step": 916
},
{
"epoch": 8.69,
"grad_norm": 0.22028957714382316,
"learning_rate": 1.731958762886598e-06,
"loss": 0.0017,
"step": 917
},
{
"epoch": 8.7,
"grad_norm": 0.06460565522866611,
"learning_rate": 1.7113402061855671e-06,
"loss": 0.0013,
"step": 918
},
{
"epoch": 8.71,
"grad_norm": 0.08558295275203245,
"learning_rate": 1.6907216494845363e-06,
"loss": 0.0012,
"step": 919
},
{
"epoch": 8.72,
"grad_norm": 0.07692536867678362,
"learning_rate": 1.6701030927835052e-06,
"loss": 0.0004,
"step": 920
},
{
"epoch": 8.73,
"grad_norm": 0.0750770629902968,
"learning_rate": 1.6494845360824744e-06,
"loss": 0.0005,
"step": 921
},
{
"epoch": 8.74,
"grad_norm": 0.042917397004941536,
"learning_rate": 1.6288659793814434e-06,
"loss": 0.0007,
"step": 922
},
{
"epoch": 8.75,
"grad_norm": 0.10264347301003988,
"learning_rate": 1.6082474226804127e-06,
"loss": 0.0012,
"step": 923
},
{
"epoch": 8.76,
"grad_norm": 0.22778156766478125,
"learning_rate": 1.5876288659793815e-06,
"loss": 0.0011,
"step": 924
},
{
"epoch": 8.77,
"grad_norm": 0.04758354713208677,
"learning_rate": 1.5670103092783507e-06,
"loss": 0.0006,
"step": 925
},
{
"epoch": 8.78,
"grad_norm": 0.08412047104508713,
"learning_rate": 1.5463917525773197e-06,
"loss": 0.0015,
"step": 926
},
{
"epoch": 8.79,
"grad_norm": 0.034866429835879974,
"learning_rate": 1.525773195876289e-06,
"loss": 0.0002,
"step": 927
},
{
"epoch": 8.8,
"grad_norm": 0.07562133012194057,
"learning_rate": 1.5051546391752578e-06,
"loss": 0.0007,
"step": 928
},
{
"epoch": 8.81,
"grad_norm": 0.0916289355732546,
"learning_rate": 1.484536082474227e-06,
"loss": 0.0009,
"step": 929
},
{
"epoch": 8.82,
"grad_norm": 0.0615359550941953,
"learning_rate": 1.463917525773196e-06,
"loss": 0.0005,
"step": 930
},
{
"epoch": 8.82,
"grad_norm": 0.11577012903135302,
"learning_rate": 1.4432989690721649e-06,
"loss": 0.002,
"step": 931
},
{
"epoch": 8.83,
"grad_norm": 0.11708940930639869,
"learning_rate": 1.422680412371134e-06,
"loss": 0.0014,
"step": 932
},
{
"epoch": 8.84,
"grad_norm": 0.11404794998644863,
"learning_rate": 1.4020618556701031e-06,
"loss": 0.0004,
"step": 933
},
{
"epoch": 8.85,
"grad_norm": 0.05957189933039153,
"learning_rate": 1.3814432989690724e-06,
"loss": 0.0006,
"step": 934
},
{
"epoch": 8.86,
"grad_norm": 0.07382893776320089,
"learning_rate": 1.3608247422680412e-06,
"loss": 0.0017,
"step": 935
},
{
"epoch": 8.87,
"grad_norm": 0.19998944560156712,
"learning_rate": 1.3402061855670104e-06,
"loss": 0.0014,
"step": 936
},
{
"epoch": 8.88,
"grad_norm": 0.06571641767687599,
"learning_rate": 1.3195876288659794e-06,
"loss": 0.0004,
"step": 937
},
{
"epoch": 8.89,
"grad_norm": 0.06311261303979947,
"learning_rate": 1.2989690721649487e-06,
"loss": 0.0005,
"step": 938
},
{
"epoch": 8.9,
"grad_norm": 0.04274947260430576,
"learning_rate": 1.2783505154639175e-06,
"loss": 0.0007,
"step": 939
},
{
"epoch": 8.91,
"grad_norm": 0.18573998892171853,
"learning_rate": 1.2577319587628867e-06,
"loss": 0.0015,
"step": 940
},
{
"epoch": 8.92,
"grad_norm": 0.41425063699409714,
"learning_rate": 1.2371134020618557e-06,
"loss": 0.008,
"step": 941
},
{
"epoch": 8.93,
"grad_norm": 0.16152795833732628,
"learning_rate": 1.2164948453608248e-06,
"loss": 0.0005,
"step": 942
},
{
"epoch": 8.94,
"grad_norm": 0.049126419628053614,
"learning_rate": 1.195876288659794e-06,
"loss": 0.0005,
"step": 943
},
{
"epoch": 8.95,
"grad_norm": 0.0661304405122751,
"learning_rate": 1.175257731958763e-06,
"loss": 0.0009,
"step": 944
},
{
"epoch": 8.96,
"grad_norm": 0.05744751905817848,
"learning_rate": 1.154639175257732e-06,
"loss": 0.0009,
"step": 945
},
{
"epoch": 8.97,
"grad_norm": 0.07837412274647698,
"learning_rate": 1.134020618556701e-06,
"loss": 0.0011,
"step": 946
},
{
"epoch": 8.98,
"grad_norm": 0.03813985224073424,
"learning_rate": 1.1134020618556703e-06,
"loss": 0.0004,
"step": 947
},
{
"epoch": 8.99,
"grad_norm": 0.20785503418246787,
"learning_rate": 1.0927835051546393e-06,
"loss": 0.0009,
"step": 948
},
{
"epoch": 9.0,
"grad_norm": 0.048326297750132946,
"learning_rate": 1.0721649484536084e-06,
"loss": 0.0005,
"step": 949
},
{
"epoch": 9.0,
"grad_norm": 0.040042798519284356,
"learning_rate": 1.0515463917525774e-06,
"loss": 0.0007,
"step": 950
},
{
"epoch": 9.01,
"grad_norm": 0.06873126031951501,
"learning_rate": 1.0309278350515464e-06,
"loss": 0.0004,
"step": 951
},
{
"epoch": 9.02,
"grad_norm": 0.022756536172152075,
"learning_rate": 1.0103092783505157e-06,
"loss": 0.0003,
"step": 952
},
{
"epoch": 9.03,
"grad_norm": 0.01742605318818428,
"learning_rate": 9.896907216494845e-07,
"loss": 0.0002,
"step": 953
},
{
"epoch": 9.04,
"grad_norm": 0.044961233104923864,
"learning_rate": 9.690721649484537e-07,
"loss": 0.0005,
"step": 954
},
{
"epoch": 9.05,
"grad_norm": 0.053048913302749426,
"learning_rate": 9.484536082474227e-07,
"loss": 0.0009,
"step": 955
},
{
"epoch": 9.06,
"grad_norm": 0.04713989946921602,
"learning_rate": 9.278350515463919e-07,
"loss": 0.0006,
"step": 956
},
{
"epoch": 9.07,
"grad_norm": 0.048810056681268664,
"learning_rate": 9.072164948453609e-07,
"loss": 0.001,
"step": 957
},
{
"epoch": 9.08,
"grad_norm": 0.060005585219904824,
"learning_rate": 8.8659793814433e-07,
"loss": 0.0002,
"step": 958
},
{
"epoch": 9.09,
"grad_norm": 0.012673574874247678,
"learning_rate": 8.65979381443299e-07,
"loss": 0.0002,
"step": 959
},
{
"epoch": 9.1,
"grad_norm": 0.026958799974119988,
"learning_rate": 8.453608247422682e-07,
"loss": 0.0004,
"step": 960
},
{
"epoch": 9.11,
"grad_norm": 0.029290447920903767,
"learning_rate": 8.247422680412372e-07,
"loss": 0.0004,
"step": 961
},
{
"epoch": 9.12,
"grad_norm": 0.02798108501852727,
"learning_rate": 8.041237113402063e-07,
"loss": 0.0004,
"step": 962
},
{
"epoch": 9.13,
"grad_norm": 0.04404423494062546,
"learning_rate": 7.835051546391754e-07,
"loss": 0.0005,
"step": 963
},
{
"epoch": 9.14,
"grad_norm": 0.03359906700662011,
"learning_rate": 7.628865979381445e-07,
"loss": 0.0006,
"step": 964
},
{
"epoch": 9.15,
"grad_norm": 0.012391757389303072,
"learning_rate": 7.422680412371135e-07,
"loss": 0.0002,
"step": 965
},
{
"epoch": 9.16,
"grad_norm": 0.030976476789454522,
"learning_rate": 7.216494845360824e-07,
"loss": 0.0003,
"step": 966
},
{
"epoch": 9.17,
"grad_norm": 0.0457425895684649,
"learning_rate": 7.010309278350516e-07,
"loss": 0.0008,
"step": 967
},
{
"epoch": 9.18,
"grad_norm": 0.03739999858386374,
"learning_rate": 6.804123711340206e-07,
"loss": 0.0005,
"step": 968
},
{
"epoch": 9.18,
"grad_norm": 0.12124451742238591,
"learning_rate": 6.597938144329897e-07,
"loss": 0.0006,
"step": 969
},
{
"epoch": 9.19,
"grad_norm": 0.03048681043306536,
"learning_rate": 6.391752577319587e-07,
"loss": 0.0005,
"step": 970
},
{
"epoch": 9.2,
"grad_norm": 0.039000124602355656,
"learning_rate": 6.185567010309279e-07,
"loss": 0.0006,
"step": 971
},
{
"epoch": 9.21,
"grad_norm": 0.02438232504761682,
"learning_rate": 5.97938144329897e-07,
"loss": 0.0003,
"step": 972
},
{
"epoch": 9.22,
"grad_norm": 0.04705775170496308,
"learning_rate": 5.77319587628866e-07,
"loss": 0.0004,
"step": 973
},
{
"epoch": 9.23,
"grad_norm": 0.01254285745016695,
"learning_rate": 5.567010309278352e-07,
"loss": 0.0001,
"step": 974
},
{
"epoch": 9.24,
"grad_norm": 0.022806488469377377,
"learning_rate": 5.360824742268042e-07,
"loss": 0.0003,
"step": 975
},
{
"epoch": 9.25,
"grad_norm": 0.026636190786526362,
"learning_rate": 5.154639175257732e-07,
"loss": 0.0004,
"step": 976
},
{
"epoch": 9.26,
"grad_norm": 0.05255663685963608,
"learning_rate": 4.948453608247422e-07,
"loss": 0.0006,
"step": 977
},
{
"epoch": 9.27,
"grad_norm": 0.03217353033582061,
"learning_rate": 4.7422680412371136e-07,
"loss": 0.0004,
"step": 978
},
{
"epoch": 9.28,
"grad_norm": 0.0664343564073901,
"learning_rate": 4.5360824742268044e-07,
"loss": 0.0005,
"step": 979
},
{
"epoch": 9.29,
"grad_norm": 0.025925295566005847,
"learning_rate": 4.329896907216495e-07,
"loss": 0.0004,
"step": 980
},
{
"epoch": 9.3,
"grad_norm": 0.0292099906116108,
"learning_rate": 4.123711340206186e-07,
"loss": 0.0002,
"step": 981
},
{
"epoch": 9.31,
"grad_norm": 0.03181595101800679,
"learning_rate": 3.917525773195877e-07,
"loss": 0.0005,
"step": 982
},
{
"epoch": 9.32,
"grad_norm": 0.03120848572963688,
"learning_rate": 3.7113402061855675e-07,
"loss": 0.0003,
"step": 983
},
{
"epoch": 9.33,
"grad_norm": 0.04417780596211519,
"learning_rate": 3.505154639175258e-07,
"loss": 0.0006,
"step": 984
},
{
"epoch": 9.34,
"grad_norm": 0.02624121570420308,
"learning_rate": 3.2989690721649486e-07,
"loss": 0.0003,
"step": 985
},
{
"epoch": 9.35,
"grad_norm": 0.03344031943986025,
"learning_rate": 3.0927835051546394e-07,
"loss": 0.0003,
"step": 986
},
{
"epoch": 9.36,
"grad_norm": 0.034761043645872715,
"learning_rate": 2.88659793814433e-07,
"loss": 0.0004,
"step": 987
},
{
"epoch": 9.36,
"grad_norm": 0.020241986442803762,
"learning_rate": 2.680412371134021e-07,
"loss": 0.0002,
"step": 988
},
{
"epoch": 9.37,
"grad_norm": 0.09793444585785131,
"learning_rate": 2.474226804123711e-07,
"loss": 0.0016,
"step": 989
},
{
"epoch": 9.38,
"grad_norm": 0.017940198006010516,
"learning_rate": 2.2680412371134022e-07,
"loss": 0.0002,
"step": 990
},
{
"epoch": 9.39,
"grad_norm": 0.019179236413784675,
"learning_rate": 2.061855670103093e-07,
"loss": 0.0003,
"step": 991
},
{
"epoch": 9.4,
"grad_norm": 0.050718252371269985,
"learning_rate": 1.8556701030927838e-07,
"loss": 0.001,
"step": 992
},
{
"epoch": 9.41,
"grad_norm": 0.037188941896057204,
"learning_rate": 1.6494845360824743e-07,
"loss": 0.0005,
"step": 993
},
{
"epoch": 9.42,
"grad_norm": 0.02557890459582751,
"learning_rate": 1.443298969072165e-07,
"loss": 0.0002,
"step": 994
},
{
"epoch": 9.43,
"grad_norm": 0.06341211642068095,
"learning_rate": 1.2371134020618556e-07,
"loss": 0.0013,
"step": 995
},
{
"epoch": 9.44,
"grad_norm": 0.026816555571157724,
"learning_rate": 1.0309278350515465e-07,
"loss": 0.0004,
"step": 996
},
{
"epoch": 9.45,
"grad_norm": 0.05575356482299228,
"learning_rate": 8.247422680412371e-08,
"loss": 0.0003,
"step": 997
},
{
"epoch": 9.46,
"grad_norm": 0.01973168523765775,
"learning_rate": 6.185567010309278e-08,
"loss": 0.0001,
"step": 998
},
{
"epoch": 9.47,
"grad_norm": 0.029366063831591748,
"learning_rate": 4.123711340206186e-08,
"loss": 0.0005,
"step": 999
},
{
"epoch": 9.48,
"grad_norm": 0.034015978719337966,
"learning_rate": 2.061855670103093e-08,
"loss": 0.0003,
"step": 1000
},
{
"epoch": 9.48,
"step": 1000,
"total_flos": 147433547374592.0,
"train_loss": 0.03500189868769667,
"train_runtime": 49743.0375,
"train_samples_per_second": 0.643,
"train_steps_per_second": 0.02
}
],
"logging_steps": 1.0,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 200,
"total_flos": 147433547374592.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}