Text Generation
Transformers
Safetensors
English
mistral
axolotl
Generated from Trainer
Mistral
instruct
finetune
chatml
gpt4
synthetic data
science
physics
chemistry
biology
math
conversational
Eval Results
Inference Endpoints
text-generation-inference
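The JSON below appears to be the `trainer_state.json` that the Hugging Face `Trainer` writes alongside each checkpoint (the repo is tagged "Generated from Trainer"): its `log_history` array records `step`, `loss`, `learning_rate`, and `grad_norm` for every logged optimizer step. As a minimal sketch, assuming the file has been downloaded locally as `trainer_state.json` and that matplotlib is available, the training-loss curve can be recovered from it like this:

```python
import json

import matplotlib.pyplot as plt

# Load the Trainer state dumped alongside the checkpoint.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only training-log entries (evaluation entries carry "eval_loss" instead of "loss").
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"{state['global_step']} steps, epoch {state['epoch']:.2f}")
plt.savefig("loss_curve.png")  # hypothetical output filename
```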
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9997927461139896,
"eval_steps": 500,
"global_step": 1206,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 27.81778461909011,
"learning_rate": 5.000000000000001e-07,
"loss": 0.7993,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 28.63833175363421,
"learning_rate": 1.0000000000000002e-06,
"loss": 0.9056,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 25.646828828014854,
"learning_rate": 1.5e-06,
"loss": 0.8473,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 9.834124771941388,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.8192,
"step": 4
},
{
"epoch": 0.0,
"grad_norm": 10.558095859980105,
"learning_rate": 2.5e-06,
"loss": 0.7943,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 7.905789045775758,
"learning_rate": 3e-06,
"loss": 0.7075,
"step": 6
},
{
"epoch": 0.01,
"grad_norm": 7.259519170268483,
"learning_rate": 3.5e-06,
"loss": 0.7537,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 6.639042051048664,
"learning_rate": 4.000000000000001e-06,
"loss": 0.7471,
"step": 8
},
{
"epoch": 0.01,
"grad_norm": 8.515070932390074,
"learning_rate": 4.5e-06,
"loss": 0.7689,
"step": 9
},
{
"epoch": 0.01,
"grad_norm": 8.916410424632533,
"learning_rate": 5e-06,
"loss": 0.7194,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 4.835046497413255,
"learning_rate": 4.9999978617243506e-06,
"loss": 0.6949,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 10.065648500649479,
"learning_rate": 4.9999914469010585e-06,
"loss": 0.7039,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 5.299372887839679,
"learning_rate": 4.999980755541098e-06,
"loss": 0.7067,
"step": 13
},
{
"epoch": 0.01,
"grad_norm": 5.693110837094718,
"learning_rate": 4.999965787662758e-06,
"loss": 0.7126,
"step": 14
},
{
"epoch": 0.01,
"grad_norm": 2.983869635716314,
"learning_rate": 4.999946543291642e-06,
"loss": 0.6496,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 4.2561193962441175,
"learning_rate": 4.999923022460671e-06,
"loss": 0.7036,
"step": 16
},
{
"epoch": 0.01,
"grad_norm": 3.011772824968437,
"learning_rate": 4.999895225210079e-06,
"loss": 0.7009,
"step": 17
},
{
"epoch": 0.01,
"grad_norm": 3.386638415717137,
"learning_rate": 4.9998631515874165e-06,
"loss": 0.6624,
"step": 18
},
{
"epoch": 0.02,
"grad_norm": 3.764658092125165,
"learning_rate": 4.999826801647551e-06,
"loss": 0.6687,
"step": 19
},
{
"epoch": 0.02,
"grad_norm": 2.3982096117966614,
"learning_rate": 4.999786175452662e-06,
"loss": 0.706,
"step": 20
},
{
"epoch": 0.02,
"grad_norm": 2.8051633678260193,
"learning_rate": 4.999741273072246e-06,
"loss": 0.7031,
"step": 21
},
{
"epoch": 0.02,
"grad_norm": 3.1177784624332614,
"learning_rate": 4.999692094583114e-06,
"loss": 0.7525,
"step": 22
},
{
"epoch": 0.02,
"grad_norm": 2.2533819675617806,
"learning_rate": 4.9996386400693906e-06,
"loss": 0.6767,
"step": 23
},
{
"epoch": 0.02,
"grad_norm": 2.61893793162573,
"learning_rate": 4.999580909622518e-06,
"loss": 0.6432,
"step": 24
},
{
"epoch": 0.02,
"grad_norm": 2.76057623723569,
"learning_rate": 4.999518903341251e-06,
"loss": 0.6809,
"step": 25
},
{
"epoch": 0.02,
"grad_norm": 2.27983032069553,
"learning_rate": 4.999452621331657e-06,
"loss": 0.6798,
"step": 26
},
{
"epoch": 0.02,
"grad_norm": 2.501904568120582,
"learning_rate": 4.99938206370712e-06,
"loss": 0.6412,
"step": 27
},
{
"epoch": 0.02,
"grad_norm": 2.819229290729669,
"learning_rate": 4.999307230588338e-06,
"loss": 0.6188,
"step": 28
},
{
"epoch": 0.02,
"grad_norm": 2.1233212322022212,
"learning_rate": 4.9992281221033224e-06,
"loss": 0.6378,
"step": 29
},
{
"epoch": 0.02,
"grad_norm": 2.7806911906686755,
"learning_rate": 4.999144738387396e-06,
"loss": 0.6653,
"step": 30
},
{
"epoch": 0.03,
"grad_norm": 2.4045490257014563,
"learning_rate": 4.999057079583199e-06,
"loss": 0.6377,
"step": 31
},
{
"epoch": 0.03,
"grad_norm": 2.3803717769210446,
"learning_rate": 4.998965145840681e-06,
"loss": 0.6855,
"step": 32
},
{
"epoch": 0.03,
"grad_norm": 2.3976652879633473,
"learning_rate": 4.998868937317106e-06,
"loss": 0.6284,
"step": 33
},
{
"epoch": 0.03,
"grad_norm": 2.2958541157119727,
"learning_rate": 4.998768454177051e-06,
"loss": 0.6521,
"step": 34
},
{
"epoch": 0.03,
"grad_norm": 2.1925196833696154,
"learning_rate": 4.998663696592403e-06,
"loss": 0.6619,
"step": 35
},
{
"epoch": 0.03,
"grad_norm": 2.361006042901851,
"learning_rate": 4.998554664742362e-06,
"loss": 0.6155,
"step": 36
},
{
"epoch": 0.03,
"grad_norm": 2.1577758143653614,
"learning_rate": 4.998441358813443e-06,
"loss": 0.6398,
"step": 37
},
{
"epoch": 0.03,
"grad_norm": 2.219872074512664,
"learning_rate": 4.998323778999467e-06,
"loss": 0.6051,
"step": 38
},
{
"epoch": 0.03,
"grad_norm": 2.2907501521408546,
"learning_rate": 4.9982019255015705e-06,
"loss": 0.6337,
"step": 39
},
{
"epoch": 0.03,
"grad_norm": 2.1769862324666183,
"learning_rate": 4.9980757985281955e-06,
"loss": 0.6606,
"step": 40
},
{
"epoch": 0.03,
"grad_norm": 2.4252479779661607,
"learning_rate": 4.997945398295101e-06,
"loss": 0.6685,
"step": 41
},
{
"epoch": 0.03,
"grad_norm": 2.3929541982084657,
"learning_rate": 4.99781072502535e-06,
"loss": 0.6084,
"step": 42
},
{
"epoch": 0.04,
"grad_norm": 1.932539969840091,
"learning_rate": 4.997671778949318e-06,
"loss": 0.6123,
"step": 43
},
{
"epoch": 0.04,
"grad_norm": 2.191742541327873,
"learning_rate": 4.997528560304688e-06,
"loss": 0.6247,
"step": 44
},
{
"epoch": 0.04,
"grad_norm": 2.423376784566499,
"learning_rate": 4.997381069336455e-06,
"loss": 0.7024,
"step": 45
},
{
"epoch": 0.04,
"grad_norm": 2.0599055392481076,
"learning_rate": 4.997229306296918e-06,
"loss": 0.6612,
"step": 46
},
{
"epoch": 0.04,
"grad_norm": 2.16832922087532,
"learning_rate": 4.997073271445686e-06,
"loss": 0.5949,
"step": 47
},
{
"epoch": 0.04,
"grad_norm": 2.0483598654319453,
"learning_rate": 4.9969129650496775e-06,
"loss": 0.6406,
"step": 48
},
{
"epoch": 0.04,
"grad_norm": 1.963056609139284,
"learning_rate": 4.996748387383113e-06,
"loss": 0.6361,
"step": 49
},
{
"epoch": 0.04,
"grad_norm": 2.2094923844269307,
"learning_rate": 4.996579538727527e-06,
"loss": 0.5901,
"step": 50
},
{
"epoch": 0.04,
"grad_norm": 2.1088153449411857,
"learning_rate": 4.996406419371749e-06,
"loss": 0.6458,
"step": 51
},
{
"epoch": 0.04,
"grad_norm": 2.093448940617732,
"learning_rate": 4.996229029611926e-06,
"loss": 0.6509,
"step": 52
},
{
"epoch": 0.04,
"grad_norm": 2.075116207412987,
"learning_rate": 4.996047369751502e-06,
"loss": 0.6295,
"step": 53
},
{
"epoch": 0.04,
"grad_norm": 2.138141165277684,
"learning_rate": 4.995861440101229e-06,
"loss": 0.6088,
"step": 54
},
{
"epoch": 0.05,
"grad_norm": 2.186316382848445,
"learning_rate": 4.995671240979161e-06,
"loss": 0.6307,
"step": 55
},
{
"epoch": 0.05,
"grad_norm": 2.2513741083982195,
"learning_rate": 4.995476772710657e-06,
"loss": 0.6175,
"step": 56
},
{
"epoch": 0.05,
"grad_norm": 2.0827167336870596,
"learning_rate": 4.995278035628379e-06,
"loss": 0.5935,
"step": 57
},
{
"epoch": 0.05,
"grad_norm": 2.117977588574442,
"learning_rate": 4.995075030072291e-06,
"loss": 0.5998,
"step": 58
},
{
"epoch": 0.05,
"grad_norm": 2.0996940200235485,
"learning_rate": 4.994867756389658e-06,
"loss": 0.6159,
"step": 59
},
{
"epoch": 0.05,
"grad_norm": 2.141096165691323,
"learning_rate": 4.994656214935045e-06,
"loss": 0.6294,
"step": 60
},
{
"epoch": 0.05,
"grad_norm": 2.022748830058395,
"learning_rate": 4.994440406070323e-06,
"loss": 0.6315,
"step": 61
},
{
"epoch": 0.05,
"grad_norm": 2.209132168720991,
"learning_rate": 4.994220330164654e-06,
"loss": 0.5645,
"step": 62
},
{
"epoch": 0.05,
"grad_norm": 2.0994557317862674,
"learning_rate": 4.993995987594509e-06,
"loss": 0.6272,
"step": 63
},
{
"epoch": 0.05,
"grad_norm": 2.204220831053169,
"learning_rate": 4.99376737874365e-06,
"loss": 0.6379,
"step": 64
},
{
"epoch": 0.05,
"grad_norm": 2.127733932186697,
"learning_rate": 4.993534504003141e-06,
"loss": 0.622,
"step": 65
},
{
"epoch": 0.05,
"grad_norm": 2.1338506582034316,
"learning_rate": 4.993297363771342e-06,
"loss": 0.6259,
"step": 66
},
{
"epoch": 0.06,
"grad_norm": 2.104802764460729,
"learning_rate": 4.993055958453912e-06,
"loss": 0.6414,
"step": 67
},
{
"epoch": 0.06,
"grad_norm": 2.0889535347771675,
"learning_rate": 4.9928102884638004e-06,
"loss": 0.6466,
"step": 68
},
{
"epoch": 0.06,
"grad_norm": 2.252225316694296,
"learning_rate": 4.992560354221258e-06,
"loss": 0.6167,
"step": 69
},
{
"epoch": 0.06,
"grad_norm": 2.015392533516649,
"learning_rate": 4.992306156153827e-06,
"loss": 0.5958,
"step": 70
},
{
"epoch": 0.06,
"grad_norm": 2.151741408948778,
"learning_rate": 4.992047694696343e-06,
"loss": 0.5875,
"step": 71
},
{
"epoch": 0.06,
"grad_norm": 2.0351299117412696,
"learning_rate": 4.991784970290935e-06,
"loss": 0.5935,
"step": 72
},
{
"epoch": 0.06,
"grad_norm": 2.0000962363827983,
"learning_rate": 4.991517983387026e-06,
"loss": 0.6091,
"step": 73
},
{
"epoch": 0.06,
"grad_norm": 2.202881736102415,
"learning_rate": 4.99124673444133e-06,
"loss": 0.6122,
"step": 74
},
{
"epoch": 0.06,
"grad_norm": 2.015074773396151,
"learning_rate": 4.990971223917848e-06,
"loss": 0.6134,
"step": 75
},
{
"epoch": 0.06,
"grad_norm": 2.009305960567766,
"learning_rate": 4.990691452287877e-06,
"loss": 0.6308,
"step": 76
},
{
"epoch": 0.06,
"grad_norm": 1.9967884756310221,
"learning_rate": 4.990407420029999e-06,
"loss": 0.6098,
"step": 77
},
{
"epoch": 0.06,
"grad_norm": 2.0858738033925905,
"learning_rate": 4.990119127630085e-06,
"loss": 0.6344,
"step": 78
},
{
"epoch": 0.07,
"grad_norm": 1.9427707561903895,
"learning_rate": 4.989826575581295e-06,
"loss": 0.6049,
"step": 79
},
{
"epoch": 0.07,
"grad_norm": 2.157150584766853,
"learning_rate": 4.989529764384073e-06,
"loss": 0.5965,
"step": 80
},
{
"epoch": 0.07,
"grad_norm": 2.0303527419352583,
"learning_rate": 4.989228694546151e-06,
"loss": 0.6524,
"step": 81
},
{
"epoch": 0.07,
"grad_norm": 2.128799919475717,
"learning_rate": 4.988923366582546e-06,
"loss": 0.5524,
"step": 82
},
{
"epoch": 0.07,
"grad_norm": 2.0122786280510696,
"learning_rate": 4.988613781015557e-06,
"loss": 0.6268,
"step": 83
},
{
"epoch": 0.07,
"grad_norm": 2.104580177719229,
"learning_rate": 4.988299938374769e-06,
"loss": 0.6229,
"step": 84
},
{
"epoch": 0.07,
"grad_norm": 2.3894843860356834,
"learning_rate": 4.9879818391970455e-06,
"loss": 0.6194,
"step": 85
},
{
"epoch": 0.07,
"grad_norm": 1.9615211372441477,
"learning_rate": 4.9876594840265355e-06,
"loss": 0.6355,
"step": 86
},
{
"epoch": 0.07,
"grad_norm": 2.4509852093141937,
"learning_rate": 4.987332873414666e-06,
"loss": 0.6405,
"step": 87
},
{
"epoch": 0.07,
"grad_norm": 2.178942375285086,
"learning_rate": 4.987002007920142e-06,
"loss": 0.5593,
"step": 88
},
{
"epoch": 0.07,
"grad_norm": 2.2625634345900445,
"learning_rate": 4.9866668881089515e-06,
"loss": 0.6133,
"step": 89
},
{
"epoch": 0.07,
"grad_norm": 2.363092638811143,
"learning_rate": 4.986327514554356e-06,
"loss": 0.6298,
"step": 90
},
{
"epoch": 0.08,
"grad_norm": 2.0401982492138546,
"learning_rate": 4.985983887836894e-06,
"loss": 0.6276,
"step": 91
},
{
"epoch": 0.08,
"grad_norm": 2.276956647922478,
"learning_rate": 4.985636008544381e-06,
"loss": 0.5691,
"step": 92
},
{
"epoch": 0.08,
"grad_norm": 2.1072762844110233,
"learning_rate": 4.985283877271908e-06,
"loss": 0.6175,
"step": 93
},
{
"epoch": 0.08,
"grad_norm": 2.2931866879442637,
"learning_rate": 4.984927494621836e-06,
"loss": 0.6419,
"step": 94
},
{
"epoch": 0.08,
"grad_norm": 2.112474101166308,
"learning_rate": 4.984566861203801e-06,
"loss": 0.607,
"step": 95
},
{
"epoch": 0.08,
"grad_norm": 2.1816059679212634,
"learning_rate": 4.984201977634711e-06,
"loss": 0.6136,
"step": 96
},
{
"epoch": 0.08,
"grad_norm": 2.0620776369966554,
"learning_rate": 4.9838328445387415e-06,
"loss": 0.6372,
"step": 97
},
{
"epoch": 0.08,
"grad_norm": 2.147592836641578,
"learning_rate": 4.983459462547341e-06,
"loss": 0.606,
"step": 98
},
{
"epoch": 0.08,
"grad_norm": 2.1808001877062453,
"learning_rate": 4.983081832299224e-06,
"loss": 0.6019,
"step": 99
},
{
"epoch": 0.08,
"grad_norm": 2.3751999527114087,
"learning_rate": 4.98269995444037e-06,
"loss": 0.6021,
"step": 100
},
{
"epoch": 0.08,
"grad_norm": 1.8769470206406913,
"learning_rate": 4.98231382962403e-06,
"loss": 0.6082,
"step": 101
},
{
"epoch": 0.08,
"grad_norm": 2.3060925784921347,
"learning_rate": 4.981923458510717e-06,
"loss": 0.6174,
"step": 102
},
{
"epoch": 0.09,
"grad_norm": 2.1543176832473683,
"learning_rate": 4.981528841768206e-06,
"loss": 0.6092,
"step": 103
},
{
"epoch": 0.09,
"grad_norm": 2.1558689520522547,
"learning_rate": 4.981129980071538e-06,
"loss": 0.587,
"step": 104
},
{
"epoch": 0.09,
"grad_norm": 2.3830532005188383,
"learning_rate": 4.980726874103014e-06,
"loss": 0.6518,
"step": 105
},
{
"epoch": 0.09,
"grad_norm": 2.3333119576634767,
"learning_rate": 4.980319524552195e-06,
"loss": 0.6096,
"step": 106
},
{
"epoch": 0.09,
"grad_norm": 2.1135146855324214,
"learning_rate": 4.9799079321159e-06,
"loss": 0.5728,
"step": 107
},
{
"epoch": 0.09,
"grad_norm": 2.2300463384326394,
"learning_rate": 4.9794920974982095e-06,
"loss": 0.6563,
"step": 108
},
{
"epoch": 0.09,
"grad_norm": 2.1745234017525443,
"learning_rate": 4.979072021410458e-06,
"loss": 0.5968,
"step": 109
},
{
"epoch": 0.09,
"grad_norm": 2.1536586182562334,
"learning_rate": 4.978647704571237e-06,
"loss": 0.6189,
"step": 110
},
{
"epoch": 0.09,
"grad_norm": 2.193809374687326,
"learning_rate": 4.97821914770639e-06,
"loss": 0.5864,
"step": 111
},
{
"epoch": 0.09,
"grad_norm": 2.0525896373682047,
"learning_rate": 4.977786351549017e-06,
"loss": 0.6101,
"step": 112
},
{
"epoch": 0.09,
"grad_norm": 2.216099286618384,
"learning_rate": 4.977349316839467e-06,
"loss": 0.5984,
"step": 113
},
{
"epoch": 0.09,
"grad_norm": 2.155122255962579,
"learning_rate": 4.97690804432534e-06,
"loss": 0.6311,
"step": 114
},
{
"epoch": 0.1,
"grad_norm": 2.2972101190291374,
"learning_rate": 4.976462534761487e-06,
"loss": 0.5813,
"step": 115
},
{
"epoch": 0.1,
"grad_norm": 1.9925413745245948,
"learning_rate": 4.9760127889100044e-06,
"loss": 0.6157,
"step": 116
},
{
"epoch": 0.1,
"grad_norm": 2.2802548684036568,
"learning_rate": 4.975558807540238e-06,
"loss": 0.6079,
"step": 117
},
{
"epoch": 0.1,
"grad_norm": 2.048888007394621,
"learning_rate": 4.9751005914287775e-06,
"loss": 0.6467,
"step": 118
},
{
"epoch": 0.1,
"grad_norm": 2.28661640438254,
"learning_rate": 4.974638141359456e-06,
"loss": 0.6029,
"step": 119
},
{
"epoch": 0.1,
"grad_norm": 2.004056683755783,
"learning_rate": 4.974171458123351e-06,
"loss": 0.6289,
"step": 120
},
{
"epoch": 0.1,
"grad_norm": 2.1628470048067667,
"learning_rate": 4.97370054251878e-06,
"loss": 0.6139,
"step": 121
},
{
"epoch": 0.1,
"grad_norm": 2.056119895466544,
"learning_rate": 4.9732253953513e-06,
"loss": 0.5798,
"step": 122
},
{
"epoch": 0.1,
"grad_norm": 2.1716513163164275,
"learning_rate": 4.972746017433709e-06,
"loss": 0.6085,
"step": 123
},
{
"epoch": 0.1,
"grad_norm": 2.255856676525811,
"learning_rate": 4.97226240958604e-06,
"loss": 0.6342,
"step": 124
},
{
"epoch": 0.1,
"grad_norm": 2.1049280498075373,
"learning_rate": 4.971774572635563e-06,
"loss": 0.6197,
"step": 125
},
{
"epoch": 0.1,
"grad_norm": 2.133349390995361,
"learning_rate": 4.97128250741678e-06,
"loss": 0.5751,
"step": 126
},
{
"epoch": 0.11,
"grad_norm": 2.2044887467317578,
"learning_rate": 4.97078621477143e-06,
"loss": 0.6611,
"step": 127
},
{
"epoch": 0.11,
"grad_norm": 2.1413863795698145,
"learning_rate": 4.970285695548481e-06,
"loss": 0.625,
"step": 128
},
{
"epoch": 0.11,
"grad_norm": 2.0229587336296615,
"learning_rate": 4.969780950604132e-06,
"loss": 0.5989,
"step": 129
},
{
"epoch": 0.11,
"grad_norm": 2.0983599595244247,
"learning_rate": 4.969271980801808e-06,
"loss": 0.5747,
"step": 130
},
{
"epoch": 0.11,
"grad_norm": 2.1059041140010786,
"learning_rate": 4.9687587870121645e-06,
"loss": 0.5869,
"step": 131
},
{
"epoch": 0.11,
"grad_norm": 1.8967441614595046,
"learning_rate": 4.9682413701130815e-06,
"loss": 0.6272,
"step": 132
},
{
"epoch": 0.11,
"grad_norm": 1.9976164993621088,
"learning_rate": 4.967719730989663e-06,
"loss": 0.6282,
"step": 133
},
{
"epoch": 0.11,
"grad_norm": 1.8719131324952145,
"learning_rate": 4.967193870534235e-06,
"loss": 0.6052,
"step": 134
},
{
"epoch": 0.11,
"grad_norm": 2.071702997476533,
"learning_rate": 4.9666637896463455e-06,
"loss": 0.5785,
"step": 135
},
{
"epoch": 0.11,
"grad_norm": 1.9549455320048341,
"learning_rate": 4.966129489232762e-06,
"loss": 0.5739,
"step": 136
},
{
"epoch": 0.11,
"grad_norm": 2.0656898626759315,
"learning_rate": 4.9655909702074684e-06,
"loss": 0.6651,
"step": 137
},
{
"epoch": 0.11,
"grad_norm": 2.1185948604203038,
"learning_rate": 4.965048233491669e-06,
"loss": 0.5759,
"step": 138
},
{
"epoch": 0.12,
"grad_norm": 2.08566019272993,
"learning_rate": 4.964501280013777e-06,
"loss": 0.6271,
"step": 139
},
{
"epoch": 0.12,
"grad_norm": 2.117420903965419,
"learning_rate": 4.963950110709425e-06,
"loss": 0.5968,
"step": 140
},
{
"epoch": 0.12,
"grad_norm": 1.9784944143818486,
"learning_rate": 4.963394726521453e-06,
"loss": 0.6112,
"step": 141
},
{
"epoch": 0.12,
"grad_norm": 2.077292948039572,
"learning_rate": 4.9628351283999144e-06,
"loss": 0.5636,
"step": 142
},
{
"epoch": 0.12,
"grad_norm": 2.223803520245629,
"learning_rate": 4.962271317302068e-06,
"loss": 0.6658,
"step": 143
},
{
"epoch": 0.12,
"grad_norm": 2.039369072186367,
"learning_rate": 4.9617032941923796e-06,
"loss": 0.5853,
"step": 144
},
{
"epoch": 0.12,
"grad_norm": 2.071470113085907,
"learning_rate": 4.961131060042522e-06,
"loss": 0.601,
"step": 145
},
{
"epoch": 0.12,
"grad_norm": 2.437470272347474,
"learning_rate": 4.960554615831372e-06,
"loss": 0.6593,
"step": 146
},
{
"epoch": 0.12,
"grad_norm": 2.178684122927139,
"learning_rate": 4.959973962545005e-06,
"loss": 0.607,
"step": 147
},
{
"epoch": 0.12,
"grad_norm": 2.097006749956471,
"learning_rate": 4.9593891011767e-06,
"loss": 0.5873,
"step": 148
},
{
"epoch": 0.12,
"grad_norm": 1.9801202541822784,
"learning_rate": 4.958800032726931e-06,
"loss": 0.5877,
"step": 149
},
{
"epoch": 0.12,
"grad_norm": 2.30001951085656,
"learning_rate": 4.958206758203373e-06,
"loss": 0.6368,
"step": 150
},
{
"epoch": 0.13,
"grad_norm": 1.990094260131078,
"learning_rate": 4.957609278620891e-06,
"loss": 0.59,
"step": 151
},
{
"epoch": 0.13,
"grad_norm": 2.262163752076628,
"learning_rate": 4.957007595001548e-06,
"loss": 0.5779,
"step": 152
},
{
"epoch": 0.13,
"grad_norm": 2.1970152093220983,
"learning_rate": 4.956401708374595e-06,
"loss": 0.5894,
"step": 153
},
{
"epoch": 0.13,
"grad_norm": 2.220825872684071,
"learning_rate": 4.9557916197764745e-06,
"loss": 0.6528,
"step": 154
},
{
"epoch": 0.13,
"grad_norm": 2.099472677591387,
"learning_rate": 4.955177330250817e-06,
"loss": 0.5798,
"step": 155
},
{
"epoch": 0.13,
"grad_norm": 2.159203936881569,
"learning_rate": 4.954558840848437e-06,
"loss": 0.6206,
"step": 156
},
{
"epoch": 0.13,
"grad_norm": 2.185152414039555,
"learning_rate": 4.953936152627338e-06,
"loss": 0.5624,
"step": 157
},
{
"epoch": 0.13,
"grad_norm": 2.0679748168992624,
"learning_rate": 4.953309266652701e-06,
"loss": 0.5859,
"step": 158
},
{
"epoch": 0.13,
"grad_norm": 2.327237187255128,
"learning_rate": 4.952678183996891e-06,
"loss": 0.5632,
"step": 159
},
{
"epoch": 0.13,
"grad_norm": 2.2865519679977417,
"learning_rate": 4.952042905739451e-06,
"loss": 0.6965,
"step": 160
},
{
"epoch": 0.13,
"grad_norm": 2.523435408018699,
"learning_rate": 4.9514034329671e-06,
"loss": 0.6217,
"step": 161
},
{
"epoch": 0.13,
"grad_norm": 2.4992653226709636,
"learning_rate": 4.950759766773734e-06,
"loss": 0.6175,
"step": 162
},
{
"epoch": 0.14,
"grad_norm": 2.432752824777114,
"learning_rate": 4.950111908260423e-06,
"loss": 0.5862,
"step": 163
},
{
"epoch": 0.14,
"grad_norm": 2.137500912204061,
"learning_rate": 4.949459858535404e-06,
"loss": 0.6124,
"step": 164
},
{
"epoch": 0.14,
"grad_norm": 2.2226376224120474,
"learning_rate": 4.94880361871409e-06,
"loss": 0.5891,
"step": 165
},
{
"epoch": 0.14,
"grad_norm": 2.3821839805775165,
"learning_rate": 4.9481431899190544e-06,
"loss": 0.6008,
"step": 166
},
{
"epoch": 0.14,
"grad_norm": 2.306242834684614,
"learning_rate": 4.947478573280044e-06,
"loss": 0.6159,
"step": 167
},
{
"epoch": 0.14,
"grad_norm": 2.3298092236851518,
"learning_rate": 4.946809769933963e-06,
"loss": 0.5809,
"step": 168
},
{
"epoch": 0.14,
"grad_norm": 2.364296499621558,
"learning_rate": 4.946136781024883e-06,
"loss": 0.5895,
"step": 169
},
{
"epoch": 0.14,
"grad_norm": 2.237241095609228,
"learning_rate": 4.945459607704029e-06,
"loss": 0.6144,
"step": 170
},
{
"epoch": 0.14,
"grad_norm": 2.4027419761972264,
"learning_rate": 4.9447782511297905e-06,
"loss": 0.5985,
"step": 171
},
{
"epoch": 0.14,
"grad_norm": 2.1547059182244284,
"learning_rate": 4.944092712467709e-06,
"loss": 0.5763,
"step": 172
},
{
"epoch": 0.14,
"grad_norm": 2.1530221667047984,
"learning_rate": 4.9434029928904805e-06,
"loss": 0.5692,
"step": 173
},
{
"epoch": 0.14,
"grad_norm": 2.228588593294869,
"learning_rate": 4.942709093577954e-06,
"loss": 0.5896,
"step": 174
},
{
"epoch": 0.15,
"grad_norm": 2.1597295307130198,
"learning_rate": 4.942011015717129e-06,
"loss": 0.5864,
"step": 175
},
{
"epoch": 0.15,
"grad_norm": 2.321140955498194,
"learning_rate": 4.941308760502149e-06,
"loss": 0.6089,
"step": 176
},
{
"epoch": 0.15,
"grad_norm": 2.220124736460707,
"learning_rate": 4.940602329134309e-06,
"loss": 0.5786,
"step": 177
},
{
"epoch": 0.15,
"grad_norm": 2.1698038563080417,
"learning_rate": 4.939891722822043e-06,
"loss": 0.5749,
"step": 178
},
{
"epoch": 0.15,
"grad_norm": 2.244425969121411,
"learning_rate": 4.93917694278093e-06,
"loss": 0.5877,
"step": 179
},
{
"epoch": 0.15,
"grad_norm": 2.143920008069458,
"learning_rate": 4.938457990233687e-06,
"loss": 0.6024,
"step": 180
},
{
"epoch": 0.15,
"grad_norm": 2.1786040820345813,
"learning_rate": 4.937734866410169e-06,
"loss": 0.5845,
"step": 181
},
{
"epoch": 0.15,
"grad_norm": 2.301832824481007,
"learning_rate": 4.9370075725473665e-06,
"loss": 0.6182,
"step": 182
},
{
"epoch": 0.15,
"grad_norm": 2.3748033727083997,
"learning_rate": 4.936276109889403e-06,
"loss": 0.6073,
"step": 183
},
{
"epoch": 0.15,
"grad_norm": 2.476334487382023,
"learning_rate": 4.935540479687534e-06,
"loss": 0.5793,
"step": 184
},
{
"epoch": 0.15,
"grad_norm": 2.2509466352322494,
"learning_rate": 4.934800683200143e-06,
"loss": 0.6133,
"step": 185
},
{
"epoch": 0.15,
"grad_norm": 2.8391697547684873,
"learning_rate": 4.934056721692742e-06,
"loss": 0.5967,
"step": 186
},
{
"epoch": 0.16,
"grad_norm": 2.4492364225391765,
"learning_rate": 4.933308596437965e-06,
"loss": 0.5676,
"step": 187
},
{
"epoch": 0.16,
"grad_norm": 2.685548141821295,
"learning_rate": 4.932556308715573e-06,
"loss": 0.6069,
"step": 188
},
{
"epoch": 0.16,
"grad_norm": 2.261217637824808,
"learning_rate": 4.931799859812443e-06,
"loss": 0.6411,
"step": 189
},
{
"epoch": 0.16,
"grad_norm": 2.3838284395200966,
"learning_rate": 4.931039251022573e-06,
"loss": 0.5745,
"step": 190
},
{
"epoch": 0.16,
"grad_norm": 2.2550921344466164,
"learning_rate": 4.930274483647074e-06,
"loss": 0.5989,
"step": 191
},
{
"epoch": 0.16,
"grad_norm": 2.078406234527636,
"learning_rate": 4.929505558994175e-06,
"loss": 0.5998,
"step": 192
},
{
"epoch": 0.16,
"grad_norm": 2.592864566091496,
"learning_rate": 4.928732478379214e-06,
"loss": 0.5842,
"step": 193
},
{
"epoch": 0.16,
"grad_norm": 2.092752299259724,
"learning_rate": 4.927955243124638e-06,
"loss": 0.5789,
"step": 194
},
{
"epoch": 0.16,
"grad_norm": 2.3799311595696966,
"learning_rate": 4.927173854560002e-06,
"loss": 0.6265,
"step": 195
},
{
"epoch": 0.16,
"grad_norm": 2.246876688010602,
"learning_rate": 4.926388314021964e-06,
"loss": 0.6126,
"step": 196
},
{
"epoch": 0.16,
"grad_norm": 2.1409898276704578,
"learning_rate": 4.925598622854287e-06,
"loss": 0.6073,
"step": 197
},
{
"epoch": 0.16,
"grad_norm": 2.5946158421875385,
"learning_rate": 4.924804782407834e-06,
"loss": 0.6154,
"step": 198
},
{
"epoch": 0.16,
"grad_norm": 2.1225494320427982,
"learning_rate": 4.924006794040562e-06,
"loss": 0.583,
"step": 199
},
{
"epoch": 0.17,
"grad_norm": 2.1971323526291338,
"learning_rate": 4.923204659117528e-06,
"loss": 0.6078,
"step": 200
},
{
"epoch": 0.17,
"grad_norm": 2.289185506404785,
"learning_rate": 4.92239837901088e-06,
"loss": 0.6127,
"step": 201
},
{
"epoch": 0.17,
"grad_norm": 2.0071007751625354,
"learning_rate": 4.921587955099858e-06,
"loss": 0.5804,
"step": 202
},
{
"epoch": 0.17,
"grad_norm": 2.2981840149068247,
"learning_rate": 4.920773388770789e-06,
"loss": 0.6027,
"step": 203
},
{
"epoch": 0.17,
"grad_norm": 2.236179116886702,
"learning_rate": 4.919954681417087e-06,
"loss": 0.6179,
"step": 204
},
{
"epoch": 0.17,
"grad_norm": 2.007422589251611,
"learning_rate": 4.91913183443925e-06,
"loss": 0.5647,
"step": 205
},
{
"epoch": 0.17,
"grad_norm": 2.1402813555735483,
"learning_rate": 4.918304849244857e-06,
"loss": 0.5841,
"step": 206
},
{
"epoch": 0.17,
"grad_norm": 2.0456415785177104,
"learning_rate": 4.917473727248565e-06,
"loss": 0.5524,
"step": 207
},
{
"epoch": 0.17,
"grad_norm": 1.9673558126020942,
"learning_rate": 4.916638469872109e-06,
"loss": 0.5698,
"step": 208
},
{
"epoch": 0.17,
"grad_norm": 2.015111672496819,
"learning_rate": 4.9157990785442964e-06,
"loss": 0.5957,
"step": 209
},
{
"epoch": 0.17,
"grad_norm": 1.9502065547578398,
"learning_rate": 4.9149555547010086e-06,
"loss": 0.5592,
"step": 210
},
{
"epoch": 0.17,
"grad_norm": 2.167936522558899,
"learning_rate": 4.9141078997851945e-06,
"loss": 0.5705,
"step": 211
},
{
"epoch": 0.18,
"grad_norm": 2.2066587458997935,
"learning_rate": 4.91325611524687e-06,
"loss": 0.5526,
"step": 212
},
{
"epoch": 0.18,
"grad_norm": 1.9132995625903553,
"learning_rate": 4.9124002025431136e-06,
"loss": 0.5767,
"step": 213
},
{
"epoch": 0.18,
"grad_norm": 2.0097281107801277,
"learning_rate": 4.91154016313807e-06,
"loss": 0.6185,
"step": 214
},
{
"epoch": 0.18,
"grad_norm": 2.023532008241332,
"learning_rate": 4.910675998502938e-06,
"loss": 0.6005,
"step": 215
},
{
"epoch": 0.18,
"grad_norm": 1.9253831001776973,
"learning_rate": 4.909807710115977e-06,
"loss": 0.5769,
"step": 216
},
{
"epoch": 0.18,
"grad_norm": 2.066862408842564,
"learning_rate": 4.908935299462497e-06,
"loss": 0.5671,
"step": 217
},
{
"epoch": 0.18,
"grad_norm": 1.9412704290792853,
"learning_rate": 4.908058768034862e-06,
"loss": 0.5568,
"step": 218
},
{
"epoch": 0.18,
"grad_norm": 2.185994457097553,
"learning_rate": 4.907178117332487e-06,
"loss": 0.5621,
"step": 219
},
{
"epoch": 0.18,
"grad_norm": 2.021517127546353,
"learning_rate": 4.906293348861829e-06,
"loss": 0.5672,
"step": 220
},
{
"epoch": 0.18,
"grad_norm": 2.099703967072734,
"learning_rate": 4.905404464136391e-06,
"loss": 0.5366,
"step": 221
},
{
"epoch": 0.18,
"grad_norm": 2.030197056583618,
"learning_rate": 4.904511464676718e-06,
"loss": 0.6064,
"step": 222
},
{
"epoch": 0.18,
"grad_norm": 2.4170102988954896,
"learning_rate": 4.903614352010393e-06,
"loss": 0.5919,
"step": 223
},
{
"epoch": 0.19,
"grad_norm": 2.0819468873015476,
"learning_rate": 4.9027131276720355e-06,
"loss": 0.5366,
"step": 224
},
{
"epoch": 0.19,
"grad_norm": 2.148008018153629,
"learning_rate": 4.901807793203299e-06,
"loss": 0.597,
"step": 225
},
{
"epoch": 0.19,
"grad_norm": 2.0303725862017186,
"learning_rate": 4.900898350152866e-06,
"loss": 0.6394,
"step": 226
},
{
"epoch": 0.19,
"grad_norm": 2.1598989214704334,
"learning_rate": 4.899984800076449e-06,
"loss": 0.5932,
"step": 227
},
{
"epoch": 0.19,
"grad_norm": 2.0816312637185255,
"learning_rate": 4.899067144536786e-06,
"loss": 0.5909,
"step": 228
},
{
"epoch": 0.19,
"grad_norm": 1.9024067197329315,
"learning_rate": 4.8981453851036365e-06,
"loss": 0.5463,
"step": 229
},
{
"epoch": 0.19,
"grad_norm": 2.1830926868871043,
"learning_rate": 4.897219523353781e-06,
"loss": 0.5821,
"step": 230
},
{
"epoch": 0.19,
"grad_norm": 2.1156269612794016,
"learning_rate": 4.8962895608710195e-06,
"loss": 0.5993,
"step": 231
},
{
"epoch": 0.19,
"grad_norm": 1.9653407654210864,
"learning_rate": 4.895355499246162e-06,
"loss": 0.5525,
"step": 232
},
{
"epoch": 0.19,
"grad_norm": 2.367769051061897,
"learning_rate": 4.894417340077036e-06,
"loss": 0.5683,
"step": 233
},
{
"epoch": 0.19,
"grad_norm": 2.078327064466567,
"learning_rate": 4.893475084968474e-06,
"loss": 0.6184,
"step": 234
},
{
"epoch": 0.19,
"grad_norm": 2.1661882731589475,
"learning_rate": 4.8925287355323195e-06,
"loss": 0.6321,
"step": 235
},
{
"epoch": 0.2,
"grad_norm": 2.182760952002799,
"learning_rate": 4.891578293387413e-06,
"loss": 0.6254,
"step": 236
},
{
"epoch": 0.2,
"grad_norm": 1.998723579962691,
"learning_rate": 4.890623760159605e-06,
"loss": 0.5371,
"step": 237
},
{
"epoch": 0.2,
"grad_norm": 2.319922346931926,
"learning_rate": 4.8896651374817365e-06,
"loss": 0.5941,
"step": 238
},
{
"epoch": 0.2,
"grad_norm": 2.090735197217999,
"learning_rate": 4.888702426993648e-06,
"loss": 0.577,
"step": 239
},
{
"epoch": 0.2,
"grad_norm": 2.1247199987228558,
"learning_rate": 4.887735630342173e-06,
"loss": 0.5928,
"step": 240
},
{
"epoch": 0.2,
"grad_norm": 2.33151114429804,
"learning_rate": 4.8867647491811315e-06,
"loss": 0.5838,
"step": 241
},
{
"epoch": 0.2,
"grad_norm": 2.1570026356289147,
"learning_rate": 4.885789785171334e-06,
"loss": 0.5642,
"step": 242
},
{
"epoch": 0.2,
"grad_norm": 2.049571197047368,
"learning_rate": 4.884810739980575e-06,
"loss": 0.6684,
"step": 243
},
{
"epoch": 0.2,
"grad_norm": 1.9810062424466381,
"learning_rate": 4.883827615283626e-06,
"loss": 0.5942,
"step": 244
},
{
"epoch": 0.2,
"grad_norm": 2.145869663660159,
"learning_rate": 4.882840412762244e-06,
"loss": 0.6356,
"step": 245
},
{
"epoch": 0.2,
"grad_norm": 2.19290302186514,
"learning_rate": 4.881849134105156e-06,
"loss": 0.6189,
"step": 246
},
{
"epoch": 0.2,
"grad_norm": 2.0561043419872984,
"learning_rate": 4.880853781008062e-06,
"loss": 0.5563,
"step": 247
},
{
"epoch": 0.21,
"grad_norm": 1.8831183793224635,
"learning_rate": 4.879854355173638e-06,
"loss": 0.5522,
"step": 248
},
{
"epoch": 0.21,
"grad_norm": 2.020981606684741,
"learning_rate": 4.878850858311518e-06,
"loss": 0.5548,
"step": 249
},
{
"epoch": 0.21,
"grad_norm": 2.060242570493272,
"learning_rate": 4.877843292138307e-06,
"loss": 0.5715,
"step": 250
},
{
"epoch": 0.21,
"grad_norm": 2.082455778933014,
"learning_rate": 4.8768316583775665e-06,
"loss": 0.5959,
"step": 251
},
{
"epoch": 0.21,
"grad_norm": 1.9830929719438626,
"learning_rate": 4.875815958759819e-06,
"loss": 0.5813,
"step": 252
},
{
"epoch": 0.21,
"grad_norm": 1.9772267506828567,
"learning_rate": 4.8747961950225406e-06,
"loss": 0.539,
"step": 253
},
{
"epoch": 0.21,
"grad_norm": 2.1492561995002104,
"learning_rate": 4.873772368910161e-06,
"loss": 0.6059,
"step": 254
},
{
"epoch": 0.21,
"grad_norm": 2.253757247139787,
"learning_rate": 4.872744482174058e-06,
"loss": 0.5897,
"step": 255
},
{
"epoch": 0.21,
"grad_norm": 2.3282624851882496,
"learning_rate": 4.8717125365725545e-06,
"loss": 0.5675,
"step": 256
},
{
"epoch": 0.21,
"grad_norm": 2.15573581133063,
"learning_rate": 4.8706765338709185e-06,
"loss": 0.5958,
"step": 257
},
{
"epoch": 0.21,
"grad_norm": 2.073289220218241,
"learning_rate": 4.869636475841358e-06,
"loss": 0.6052,
"step": 258
},
{
"epoch": 0.21,
"grad_norm": 2.293714090249444,
"learning_rate": 4.8685923642630165e-06,
"loss": 0.5786,
"step": 259
},
{
"epoch": 0.22,
"grad_norm": 1.9496544276539172,
"learning_rate": 4.867544200921974e-06,
"loss": 0.6163,
"step": 260
},
{
"epoch": 0.22,
"grad_norm": 2.5267016753690132,
"learning_rate": 4.866491987611239e-06,
"loss": 0.6223,
"step": 261
},
{
"epoch": 0.22,
"grad_norm": 1.8731249445320794,
"learning_rate": 4.865435726130751e-06,
"loss": 0.5632,
"step": 262
},
{
"epoch": 0.22,
"grad_norm": 2.3586331105798863,
"learning_rate": 4.86437541828737e-06,
"loss": 0.5769,
"step": 263
},
{
"epoch": 0.22,
"grad_norm": 2.0258106914510585,
"learning_rate": 4.863311065894883e-06,
"loss": 0.6103,
"step": 264
},
{
"epoch": 0.22,
"grad_norm": 2.2543614390885955,
"learning_rate": 4.862242670773991e-06,
"loss": 0.5844,
"step": 265
},
{
"epoch": 0.22,
"grad_norm": 1.9440299381244668,
"learning_rate": 4.861170234752314e-06,
"loss": 0.5559,
"step": 266
},
{
"epoch": 0.22,
"grad_norm": 2.254538268495492,
"learning_rate": 4.8600937596643815e-06,
"loss": 0.5709,
"step": 267
},
{
"epoch": 0.22,
"grad_norm": 2.007651746385687,
"learning_rate": 4.8590132473516346e-06,
"loss": 0.573,
"step": 268
},
{
"epoch": 0.22,
"grad_norm": 2.0735253118288837,
"learning_rate": 4.857928699662421e-06,
"loss": 0.5954,
"step": 269
},
{
"epoch": 0.22,
"grad_norm": 2.024775417101569,
"learning_rate": 4.856840118451989e-06,
"loss": 0.5992,
"step": 270
},
{
"epoch": 0.22,
"grad_norm": 2.1043310699945814,
"learning_rate": 4.855747505582488e-06,
"loss": 0.6507,
"step": 271
},
{
"epoch": 0.23,
"grad_norm": 2.0386353328313214,
"learning_rate": 4.854650862922965e-06,
"loss": 0.5666,
"step": 272
},
{
"epoch": 0.23,
"grad_norm": 1.978698841367705,
"learning_rate": 4.853550192349358e-06,
"loss": 0.5593,
"step": 273
},
{
"epoch": 0.23,
"grad_norm": 1.9386534247633986,
"learning_rate": 4.852445495744497e-06,
"loss": 0.5735,
"step": 274
},
{
"epoch": 0.23,
"grad_norm": 2.049346245018599,
"learning_rate": 4.8513367749981e-06,
"loss": 0.5415,
"step": 275
},
{
"epoch": 0.23,
"grad_norm": 2.1051969521216605,
"learning_rate": 4.850224032006765e-06,
"loss": 0.5532,
"step": 276
},
{
"epoch": 0.23,
"grad_norm": 2.2006792558872315,
"learning_rate": 4.849107268673975e-06,
"loss": 0.5696,
"step": 277
},
{
"epoch": 0.23,
"grad_norm": 2.0460787736353647,
"learning_rate": 4.847986486910088e-06,
"loss": 0.5658,
"step": 278
},
{
"epoch": 0.23,
"grad_norm": 2.1161843259225406,
"learning_rate": 4.846861688632336e-06,
"loss": 0.583,
"step": 279
},
{
"epoch": 0.23,
"grad_norm": 1.8882198480393542,
"learning_rate": 4.8457328757648224e-06,
"loss": 0.5693,
"step": 280
},
{
"epoch": 0.23,
"grad_norm": 2.1578413701109596,
"learning_rate": 4.844600050238517e-06,
"loss": 0.5409,
"step": 281
},
{
"epoch": 0.23,
"grad_norm": 2.03912467778954,
"learning_rate": 4.843463213991255e-06,
"loss": 0.5908,
"step": 282
},
{
"epoch": 0.23,
"grad_norm": 2.2333462480826247,
"learning_rate": 4.842322368967731e-06,
"loss": 0.6088,
"step": 283
},
{
"epoch": 0.24,
"grad_norm": 2.06698702157327,
"learning_rate": 4.8411775171194986e-06,
"loss": 0.5953,
"step": 284
},
{
"epoch": 0.24,
"grad_norm": 2.1433923121572045,
"learning_rate": 4.840028660404964e-06,
"loss": 0.5851,
"step": 285
},
{
"epoch": 0.24,
"grad_norm": 2.214858780835041,
"learning_rate": 4.838875800789386e-06,
"loss": 0.5913,
"step": 286
},
{
"epoch": 0.24,
"grad_norm": 2.038128612492624,
"learning_rate": 4.837718940244871e-06,
"loss": 0.5827,
"step": 287
},
{
"epoch": 0.24,
"grad_norm": 1.9894065096959768,
"learning_rate": 4.836558080750365e-06,
"loss": 0.5769,
"step": 288
},
{
"epoch": 0.24,
"grad_norm": 2.1711590153285822,
"learning_rate": 4.835393224291662e-06,
"loss": 0.654,
"step": 289
},
{
"epoch": 0.24,
"grad_norm": 2.105004451988696,
"learning_rate": 4.834224372861386e-06,
"loss": 0.6158,
"step": 290
},
{
"epoch": 0.24,
"grad_norm": 1.9554568023729102,
"learning_rate": 4.833051528459001e-06,
"loss": 0.5807,
"step": 291
},
{
"epoch": 0.24,
"grad_norm": 2.2693917834500312,
"learning_rate": 4.831874693090797e-06,
"loss": 0.5557,
"step": 292
},
{
"epoch": 0.24,
"grad_norm": 1.9081391627126192,
"learning_rate": 4.830693868769892e-06,
"loss": 0.6057,
"step": 293
},
{
"epoch": 0.24,
"grad_norm": 2.2133664110768585,
"learning_rate": 4.82950905751623e-06,
"loss": 0.6103,
"step": 294
},
{
"epoch": 0.24,
"grad_norm": 2.015392814211589,
"learning_rate": 4.8283202613565735e-06,
"loss": 0.5578,
"step": 295
},
{
"epoch": 0.25,
"grad_norm": 2.142124020349717,
"learning_rate": 4.8271274823245e-06,
"loss": 0.5675,
"step": 296
},
{
"epoch": 0.25,
"grad_norm": 1.981611826462286,
"learning_rate": 4.825930722460405e-06,
"loss": 0.5696,
"step": 297
},
{
"epoch": 0.25,
"grad_norm": 1.966759748348117,
"learning_rate": 4.824729983811486e-06,
"loss": 0.58,
"step": 298
},
{
"epoch": 0.25,
"grad_norm": 2.0117040369769397,
"learning_rate": 4.823525268431754e-06,
"loss": 0.6005,
"step": 299
},
{
"epoch": 0.25,
"grad_norm": 1.9579664917991193,
"learning_rate": 4.822316578382019e-06,
"loss": 0.5472,
"step": 300
},
{
"epoch": 0.25,
"grad_norm": 1.9075723479635032,
"learning_rate": 4.821103915729892e-06,
"loss": 0.5834,
"step": 301
},
{
"epoch": 0.25,
"grad_norm": 2.289340229011896,
"learning_rate": 4.819887282549777e-06,
"loss": 0.6088,
"step": 302
},
{
"epoch": 0.25,
"grad_norm": 2.0410700553735235,
"learning_rate": 4.818666680922874e-06,
"loss": 0.5449,
"step": 303
},
{
"epoch": 0.25,
"grad_norm": 2.074434792511819,
"learning_rate": 4.8174421129371675e-06,
"loss": 0.5826,
"step": 304
},
{
"epoch": 0.25,
"grad_norm": 2.1377170527698865,
"learning_rate": 4.816213580687428e-06,
"loss": 0.6262,
"step": 305
},
{
"epoch": 0.25,
"grad_norm": 2.060340839248083,
"learning_rate": 4.814981086275209e-06,
"loss": 0.5479,
"step": 306
},
{
"epoch": 0.25,
"grad_norm": 2.007036467413588,
"learning_rate": 4.813744631808841e-06,
"loss": 0.5642,
"step": 307
},
{
"epoch": 0.26,
"grad_norm": 2.016779606220332,
"learning_rate": 4.8125042194034285e-06,
"loss": 0.5503,
"step": 308
},
{
"epoch": 0.26,
"grad_norm": 1.930004252757651,
"learning_rate": 4.811259851180845e-06,
"loss": 0.582,
"step": 309
},
{
"epoch": 0.26,
"grad_norm": 1.9179477992752856,
"learning_rate": 4.810011529269734e-06,
"loss": 0.5678,
"step": 310
},
{
"epoch": 0.26,
"grad_norm": 2.023430757276848,
"learning_rate": 4.808759255805498e-06,
"loss": 0.614,
"step": 311
},
{
"epoch": 0.26,
"grad_norm": 1.8334738409404936,
"learning_rate": 4.807503032930306e-06,
"loss": 0.5742,
"step": 312
},
{
"epoch": 0.26,
"grad_norm": 1.937332706274502,
"learning_rate": 4.806242862793075e-06,
"loss": 0.6257,
"step": 313
},
{
"epoch": 0.26,
"grad_norm": 2.0265383045700363,
"learning_rate": 4.8049787475494786e-06,
"loss": 0.5733,
"step": 314
},
{
"epoch": 0.26,
"grad_norm": 2.056444039073761,
"learning_rate": 4.803710689361939e-06,
"loss": 0.578,
"step": 315
},
{
"epoch": 0.26,
"grad_norm": 2.411132719183335,
"learning_rate": 4.802438690399622e-06,
"loss": 0.5778,
"step": 316
},
{
"epoch": 0.26,
"grad_norm": 2.0233969242222853,
"learning_rate": 4.801162752838436e-06,
"loss": 0.5649,
"step": 317
},
{
"epoch": 0.26,
"grad_norm": 2.2809121915132815,
"learning_rate": 4.799882878861025e-06,
"loss": 0.5589,
"step": 318
},
{
"epoch": 0.26,
"grad_norm": 1.9806834041020271,
"learning_rate": 4.798599070656768e-06,
"loss": 0.5753,
"step": 319
},
{
"epoch": 0.27,
"grad_norm": 2.095099671577702,
"learning_rate": 4.797311330421773e-06,
"loss": 0.5644,
"step": 320
},
{
"epoch": 0.27,
"grad_norm": 2.1697606190375764,
"learning_rate": 4.796019660358877e-06,
"loss": 0.6009,
"step": 321
},
{
"epoch": 0.27,
"grad_norm": 1.9549416103216173,
"learning_rate": 4.794724062677635e-06,
"loss": 0.5429,
"step": 322
},
{
"epoch": 0.27,
"grad_norm": 1.9986949357292838,
"learning_rate": 4.793424539594323e-06,
"loss": 0.5456,
"step": 323
},
{
"epoch": 0.27,
"grad_norm": 1.9414831957796765,
"learning_rate": 4.792121093331935e-06,
"loss": 0.5468,
"step": 324
},
{
"epoch": 0.27,
"grad_norm": 2.100702188933012,
"learning_rate": 4.7908137261201685e-06,
"loss": 0.5763,
"step": 325
},
{
"epoch": 0.27,
"grad_norm": 2.2747471285831025,
"learning_rate": 4.789502440195436e-06,
"loss": 0.5637,
"step": 326
},
{
"epoch": 0.27,
"grad_norm": 1.8996382919319124,
"learning_rate": 4.788187237800849e-06,
"loss": 0.5285,
"step": 327
},
{
"epoch": 0.27,
"grad_norm": 2.3451495174978847,
"learning_rate": 4.786868121186218e-06,
"loss": 0.5638,
"step": 328
},
{
"epoch": 0.27,
"grad_norm": 2.0437536068229565,
"learning_rate": 4.7855450926080535e-06,
"loss": 0.5282,
"step": 329
},
{
"epoch": 0.27,
"grad_norm": 2.1185488514745554,
"learning_rate": 4.784218154329555e-06,
"loss": 0.5689,
"step": 330
},
{
"epoch": 0.27,
"grad_norm": 2.08745956731504,
"learning_rate": 4.78288730862061e-06,
"loss": 0.5772,
"step": 331
},
{
"epoch": 0.28,
"grad_norm": 1.9479507156354359,
"learning_rate": 4.781552557757789e-06,
"loss": 0.5419,
"step": 332
},
{
"epoch": 0.28,
"grad_norm": 2.0211480847937255,
"learning_rate": 4.780213904024346e-06,
"loss": 0.5757,
"step": 333
},
{
"epoch": 0.28,
"grad_norm": 1.9075335749936069,
"learning_rate": 4.7788713497102094e-06,
"loss": 0.5693,
"step": 334
},
{
"epoch": 0.28,
"grad_norm": 1.9590727137410602,
"learning_rate": 4.777524897111979e-06,
"loss": 0.5501,
"step": 335
},
{
"epoch": 0.28,
"grad_norm": 2.0328480247612752,
"learning_rate": 4.776174548532926e-06,
"loss": 0.587,
"step": 336
},
{
"epoch": 0.28,
"grad_norm": 2.062540517496736,
"learning_rate": 4.774820306282982e-06,
"loss": 0.5819,
"step": 337
},
{
"epoch": 0.28,
"grad_norm": 2.0054452800156195,
"learning_rate": 4.773462172678744e-06,
"loss": 0.5529,
"step": 338
},
{
"epoch": 0.28,
"grad_norm": 1.9641125644599562,
"learning_rate": 4.772100150043462e-06,
"loss": 0.5895,
"step": 339
},
{
"epoch": 0.28,
"grad_norm": 1.9196744569285298,
"learning_rate": 4.77073424070704e-06,
"loss": 0.5504,
"step": 340
},
{
"epoch": 0.28,
"grad_norm": 2.0002752186146484,
"learning_rate": 4.76936444700603e-06,
"loss": 0.5307,
"step": 341
},
{
"epoch": 0.28,
"grad_norm": 2.1068919823054344,
"learning_rate": 4.76799077128363e-06,
"loss": 0.5908,
"step": 342
},
{
"epoch": 0.28,
"grad_norm": 1.919597745459612,
"learning_rate": 4.766613215889678e-06,
"loss": 0.5423,
"step": 343
},
{
"epoch": 0.29,
"grad_norm": 2.0670928578728716,
"learning_rate": 4.765231783180648e-06,
"loss": 0.5901,
"step": 344
},
{
"epoch": 0.29,
"grad_norm": 1.906116148793229,
"learning_rate": 4.763846475519648e-06,
"loss": 0.5919,
"step": 345
},
{
"epoch": 0.29,
"grad_norm": 1.9133575268702454,
"learning_rate": 4.762457295276413e-06,
"loss": 0.585,
"step": 346
},
{
"epoch": 0.29,
"grad_norm": 2.133902651855379,
"learning_rate": 4.7610642448273025e-06,
"loss": 0.5444,
"step": 347
},
{
"epoch": 0.29,
"grad_norm": 1.95222194640397,
"learning_rate": 4.7596673265552985e-06,
"loss": 0.5941,
"step": 348
},
{
"epoch": 0.29,
"grad_norm": 2.095010268380277,
"learning_rate": 4.758266542849997e-06,
"loss": 0.6045,
"step": 349
},
{
"epoch": 0.29,
"grad_norm": 2.0493864712059655,
"learning_rate": 4.756861896107609e-06,
"loss": 0.6011,
"step": 350
},
{
"epoch": 0.29,
"grad_norm": 1.9222198823064967,
"learning_rate": 4.755453388730949e-06,
"loss": 0.5521,
"step": 351
},
{
"epoch": 0.29,
"grad_norm": 2.368147154955994,
"learning_rate": 4.754041023129442e-06,
"loss": 0.6117,
"step": 352
},
{
"epoch": 0.29,
"grad_norm": 1.9734596786106697,
"learning_rate": 4.752624801719108e-06,
"loss": 0.5727,
"step": 353
},
{
"epoch": 0.29,
"grad_norm": 2.151510566977991,
"learning_rate": 4.751204726922564e-06,
"loss": 0.6085,
"step": 354
},
{
"epoch": 0.29,
"grad_norm": 1.9291219072892685,
"learning_rate": 4.74978080116902e-06,
"loss": 0.5655,
"step": 355
},
{
"epoch": 0.3,
"grad_norm": 1.838592559018919,
"learning_rate": 4.748353026894273e-06,
"loss": 0.5508,
"step": 356
},
{
"epoch": 0.3,
"grad_norm": 2.069156589116884,
"learning_rate": 4.7469214065407e-06,
"loss": 0.5942,
"step": 357
},
{
"epoch": 0.3,
"grad_norm": 1.8960817746615841,
"learning_rate": 4.745485942557264e-06,
"loss": 0.5902,
"step": 358
},
{
"epoch": 0.3,
"grad_norm": 2.0606557307859634,
"learning_rate": 4.744046637399497e-06,
"loss": 0.556,
"step": 359
},
{
"epoch": 0.3,
"grad_norm": 1.9660065879130573,
"learning_rate": 4.742603493529505e-06,
"loss": 0.5364,
"step": 360
},
{
"epoch": 0.3,
"grad_norm": 1.9647921383638112,
"learning_rate": 4.741156513415958e-06,
"loss": 0.5601,
"step": 361
},
{
"epoch": 0.3,
"grad_norm": 2.049074688423064,
"learning_rate": 4.739705699534092e-06,
"loss": 0.556,
"step": 362
},
{
"epoch": 0.3,
"grad_norm": 1.962593945802751,
"learning_rate": 4.738251054365697e-06,
"loss": 0.5609,
"step": 363
},
{
"epoch": 0.3,
"grad_norm": 2.059675349950347,
"learning_rate": 4.736792580399119e-06,
"loss": 0.5499,
"step": 364
},
{
"epoch": 0.3,
"grad_norm": 1.8479566025134508,
"learning_rate": 4.7353302801292555e-06,
"loss": 0.5621,
"step": 365
},
{
"epoch": 0.3,
"grad_norm": 1.9405450724813613,
"learning_rate": 4.733864156057545e-06,
"loss": 0.5437,
"step": 366
},
{
"epoch": 0.3,
"grad_norm": 2.122487864033456,
"learning_rate": 4.7323942106919715e-06,
"loss": 0.5984,
"step": 367
},
{
"epoch": 0.31,
"grad_norm": 2.6822841144123046,
"learning_rate": 4.730920446547052e-06,
"loss": 0.5951,
"step": 368
},
{
"epoch": 0.31,
"grad_norm": 2.001405394086718,
"learning_rate": 4.729442866143838e-06,
"loss": 0.5552,
"step": 369
},
{
"epoch": 0.31,
"grad_norm": 2.081154186949651,
"learning_rate": 4.72796147200991e-06,
"loss": 0.587,
"step": 370
},
{
"epoch": 0.31,
"grad_norm": 2.1196544292473236,
"learning_rate": 4.72647626667937e-06,
"loss": 0.5882,
"step": 371
},
{
"epoch": 0.31,
"grad_norm": 2.107445583509131,
"learning_rate": 4.724987252692841e-06,
"loss": 0.5389,
"step": 372
},
{
"epoch": 0.31,
"grad_norm": 1.9529785007256542,
"learning_rate": 4.723494432597462e-06,
"loss": 0.6439,
"step": 373
},
{
"epoch": 0.31,
"grad_norm": 2.11513441515607,
"learning_rate": 4.72199780894688e-06,
"loss": 0.6089,
"step": 374
},
{
"epoch": 0.31,
"grad_norm": 1.9769899713721226,
"learning_rate": 4.7204973843012504e-06,
"loss": 0.5393,
"step": 375
},
{
"epoch": 0.31,
"grad_norm": 2.063749623036316,
"learning_rate": 4.718993161227231e-06,
"loss": 0.5987,
"step": 376
},
{
"epoch": 0.31,
"grad_norm": 2.0515862288253883,
"learning_rate": 4.717485142297977e-06,
"loss": 0.5772,
"step": 377
},
{
"epoch": 0.31,
"grad_norm": 1.8962297741946081,
"learning_rate": 4.715973330093135e-06,
"loss": 0.5424,
"step": 378
},
{
"epoch": 0.31,
"grad_norm": 2.2210958340400087,
"learning_rate": 4.7144577271988435e-06,
"loss": 0.6072,
"step": 379
},
{
"epoch": 0.32,
"grad_norm": 2.067113337475314,
"learning_rate": 4.712938336207724e-06,
"loss": 0.5482,
"step": 380
},
{
"epoch": 0.32,
"grad_norm": 1.8985489253954526,
"learning_rate": 4.711415159718876e-06,
"loss": 0.5593,
"step": 381
},
{
"epoch": 0.32,
"grad_norm": 2.085236381118245,
"learning_rate": 4.709888200337879e-06,
"loss": 0.5704,
"step": 382
},
{
"epoch": 0.32,
"grad_norm": 2.0967664183909784,
"learning_rate": 4.708357460676779e-06,
"loss": 0.5997,
"step": 383
},
{
"epoch": 0.32,
"grad_norm": 2.0454278026009645,
"learning_rate": 4.706822943354092e-06,
"loss": 0.5669,
"step": 384
},
{
"epoch": 0.32,
"grad_norm": 1.9171673309342674,
"learning_rate": 4.705284650994793e-06,
"loss": 0.517,
"step": 385
},
{
"epoch": 0.32,
"grad_norm": 2.2003223432761287,
"learning_rate": 4.70374258623032e-06,
"loss": 0.5957,
"step": 386
},
{
"epoch": 0.32,
"grad_norm": 1.936392519491186,
"learning_rate": 4.702196751698557e-06,
"loss": 0.5767,
"step": 387
},
{
"epoch": 0.32,
"grad_norm": 2.354272003403086,
"learning_rate": 4.700647150043841e-06,
"loss": 0.6515,
"step": 388
},
{
"epoch": 0.32,
"grad_norm": 1.9115059027323418,
"learning_rate": 4.699093783916955e-06,
"loss": 0.5579,
"step": 389
},
{
"epoch": 0.32,
"grad_norm": 1.9878827587010002,
"learning_rate": 4.697536655975115e-06,
"loss": 0.572,
"step": 390
},
{
"epoch": 0.32,
"grad_norm": 1.9729552535473858,
"learning_rate": 4.69597576888198e-06,
"loss": 0.5665,
"step": 391
},
{
"epoch": 0.32,
"grad_norm": 2.177634366499155,
"learning_rate": 4.694411125307632e-06,
"loss": 0.6363,
"step": 392
},
{
"epoch": 0.33,
"grad_norm": 1.8955146664976508,
"learning_rate": 4.692842727928584e-06,
"loss": 0.5682,
"step": 393
},
{
"epoch": 0.33,
"grad_norm": 2.175305874476245,
"learning_rate": 4.691270579427769e-06,
"loss": 0.5943,
"step": 394
},
{
"epoch": 0.33,
"grad_norm": 2.068140527232831,
"learning_rate": 4.689694682494537e-06,
"loss": 0.5659,
"step": 395
},
{
"epoch": 0.33,
"grad_norm": 1.9112960694448755,
"learning_rate": 4.688115039824648e-06,
"loss": 0.6048,
"step": 396
},
{
"epoch": 0.33,
"grad_norm": 1.9778305624626604,
"learning_rate": 4.686531654120272e-06,
"loss": 0.5695,
"step": 397
},
{
"epoch": 0.33,
"grad_norm": 2.096904163204813,
"learning_rate": 4.684944528089981e-06,
"loss": 0.6113,
"step": 398
},
{
"epoch": 0.33,
"grad_norm": 2.0011934144948516,
"learning_rate": 4.683353664448745e-06,
"loss": 0.5568,
"step": 399
},
{
"epoch": 0.33,
"grad_norm": 1.8562851971757464,
"learning_rate": 4.681759065917929e-06,
"loss": 0.5474,
"step": 400
},
{
"epoch": 0.33,
"grad_norm": 1.8190547574166316,
"learning_rate": 4.680160735225285e-06,
"loss": 0.5315,
"step": 401
},
{
"epoch": 0.33,
"grad_norm": 1.9247862956929132,
"learning_rate": 4.6785586751049505e-06,
"loss": 0.5568,
"step": 402
},
{
"epoch": 0.33,
"grad_norm": 1.8469793674077621,
"learning_rate": 4.676952888297442e-06,
"loss": 0.5811,
"step": 403
},
{
"epoch": 0.33,
"grad_norm": 1.946943145198674,
"learning_rate": 4.675343377549653e-06,
"loss": 0.5475,
"step": 404
},
{
"epoch": 0.34,
"grad_norm": 1.991304422730463,
"learning_rate": 4.6737301456148445e-06,
"loss": 0.5856,
"step": 405
},
{
"epoch": 0.34,
"grad_norm": 1.9168241989446437,
"learning_rate": 4.672113195252644e-06,
"loss": 0.6069,
"step": 406
},
{
"epoch": 0.34,
"grad_norm": 1.9305433665377905,
"learning_rate": 4.670492529229039e-06,
"loss": 0.5536,
"step": 407
},
{
"epoch": 0.34,
"grad_norm": 1.8441008898830742,
"learning_rate": 4.668868150316377e-06,
"loss": 0.5859,
"step": 408
},
{
"epoch": 0.34,
"grad_norm": 1.8879301596961315,
"learning_rate": 4.667240061293351e-06,
"loss": 0.5483,
"step": 409
},
{
"epoch": 0.34,
"grad_norm": 2.024767417636281,
"learning_rate": 4.665608264945004e-06,
"loss": 0.5414,
"step": 410
},
{
"epoch": 0.34,
"grad_norm": 2.1331610141797395,
"learning_rate": 4.663972764062722e-06,
"loss": 0.5811,
"step": 411
},
{
"epoch": 0.34,
"grad_norm": 1.8132480265817386,
"learning_rate": 4.662333561444226e-06,
"loss": 0.5573,
"step": 412
},
{
"epoch": 0.34,
"grad_norm": 1.9795813972027145,
"learning_rate": 4.6606906598935675e-06,
"loss": 0.5814,
"step": 413
},
{
"epoch": 0.34,
"grad_norm": 1.8782931074297053,
"learning_rate": 4.6590440622211295e-06,
"loss": 0.569,
"step": 414
},
{
"epoch": 0.34,
"grad_norm": 1.8219945335518706,
"learning_rate": 4.657393771243614e-06,
"loss": 0.5669,
"step": 415
},
{
"epoch": 0.34,
"grad_norm": 2.4047268604371306,
"learning_rate": 4.6557397897840454e-06,
"loss": 0.5602,
"step": 416
},
{
"epoch": 0.35,
"grad_norm": 2.064501780523946,
"learning_rate": 4.654082120671757e-06,
"loss": 0.5699,
"step": 417
},
{
"epoch": 0.35,
"grad_norm": 1.9183128854940252,
"learning_rate": 4.65242076674239e-06,
"loss": 0.6112,
"step": 418
},
{
"epoch": 0.35,
"grad_norm": 1.9315698971629633,
"learning_rate": 4.650755730837894e-06,
"loss": 0.5537,
"step": 419
},
{
"epoch": 0.35,
"grad_norm": 1.9527809333659218,
"learning_rate": 4.649087015806509e-06,
"loss": 0.5423,
"step": 420
},
{
"epoch": 0.35,
"grad_norm": 1.8940523915995442,
"learning_rate": 4.647414624502777e-06,
"loss": 0.5708,
"step": 421
},
{
"epoch": 0.35,
"grad_norm": 1.9976964785548623,
"learning_rate": 4.645738559787524e-06,
"loss": 0.6006,
"step": 422
},
{
"epoch": 0.35,
"grad_norm": 1.9098681403283917,
"learning_rate": 4.64405882452786e-06,
"loss": 0.5591,
"step": 423
},
{
"epoch": 0.35,
"grad_norm": 1.8695612182804557,
"learning_rate": 4.642375421597175e-06,
"loss": 0.5219,
"step": 424
},
{
"epoch": 0.35,
"grad_norm": 1.8912077704810082,
"learning_rate": 4.6406883538751315e-06,
"loss": 0.5224,
"step": 425
},
{
"epoch": 0.35,
"grad_norm": 1.9390714726978922,
"learning_rate": 4.638997624247664e-06,
"loss": 0.5359,
"step": 426
},
{
"epoch": 0.35,
"grad_norm": 2.051545992296337,
"learning_rate": 4.637303235606968e-06,
"loss": 0.544,
"step": 427
},
{
"epoch": 0.35,
"grad_norm": 2.0657109136265914,
"learning_rate": 4.6356051908515e-06,
"loss": 0.5429,
"step": 428
},
{
"epoch": 0.36,
"grad_norm": 2.0301022307984793,
"learning_rate": 4.63390349288597e-06,
"loss": 0.5787,
"step": 429
},
{
"epoch": 0.36,
"grad_norm": 2.052515756169346,
"learning_rate": 4.632198144621338e-06,
"loss": 0.5778,
"step": 430
},
{
"epoch": 0.36,
"grad_norm": 1.9741370495474897,
"learning_rate": 4.630489148974807e-06,
"loss": 0.5142,
"step": 431
},
{
"epoch": 0.36,
"grad_norm": 1.9713229498863698,
"learning_rate": 4.62877650886982e-06,
"loss": 0.6127,
"step": 432
},
{
"epoch": 0.36,
"grad_norm": 2.1609440121306007,
"learning_rate": 4.627060227236055e-06,
"loss": 0.5886,
"step": 433
},
{
"epoch": 0.36,
"grad_norm": 1.944966445355139,
"learning_rate": 4.625340307009418e-06,
"loss": 0.5657,
"step": 434
},
{
"epoch": 0.36,
"grad_norm": 2.031003925680835,
"learning_rate": 4.623616751132041e-06,
"loss": 0.5628,
"step": 435
},
{
"epoch": 0.36,
"grad_norm": 1.8774113373137704,
"learning_rate": 4.621889562552272e-06,
"loss": 0.6068,
"step": 436
},
{
"epoch": 0.36,
"grad_norm": 2.0385201543401785,
"learning_rate": 4.620158744224677e-06,
"loss": 0.5511,
"step": 437
},
{
"epoch": 0.36,
"grad_norm": 1.8440750841938207,
"learning_rate": 4.618424299110028e-06,
"loss": 0.5261,
"step": 438
},
{
"epoch": 0.36,
"grad_norm": 1.8978691755923442,
"learning_rate": 4.616686230175303e-06,
"loss": 0.5862,
"step": 439
},
{
"epoch": 0.36,
"grad_norm": 1.8120850246861446,
"learning_rate": 4.614944540393679e-06,
"loss": 0.5652,
"step": 440
},
{
"epoch": 0.37,
"grad_norm": 2.1821084695714914,
"learning_rate": 4.613199232744525e-06,
"loss": 0.5598,
"step": 441
},
{
"epoch": 0.37,
"grad_norm": 1.9626422737625222,
"learning_rate": 4.611450310213401e-06,
"loss": 0.5267,
"step": 442
},
{
"epoch": 0.37,
"grad_norm": 1.9714913234889215,
"learning_rate": 4.6096977757920505e-06,
"loss": 0.5658,
"step": 443
},
{
"epoch": 0.37,
"grad_norm": 2.0179324078198233,
"learning_rate": 4.607941632478393e-06,
"loss": 0.582,
"step": 444
},
{
"epoch": 0.37,
"grad_norm": 1.8565193856331161,
"learning_rate": 4.6061818832765246e-06,
"loss": 0.5715,
"step": 445
},
{
"epoch": 0.37,
"grad_norm": 1.9798501479599246,
"learning_rate": 4.604418531196708e-06,
"loss": 0.6007,
"step": 446
},
{
"epoch": 0.37,
"grad_norm": 2.0095846956468257,
"learning_rate": 4.602651579255369e-06,
"loss": 0.5947,
"step": 447
},
{
"epoch": 0.37,
"grad_norm": 1.9316541079988245,
"learning_rate": 4.600881030475093e-06,
"loss": 0.5501,
"step": 448
},
{
"epoch": 0.37,
"grad_norm": 2.080069353365406,
"learning_rate": 4.599106887884616e-06,
"loss": 0.5631,
"step": 449
},
{
"epoch": 0.37,
"grad_norm": 1.965973137652201,
"learning_rate": 4.5973291545188235e-06,
"loss": 0.5267,
"step": 450
},
{
"epoch": 0.37,
"grad_norm": 2.1082225966704087,
"learning_rate": 4.595547833418741e-06,
"loss": 0.6418,
"step": 451
},
{
"epoch": 0.37,
"grad_norm": 2.0359312594194083,
"learning_rate": 4.593762927631536e-06,
"loss": 0.5644,
"step": 452
},
{
"epoch": 0.38,
"grad_norm": 2.1254892914109433,
"learning_rate": 4.591974440210502e-06,
"loss": 0.5693,
"step": 453
},
{
"epoch": 0.38,
"grad_norm": 1.9121188587334927,
"learning_rate": 4.590182374215064e-06,
"loss": 0.5572,
"step": 454
},
{
"epoch": 0.38,
"grad_norm": 1.9348642624953207,
"learning_rate": 4.588386732710765e-06,
"loss": 0.5446,
"step": 455
},
{
"epoch": 0.38,
"grad_norm": 1.8667846547370581,
"learning_rate": 4.5865875187692695e-06,
"loss": 0.5681,
"step": 456
},
{
"epoch": 0.38,
"grad_norm": 1.9219061327454674,
"learning_rate": 4.5847847354683465e-06,
"loss": 0.5508,
"step": 457
},
{
"epoch": 0.38,
"grad_norm": 1.8106132369123122,
"learning_rate": 4.5829783858918756e-06,
"loss": 0.5626,
"step": 458
},
{
"epoch": 0.38,
"grad_norm": 1.7827483964442634,
"learning_rate": 4.5811684731298355e-06,
"loss": 0.5575,
"step": 459
},
{
"epoch": 0.38,
"grad_norm": 1.9284196979863513,
"learning_rate": 4.5793550002783e-06,
"loss": 0.5363,
"step": 460
},
{
"epoch": 0.38,
"grad_norm": 2.029647468705457,
"learning_rate": 4.577537970439433e-06,
"loss": 0.5415,
"step": 461
},
{
"epoch": 0.38,
"grad_norm": 2.0997127029950087,
"learning_rate": 4.575717386721482e-06,
"loss": 0.5814,
"step": 462
},
{
"epoch": 0.38,
"grad_norm": 1.9589290300656341,
"learning_rate": 4.573893252238777e-06,
"loss": 0.5156,
"step": 463
},
{
"epoch": 0.38,
"grad_norm": 1.905237143908251,
"learning_rate": 4.572065570111717e-06,
"loss": 0.5536,
"step": 464
},
{
"epoch": 0.39,
"grad_norm": 1.929519794935609,
"learning_rate": 4.570234343466775e-06,
"loss": 0.5879,
"step": 465
},
{
"epoch": 0.39,
"grad_norm": 2.096095808886982,
"learning_rate": 4.568399575436484e-06,
"loss": 0.6241,
"step": 466
},
{
"epoch": 0.39,
"grad_norm": 1.9486118894048778,
"learning_rate": 4.566561269159437e-06,
"loss": 0.6307,
"step": 467
},
{
"epoch": 0.39,
"grad_norm": 2.0839490306744586,
"learning_rate": 4.564719427780276e-06,
"loss": 0.5655,
"step": 468
},
{
"epoch": 0.39,
"grad_norm": 1.9439525665822102,
"learning_rate": 4.562874054449694e-06,
"loss": 0.5437,
"step": 469
},
{
"epoch": 0.39,
"grad_norm": 1.9409142791465297,
"learning_rate": 4.5610251523244244e-06,
"loss": 0.6429,
"step": 470
},
{
"epoch": 0.39,
"grad_norm": 1.8664574493795525,
"learning_rate": 4.559172724567238e-06,
"loss": 0.5826,
"step": 471
},
{
"epoch": 0.39,
"grad_norm": 1.80819349503324,
"learning_rate": 4.557316774346934e-06,
"loss": 0.5372,
"step": 472
},
{
"epoch": 0.39,
"grad_norm": 1.8680097526865296,
"learning_rate": 4.555457304838341e-06,
"loss": 0.5503,
"step": 473
},
{
"epoch": 0.39,
"grad_norm": 1.7466938790815696,
"learning_rate": 4.553594319222303e-06,
"loss": 0.5425,
"step": 474
},
{
"epoch": 0.39,
"grad_norm": 1.9610557658505607,
"learning_rate": 4.551727820685684e-06,
"loss": 0.5755,
"step": 475
},
{
"epoch": 0.39,
"grad_norm": 1.9414839604282412,
"learning_rate": 4.549857812421353e-06,
"loss": 0.5915,
"step": 476
},
{
"epoch": 0.4,
"grad_norm": 1.8484957644576423,
"learning_rate": 4.547984297628186e-06,
"loss": 0.5676,
"step": 477
},
{
"epoch": 0.4,
"grad_norm": 2.074524028551078,
"learning_rate": 4.546107279511055e-06,
"loss": 0.6084,
"step": 478
},
{
"epoch": 0.4,
"grad_norm": 2.069692704122282,
"learning_rate": 4.544226761280826e-06,
"loss": 0.5676,
"step": 479
},
{
"epoch": 0.4,
"grad_norm": 1.8975472248317244,
"learning_rate": 4.54234274615435e-06,
"loss": 0.5904,
"step": 480
},
{
"epoch": 0.4,
"grad_norm": 2.0118868982719897,
"learning_rate": 4.540455237354466e-06,
"loss": 0.5722,
"step": 481
},
{
"epoch": 0.4,
"grad_norm": 1.9733105429381828,
"learning_rate": 4.5385642381099814e-06,
"loss": 0.6112,
"step": 482
},
{
"epoch": 0.4,
"grad_norm": 1.862156914026863,
"learning_rate": 4.53666975165568e-06,
"loss": 0.5951,
"step": 483
},
{
"epoch": 0.4,
"grad_norm": 1.9512940035297868,
"learning_rate": 4.53477178123231e-06,
"loss": 0.5223,
"step": 484
},
{
"epoch": 0.4,
"grad_norm": 1.9202464191558823,
"learning_rate": 4.532870330086577e-06,
"loss": 0.5638,
"step": 485
},
{
"epoch": 0.4,
"grad_norm": 1.9015767656854419,
"learning_rate": 4.530965401471143e-06,
"loss": 0.5911,
"step": 486
},
{
"epoch": 0.4,
"grad_norm": 1.95190921973106,
"learning_rate": 4.529056998644619e-06,
"loss": 0.6053,
"step": 487
},
{
"epoch": 0.4,
"grad_norm": 2.0058459596081644,
"learning_rate": 4.527145124871556e-06,
"loss": 0.5466,
"step": 488
},
{
"epoch": 0.41,
"grad_norm": 1.8902620959998047,
"learning_rate": 4.5252297834224454e-06,
"loss": 0.5526,
"step": 489
},
{
"epoch": 0.41,
"grad_norm": 1.985466416169018,
"learning_rate": 4.523310977573711e-06,
"loss": 0.5958,
"step": 490
},
{
"epoch": 0.41,
"grad_norm": 2.1140148957176415,
"learning_rate": 4.521388710607699e-06,
"loss": 0.613,
"step": 491
},
{
"epoch": 0.41,
"grad_norm": 1.9470601192089525,
"learning_rate": 4.51946298581268e-06,
"loss": 0.5847,
"step": 492
},
{
"epoch": 0.41,
"grad_norm": 2.0227057176069603,
"learning_rate": 4.51753380648284e-06,
"loss": 0.5784,
"step": 493
},
{
"epoch": 0.41,
"grad_norm": 2.05501863673554,
"learning_rate": 4.515601175918269e-06,
"loss": 0.5501,
"step": 494
},
{
"epoch": 0.41,
"grad_norm": 2.0129325402811715,
"learning_rate": 4.513665097424967e-06,
"loss": 0.5641,
"step": 495
},
{
"epoch": 0.41,
"grad_norm": 2.0322333044110468,
"learning_rate": 4.51172557431483e-06,
"loss": 0.5422,
"step": 496
},
{
"epoch": 0.41,
"grad_norm": 1.9573055659958774,
"learning_rate": 4.509782609905644e-06,
"loss": 0.516,
"step": 497
},
{
"epoch": 0.41,
"grad_norm": 1.8223127451485421,
"learning_rate": 4.507836207521085e-06,
"loss": 0.5714,
"step": 498
},
{
"epoch": 0.41,
"grad_norm": 1.9343089861079434,
"learning_rate": 4.50588637049071e-06,
"loss": 0.5424,
"step": 499
},
{
"epoch": 0.41,
"grad_norm": 1.8940990649350729,
"learning_rate": 4.503933102149948e-06,
"loss": 0.5832,
"step": 500
},
{
"epoch": 0.42,
"grad_norm": 1.908617301933682,
"learning_rate": 4.501976405840101e-06,
"loss": 0.5399,
"step": 501
},
{
"epoch": 0.42,
"grad_norm": 1.8290259512093785,
"learning_rate": 4.500016284908334e-06,
"loss": 0.5561,
"step": 502
},
{
"epoch": 0.42,
"grad_norm": 1.9840280991844164,
"learning_rate": 4.49805274270767e-06,
"loss": 0.5645,
"step": 503
},
{
"epoch": 0.42,
"grad_norm": 1.9864953051636856,
"learning_rate": 4.496085782596984e-06,
"loss": 0.5369,
"step": 504
},
{
"epoch": 0.42,
"grad_norm": 1.979387839103732,
"learning_rate": 4.494115407940999e-06,
"loss": 0.6196,
"step": 505
},
{
"epoch": 0.42,
"grad_norm": 1.9266869362165981,
"learning_rate": 4.492141622110279e-06,
"loss": 0.5687,
"step": 506
},
{
"epoch": 0.42,
"grad_norm": 1.9887461782376619,
"learning_rate": 4.4901644284812205e-06,
"loss": 0.5264,
"step": 507
},
{
"epoch": 0.42,
"grad_norm": 1.8717867803152208,
"learning_rate": 4.488183830436052e-06,
"loss": 0.5612,
"step": 508
},
{
"epoch": 0.42,
"grad_norm": 2.0044226171493,
"learning_rate": 4.486199831362828e-06,
"loss": 0.5571,
"step": 509
},
{
"epoch": 0.42,
"grad_norm": 2.1075571016617958,
"learning_rate": 4.484212434655414e-06,
"loss": 0.5642,
"step": 510
},
{
"epoch": 0.42,
"grad_norm": 1.8031612547539957,
"learning_rate": 4.482221643713494e-06,
"loss": 0.5805,
"step": 511
},
{
"epoch": 0.42,
"grad_norm": 1.8782516337672304,
"learning_rate": 4.480227461942556e-06,
"loss": 0.5596,
"step": 512
},
{
"epoch": 0.43,
"grad_norm": 2.075073901596185,
"learning_rate": 4.478229892753886e-06,
"loss": 0.6124,
"step": 513
},
{
"epoch": 0.43,
"grad_norm": 2.0588983460568304,
"learning_rate": 4.47622893956457e-06,
"loss": 0.5589,
"step": 514
},
{
"epoch": 0.43,
"grad_norm": 1.850248236464706,
"learning_rate": 4.474224605797476e-06,
"loss": 0.5603,
"step": 515
},
{
"epoch": 0.43,
"grad_norm": 1.932844310652863,
"learning_rate": 4.472216894881261e-06,
"loss": 0.5571,
"step": 516
},
{
"epoch": 0.43,
"grad_norm": 2.09975454805468,
"learning_rate": 4.470205810250357e-06,
"loss": 0.5975,
"step": 517
},
{
"epoch": 0.43,
"grad_norm": 1.9694087093010304,
"learning_rate": 4.468191355344965e-06,
"loss": 0.5698,
"step": 518
},
{
"epoch": 0.43,
"grad_norm": 1.8794788153917539,
"learning_rate": 4.466173533611053e-06,
"loss": 0.5559,
"step": 519
},
{
"epoch": 0.43,
"grad_norm": 2.0650455557855434,
"learning_rate": 4.46415234850035e-06,
"loss": 0.5644,
"step": 520
},
{
"epoch": 0.43,
"grad_norm": 2.0062649027982022,
"learning_rate": 4.462127803470334e-06,
"loss": 0.608,
"step": 521
},
{
"epoch": 0.43,
"grad_norm": 2.043267877462657,
"learning_rate": 4.460099901984235e-06,
"loss": 0.573,
"step": 522
},
{
"epoch": 0.43,
"grad_norm": 2.056372436619027,
"learning_rate": 4.4580686475110235e-06,
"loss": 0.5748,
"step": 523
},
{
"epoch": 0.43,
"grad_norm": 1.8871033520138176,
"learning_rate": 4.456034043525404e-06,
"loss": 0.5339,
"step": 524
},
{
"epoch": 0.44,
"grad_norm": 1.889474616209236,
"learning_rate": 4.45399609350781e-06,
"loss": 0.5185,
"step": 525
},
{
"epoch": 0.44,
"grad_norm": 1.9767406217632912,
"learning_rate": 4.451954800944405e-06,
"loss": 0.5758,
"step": 526
},
{
"epoch": 0.44,
"grad_norm": 1.9588695861513832,
"learning_rate": 4.449910169327062e-06,
"loss": 0.5472,
"step": 527
},
{
"epoch": 0.44,
"grad_norm": 1.8852210889000718,
"learning_rate": 4.447862202153372e-06,
"loss": 0.5917,
"step": 528
},
{
"epoch": 0.44,
"grad_norm": 2.0103638871993077,
"learning_rate": 4.445810902926629e-06,
"loss": 0.5761,
"step": 529
},
{
"epoch": 0.44,
"grad_norm": 2.201836945389513,
"learning_rate": 4.443756275155827e-06,
"loss": 0.5614,
"step": 530
},
{
"epoch": 0.44,
"grad_norm": 1.900702305836831,
"learning_rate": 4.441698322355656e-06,
"loss": 0.5254,
"step": 531
},
{
"epoch": 0.44,
"grad_norm": 2.134694583439314,
"learning_rate": 4.4396370480464915e-06,
"loss": 0.5607,
"step": 532
},
{
"epoch": 0.44,
"grad_norm": 1.8073751630381198,
"learning_rate": 4.437572455754391e-06,
"loss": 0.536,
"step": 533
},
{
"epoch": 0.44,
"grad_norm": 1.9607338020142653,
"learning_rate": 4.435504549011088e-06,
"loss": 0.59,
"step": 534
},
{
"epoch": 0.44,
"grad_norm": 2.0756430867435274,
"learning_rate": 4.433433331353988e-06,
"loss": 0.5538,
"step": 535
},
{
"epoch": 0.44,
"grad_norm": 1.8280570853718465,
"learning_rate": 4.431358806326158e-06,
"loss": 0.5789,
"step": 536
},
{
"epoch": 0.45,
"grad_norm": 2.2005143967434977,
"learning_rate": 4.429280977476321e-06,
"loss": 0.545,
"step": 537
},
{
"epoch": 0.45,
"grad_norm": 1.896479397543979,
"learning_rate": 4.4271998483588565e-06,
"loss": 0.5791,
"step": 538
},
{
"epoch": 0.45,
"grad_norm": 2.117773381781195,
"learning_rate": 4.425115422533785e-06,
"loss": 0.5234,
"step": 539
},
{
"epoch": 0.45,
"grad_norm": 2.4438942429566617,
"learning_rate": 4.423027703566769e-06,
"loss": 0.5692,
"step": 540
},
{
"epoch": 0.45,
"grad_norm": 1.873481152225171,
"learning_rate": 4.4209366950291025e-06,
"loss": 0.5739,
"step": 541
},
{
"epoch": 0.45,
"grad_norm": 1.8655199147974673,
"learning_rate": 4.4188424004977085e-06,
"loss": 0.5795,
"step": 542
},
{
"epoch": 0.45,
"grad_norm": 1.948840412241188,
"learning_rate": 4.416744823555129e-06,
"loss": 0.5304,
"step": 543
},
{
"epoch": 0.45,
"grad_norm": 1.8389034133315045,
"learning_rate": 4.414643967789523e-06,
"loss": 0.5076,
"step": 544
},
{
"epoch": 0.45,
"grad_norm": 1.8269235720085213,
"learning_rate": 4.412539836794657e-06,
"loss": 0.5837,
"step": 545
},
{
"epoch": 0.45,
"grad_norm": 2.1298715969759505,
"learning_rate": 4.410432434169902e-06,
"loss": 0.5694,
"step": 546
},
{
"epoch": 0.45,
"grad_norm": 2.0057741366005746,
"learning_rate": 4.408321763520223e-06,
"loss": 0.557,
"step": 547
},
{
"epoch": 0.45,
"grad_norm": 1.7901331374893255,
"learning_rate": 4.406207828456177e-06,
"loss": 0.5746,
"step": 548
},
{
"epoch": 0.46,
"grad_norm": 2.1994839889416187,
"learning_rate": 4.404090632593904e-06,
"loss": 0.5407,
"step": 549
},
{
"epoch": 0.46,
"grad_norm": 1.9664921082690268,
"learning_rate": 4.401970179555123e-06,
"loss": 0.5322,
"step": 550
},
{
"epoch": 0.46,
"grad_norm": 1.9933486180243851,
"learning_rate": 4.399846472967124e-06,
"loss": 0.5798,
"step": 551
},
{
"epoch": 0.46,
"grad_norm": 1.986612256562151,
"learning_rate": 4.397719516462765e-06,
"loss": 0.5213,
"step": 552
},
{
"epoch": 0.46,
"grad_norm": 2.046550123292336,
"learning_rate": 4.395589313680459e-06,
"loss": 0.5857,
"step": 553
},
{
"epoch": 0.46,
"grad_norm": 1.7902327250340486,
"learning_rate": 4.393455868264176e-06,
"loss": 0.555,
"step": 554
},
{
"epoch": 0.46,
"grad_norm": 2.0203627138517146,
"learning_rate": 4.391319183863432e-06,
"loss": 0.6329,
"step": 555
},
{
"epoch": 0.46,
"grad_norm": 1.9373549045181289,
"learning_rate": 4.389179264133281e-06,
"loss": 0.566,
"step": 556
},
{
"epoch": 0.46,
"grad_norm": 1.8936753353678124,
"learning_rate": 4.387036112734316e-06,
"loss": 0.5555,
"step": 557
},
{
"epoch": 0.46,
"grad_norm": 1.8493817575820743,
"learning_rate": 4.3848897333326545e-06,
"loss": 0.5427,
"step": 558
},
{
"epoch": 0.46,
"grad_norm": 1.9119588677783816,
"learning_rate": 4.382740129599937e-06,
"loss": 0.5157,
"step": 559
},
{
"epoch": 0.46,
"grad_norm": 1.8190137094200924,
"learning_rate": 4.380587305213321e-06,
"loss": 0.503,
"step": 560
},
{
"epoch": 0.47,
"grad_norm": 1.9891332712764953,
"learning_rate": 4.37843126385547e-06,
"loss": 0.5761,
"step": 561
},
{
"epoch": 0.47,
"grad_norm": 1.8620896547461154,
"learning_rate": 4.376272009214555e-06,
"loss": 0.5259,
"step": 562
},
{
"epoch": 0.47,
"grad_norm": 1.8896721756477406,
"learning_rate": 4.37410954498424e-06,
"loss": 0.5632,
"step": 563
},
{
"epoch": 0.47,
"grad_norm": 1.8302281976781984,
"learning_rate": 4.37194387486368e-06,
"loss": 0.5612,
"step": 564
},
{
"epoch": 0.47,
"grad_norm": 2.0721820586440165,
"learning_rate": 4.369775002557516e-06,
"loss": 0.533,
"step": 565
},
{
"epoch": 0.47,
"grad_norm": 1.8259926551813157,
"learning_rate": 4.367602931775865e-06,
"loss": 0.526,
"step": 566
},
{
"epoch": 0.47,
"grad_norm": 1.8096334574000785,
"learning_rate": 4.3654276662343155e-06,
"loss": 0.5306,
"step": 567
},
{
"epoch": 0.47,
"grad_norm": 1.9675637591445598,
"learning_rate": 4.363249209653922e-06,
"loss": 0.5577,
"step": 568
},
{
"epoch": 0.47,
"grad_norm": 1.8800389115841605,
"learning_rate": 4.361067565761197e-06,
"loss": 0.5553,
"step": 569
},
{
"epoch": 0.47,
"grad_norm": 1.827485496395265,
"learning_rate": 4.358882738288105e-06,
"loss": 0.5587,
"step": 570
},
{
"epoch": 0.47,
"grad_norm": 1.820954908943235,
"learning_rate": 4.356694730972056e-06,
"loss": 0.6186,
"step": 571
},
{
"epoch": 0.47,
"grad_norm": 1.952072431699686,
"learning_rate": 4.3545035475559025e-06,
"loss": 0.5488,
"step": 572
},
{
"epoch": 0.48,
"grad_norm": 1.8292648968688423,
"learning_rate": 4.352309191787924e-06,
"loss": 0.5534,
"step": 573
},
{
"epoch": 0.48,
"grad_norm": 1.826293122529813,
"learning_rate": 4.350111667421835e-06,
"loss": 0.5872,
"step": 574
},
{
"epoch": 0.48,
"grad_norm": 1.9251425791166785,
"learning_rate": 4.347910978216763e-06,
"loss": 0.5298,
"step": 575
},
{
"epoch": 0.48,
"grad_norm": 1.8330818196811385,
"learning_rate": 4.345707127937253e-06,
"loss": 0.5871,
"step": 576
},
{
"epoch": 0.48,
"grad_norm": 1.7842986545873851,
"learning_rate": 4.3435001203532555e-06,
"loss": 0.4898,
"step": 577
},
{
"epoch": 0.48,
"grad_norm": 1.8778666245156521,
"learning_rate": 4.341289959240124e-06,
"loss": 0.5385,
"step": 578
},
{
"epoch": 0.48,
"grad_norm": 1.9300679499181266,
"learning_rate": 4.339076648378605e-06,
"loss": 0.5698,
"step": 579
},
{
"epoch": 0.48,
"grad_norm": 1.9440861965960357,
"learning_rate": 4.336860191554833e-06,
"loss": 0.5984,
"step": 580
},
{
"epoch": 0.48,
"grad_norm": 1.929951096053947,
"learning_rate": 4.3346405925603265e-06,
"loss": 0.6222,
"step": 581
},
{
"epoch": 0.48,
"grad_norm": 1.9138258400335695,
"learning_rate": 4.332417855191974e-06,
"loss": 0.5498,
"step": 582
},
{
"epoch": 0.48,
"grad_norm": 2.058548455869675,
"learning_rate": 4.330191983252039e-06,
"loss": 0.5218,
"step": 583
},
{
"epoch": 0.48,
"grad_norm": 2.243429045583125,
"learning_rate": 4.327962980548142e-06,
"loss": 0.5768,
"step": 584
},
{
"epoch": 0.48,
"grad_norm": 1.9213537104634244,
"learning_rate": 4.32573085089326e-06,
"loss": 0.5784,
"step": 585
},
{
"epoch": 0.49,
"grad_norm": 1.9165291289119128,
"learning_rate": 4.32349559810572e-06,
"loss": 0.5697,
"step": 586
},
{
"epoch": 0.49,
"grad_norm": 1.9674279518735756,
"learning_rate": 4.321257226009193e-06,
"loss": 0.5104,
"step": 587
},
{
"epoch": 0.49,
"grad_norm": 1.9051339015323923,
"learning_rate": 4.319015738432683e-06,
"loss": 0.5711,
"step": 588
},
{
"epoch": 0.49,
"grad_norm": 1.957357618850765,
"learning_rate": 4.3167711392105245e-06,
"loss": 0.5854,
"step": 589
},
{
"epoch": 0.49,
"grad_norm": 1.9859311708308915,
"learning_rate": 4.314523432182376e-06,
"loss": 0.547,
"step": 590
},
{
"epoch": 0.49,
"grad_norm": 1.773704456523191,
"learning_rate": 4.312272621193209e-06,
"loss": 0.5259,
"step": 591
},
{
"epoch": 0.49,
"grad_norm": 1.82988033655793,
"learning_rate": 4.31001871009331e-06,
"loss": 0.5209,
"step": 592
},
{
"epoch": 0.49,
"grad_norm": 1.8925134832060522,
"learning_rate": 4.307761702738264e-06,
"loss": 0.59,
"step": 593
},
{
"epoch": 0.49,
"grad_norm": 1.8477075780641046,
"learning_rate": 4.305501602988953e-06,
"loss": 0.5714,
"step": 594
},
{
"epoch": 0.49,
"grad_norm": 1.8568432886623798,
"learning_rate": 4.303238414711552e-06,
"loss": 0.5877,
"step": 595
},
{
"epoch": 0.49,
"grad_norm": 1.8179798660158206,
"learning_rate": 4.3009721417775166e-06,
"loss": 0.6029,
"step": 596
},
{
"epoch": 0.49,
"grad_norm": 1.8494963193854803,
"learning_rate": 4.29870278806358e-06,
"loss": 0.5236,
"step": 597
},
{
"epoch": 0.5,
"grad_norm": 1.9586017397154731,
"learning_rate": 4.296430357451744e-06,
"loss": 0.5998,
"step": 598
},
{
"epoch": 0.5,
"grad_norm": 1.926616057974202,
"learning_rate": 4.2941548538292765e-06,
"loss": 0.5914,
"step": 599
},
{
"epoch": 0.5,
"grad_norm": 1.9321738359144827,
"learning_rate": 4.291876281088701e-06,
"loss": 0.5358,
"step": 600
},
{
"epoch": 0.5,
"grad_norm": 1.8229177571361932,
"learning_rate": 4.289594643127788e-06,
"loss": 0.5284,
"step": 601
},
{
"epoch": 0.5,
"grad_norm": 1.849252449531427,
"learning_rate": 4.287309943849558e-06,
"loss": 0.5689,
"step": 602
},
{
"epoch": 0.5,
"grad_norm": 1.985343175388319,
"learning_rate": 4.285022187162261e-06,
"loss": 0.6101,
"step": 603
},
{
"epoch": 0.5,
"grad_norm": 1.9437791826489255,
"learning_rate": 4.2827313769793835e-06,
"loss": 0.5419,
"step": 604
},
{
"epoch": 0.5,
"grad_norm": 1.8027421078538746,
"learning_rate": 4.28043751721963e-06,
"loss": 0.5504,
"step": 605
},
{
"epoch": 0.5,
"grad_norm": 1.8221230935939319,
"learning_rate": 4.278140611806926e-06,
"loss": 0.5284,
"step": 606
},
{
"epoch": 0.5,
"grad_norm": 1.8597205853821357,
"learning_rate": 4.275840664670403e-06,
"loss": 0.623,
"step": 607
},
{
"epoch": 0.5,
"grad_norm": 1.7801370844338822,
"learning_rate": 4.2735376797444e-06,
"loss": 0.5265,
"step": 608
},
{
"epoch": 0.5,
"grad_norm": 1.9028094416250234,
"learning_rate": 4.271231660968449e-06,
"loss": 0.5764,
"step": 609
},
{
"epoch": 0.51,
"grad_norm": 1.9385737581380094,
"learning_rate": 4.268922612287273e-06,
"loss": 0.6047,
"step": 610
},
{
"epoch": 0.51,
"grad_norm": 1.760006169733744,
"learning_rate": 4.266610537650778e-06,
"loss": 0.4944,
"step": 611
},
{
"epoch": 0.51,
"grad_norm": 1.857083980479501,
"learning_rate": 4.264295441014047e-06,
"loss": 0.5174,
"step": 612
},
{
"epoch": 0.51,
"grad_norm": 1.8299942480819913,
"learning_rate": 4.261977326337332e-06,
"loss": 0.5814,
"step": 613
},
{
"epoch": 0.51,
"grad_norm": 1.8943903433033418,
"learning_rate": 4.259656197586046e-06,
"loss": 0.5514,
"step": 614
},
{
"epoch": 0.51,
"grad_norm": 1.7839062839610529,
"learning_rate": 4.257332058730761e-06,
"loss": 0.5857,
"step": 615
},
{
"epoch": 0.51,
"grad_norm": 2.7188975139736256,
"learning_rate": 4.255004913747196e-06,
"loss": 0.5509,
"step": 616
},
{
"epoch": 0.51,
"grad_norm": 1.8767461602206779,
"learning_rate": 4.252674766616212e-06,
"loss": 0.5038,
"step": 617
},
{
"epoch": 0.51,
"grad_norm": 1.8391588901867753,
"learning_rate": 4.250341621323809e-06,
"loss": 0.5196,
"step": 618
},
{
"epoch": 0.51,
"grad_norm": 1.8106924420187829,
"learning_rate": 4.248005481861111e-06,
"loss": 0.5458,
"step": 619
},
{
"epoch": 0.51,
"grad_norm": 1.9698953511074666,
"learning_rate": 4.245666352224367e-06,
"loss": 0.5963,
"step": 620
},
{
"epoch": 0.51,
"grad_norm": 1.8890424031569348,
"learning_rate": 4.243324236414939e-06,
"loss": 0.5277,
"step": 621
},
{
"epoch": 0.52,
"grad_norm": 1.8537879418167673,
"learning_rate": 4.240979138439301e-06,
"loss": 0.5407,
"step": 622
},
{
"epoch": 0.52,
"grad_norm": 1.9264981771759184,
"learning_rate": 4.238631062309023e-06,
"loss": 0.5788,
"step": 623
},
{
"epoch": 0.52,
"grad_norm": 1.949693389062837,
"learning_rate": 4.236280012040773e-06,
"loss": 0.5007,
"step": 624
},
{
"epoch": 0.52,
"grad_norm": 1.8845778025905608,
"learning_rate": 4.233925991656307e-06,
"loss": 0.5905,
"step": 625
},
{
"epoch": 0.52,
"grad_norm": 1.8977167810192608,
"learning_rate": 4.231569005182459e-06,
"loss": 0.5342,
"step": 626
},
{
"epoch": 0.52,
"grad_norm": 1.9579196623045914,
"learning_rate": 4.229209056651139e-06,
"loss": 0.554,
"step": 627
},
{
"epoch": 0.52,
"grad_norm": 1.8427820272426025,
"learning_rate": 4.226846150099324e-06,
"loss": 0.5629,
"step": 628
},
{
"epoch": 0.52,
"grad_norm": 1.865218131227253,
"learning_rate": 4.22448028956905e-06,
"loss": 0.558,
"step": 629
},
{
"epoch": 0.52,
"grad_norm": 1.7348773966225364,
"learning_rate": 4.222111479107406e-06,
"loss": 0.5332,
"step": 630
},
{
"epoch": 0.52,
"grad_norm": 1.779367140127678,
"learning_rate": 4.219739722766528e-06,
"loss": 0.569,
"step": 631
},
{
"epoch": 0.52,
"grad_norm": 1.92860570712595,
"learning_rate": 4.217365024603592e-06,
"loss": 0.5342,
"step": 632
},
{
"epoch": 0.52,
"grad_norm": 1.946965997476449,
"learning_rate": 4.214987388680804e-06,
"loss": 0.5482,
"step": 633
},
{
"epoch": 0.53,
"grad_norm": 1.7930454990298659,
"learning_rate": 4.212606819065399e-06,
"loss": 0.5376,
"step": 634
},
{
"epoch": 0.53,
"grad_norm": 1.8379498458279013,
"learning_rate": 4.210223319829626e-06,
"loss": 0.5741,
"step": 635
},
{
"epoch": 0.53,
"grad_norm": 1.742977498596499,
"learning_rate": 4.207836895050748e-06,
"loss": 0.5569,
"step": 636
},
{
"epoch": 0.53,
"grad_norm": 1.852541709372898,
"learning_rate": 4.205447548811032e-06,
"loss": 0.578,
"step": 637
},
{
"epoch": 0.53,
"grad_norm": 1.8180259569107267,
"learning_rate": 4.203055285197745e-06,
"loss": 0.5189,
"step": 638
},
{
"epoch": 0.53,
"grad_norm": 1.8177842562763082,
"learning_rate": 4.20066010830314e-06,
"loss": 0.5424,
"step": 639
},
{
"epoch": 0.53,
"grad_norm": 1.8068654723170434,
"learning_rate": 4.198262022224457e-06,
"loss": 0.5336,
"step": 640
},
{
"epoch": 0.53,
"grad_norm": 1.9664843499052276,
"learning_rate": 4.195861031063909e-06,
"loss": 0.5399,
"step": 641
},
{
"epoch": 0.53,
"grad_norm": 1.7812265481792608,
"learning_rate": 4.193457138928683e-06,
"loss": 0.534,
"step": 642
},
{
"epoch": 0.53,
"grad_norm": 1.908377487778027,
"learning_rate": 4.191050349930925e-06,
"loss": 0.5831,
"step": 643
},
{
"epoch": 0.53,
"grad_norm": 1.8124678634933105,
"learning_rate": 4.18864066818774e-06,
"loss": 0.5309,
"step": 644
},
{
"epoch": 0.53,
"grad_norm": 1.902443199964304,
"learning_rate": 4.186228097821176e-06,
"loss": 0.5452,
"step": 645
},
{
"epoch": 0.54,
"grad_norm": 1.9694387068719457,
"learning_rate": 4.183812642958227e-06,
"loss": 0.5462,
"step": 646
},
{
"epoch": 0.54,
"grad_norm": 1.945352264767711,
"learning_rate": 4.181394307730819e-06,
"loss": 0.4853,
"step": 647
},
{
"epoch": 0.54,
"grad_norm": 1.7967416728436914,
"learning_rate": 4.178973096275806e-06,
"loss": 0.5952,
"step": 648
},
{
"epoch": 0.54,
"grad_norm": 2.0602433101771616,
"learning_rate": 4.176549012734963e-06,
"loss": 0.6346,
"step": 649
},
{
"epoch": 0.54,
"grad_norm": 1.9158731498204968,
"learning_rate": 4.1741220612549746e-06,
"loss": 0.5101,
"step": 650
},
{
"epoch": 0.54,
"grad_norm": 1.951875972207364,
"learning_rate": 4.171692245987436e-06,
"loss": 0.5718,
"step": 651
},
{
"epoch": 0.54,
"grad_norm": 1.871788727804539,
"learning_rate": 4.169259571088839e-06,
"loss": 0.5516,
"step": 652
},
{
"epoch": 0.54,
"grad_norm": 1.945571804366465,
"learning_rate": 4.166824040720566e-06,
"loss": 0.5544,
"step": 653
},
{
"epoch": 0.54,
"grad_norm": 1.8975723622706568,
"learning_rate": 4.1643856590488866e-06,
"loss": 0.5643,
"step": 654
},
{
"epoch": 0.54,
"grad_norm": 1.9772846459626554,
"learning_rate": 4.161944430244945e-06,
"loss": 0.5487,
"step": 655
},
{
"epoch": 0.54,
"grad_norm": 2.036472038769578,
"learning_rate": 4.159500358484759e-06,
"loss": 0.5232,
"step": 656
},
{
"epoch": 0.54,
"grad_norm": 1.7742095436926848,
"learning_rate": 4.157053447949206e-06,
"loss": 0.4963,
"step": 657
},
{
"epoch": 0.55,
"grad_norm": 2.1819742476725814,
"learning_rate": 4.154603702824023e-06,
"loss": 0.5416,
"step": 658
},
{
"epoch": 0.55,
"grad_norm": 1.9151345309457093,
"learning_rate": 4.152151127299794e-06,
"loss": 0.5822,
"step": 659
},
{
"epoch": 0.55,
"grad_norm": 2.033640859083771,
"learning_rate": 4.149695725571944e-06,
"loss": 0.5876,
"step": 660
},
{
"epoch": 0.55,
"grad_norm": 1.8935471013235925,
"learning_rate": 4.147237501840734e-06,
"loss": 0.548,
"step": 661
},
{
"epoch": 0.55,
"grad_norm": 1.7836299476774775,
"learning_rate": 4.144776460311253e-06,
"loss": 0.5274,
"step": 662
},
{
"epoch": 0.55,
"grad_norm": 2.194666072449123,
"learning_rate": 4.142312605193407e-06,
"loss": 0.5934,
"step": 663
},
{
"epoch": 0.55,
"grad_norm": 1.988265407508224,
"learning_rate": 4.13984594070192e-06,
"loss": 0.5539,
"step": 664
},
{
"epoch": 0.55,
"grad_norm": 1.7594955740187146,
"learning_rate": 4.137376471056317e-06,
"loss": 0.5324,
"step": 665
},
{
"epoch": 0.55,
"grad_norm": 1.9342530277100989,
"learning_rate": 4.1349042004809224e-06,
"loss": 0.5902,
"step": 666
},
{
"epoch": 0.55,
"grad_norm": 1.9757082453588417,
"learning_rate": 4.132429133204856e-06,
"loss": 0.5874,
"step": 667
},
{
"epoch": 0.55,
"grad_norm": 1.7792467343474774,
"learning_rate": 4.129951273462016e-06,
"loss": 0.5516,
"step": 668
},
{
"epoch": 0.55,
"grad_norm": 1.9010392264817964,
"learning_rate": 4.127470625491082e-06,
"loss": 0.5793,
"step": 669
},
{
"epoch": 0.56,
"grad_norm": 2.054505290884914,
"learning_rate": 4.1249871935355e-06,
"loss": 0.5718,
"step": 670
},
{
"epoch": 0.56,
"grad_norm": 1.8010036617727825,
"learning_rate": 4.1225009818434805e-06,
"loss": 0.5698,
"step": 671
},
{
"epoch": 0.56,
"grad_norm": 1.975020822034628,
"learning_rate": 4.120011994667988e-06,
"loss": 0.5739,
"step": 672
},
{
"epoch": 0.56,
"grad_norm": 1.9801075045379748,
"learning_rate": 4.117520236266734e-06,
"loss": 0.5589,
"step": 673
},
{
"epoch": 0.56,
"grad_norm": 1.7773808874926829,
"learning_rate": 4.115025710902173e-06,
"loss": 0.5276,
"step": 674
},
{
"epoch": 0.56,
"grad_norm": 1.890298398205481,
"learning_rate": 4.112528422841491e-06,
"loss": 0.4914,
"step": 675
},
{
"epoch": 0.56,
"grad_norm": 1.9087570296379215,
"learning_rate": 4.110028376356599e-06,
"loss": 0.5412,
"step": 676
},
{
"epoch": 0.56,
"grad_norm": 1.8908271691889404,
"learning_rate": 4.1075255757241295e-06,
"loss": 0.5618,
"step": 677
},
{
"epoch": 0.56,
"grad_norm": 2.024312170169272,
"learning_rate": 4.105020025225423e-06,
"loss": 0.5618,
"step": 678
},
{
"epoch": 0.56,
"grad_norm": 1.8072403207581518,
"learning_rate": 4.102511729146528e-06,
"loss": 0.5744,
"step": 679
},
{
"epoch": 0.56,
"grad_norm": 1.7750572145097157,
"learning_rate": 4.100000691778185e-06,
"loss": 0.5716,
"step": 680
},
{
"epoch": 0.56,
"grad_norm": 1.8778337896632162,
"learning_rate": 4.097486917415827e-06,
"loss": 0.5683,
"step": 681
},
{
"epoch": 0.57,
"grad_norm": 1.9710167098273688,
"learning_rate": 4.094970410359568e-06,
"loss": 0.5273,
"step": 682
},
{
"epoch": 0.57,
"grad_norm": 1.9136975523972874,
"learning_rate": 4.092451174914196e-06,
"loss": 0.5239,
"step": 683
},
{
"epoch": 0.57,
"grad_norm": 1.929344793900944,
"learning_rate": 4.089929215389167e-06,
"loss": 0.5388,
"step": 684
},
{
"epoch": 0.57,
"grad_norm": 1.7211535229712278,
"learning_rate": 4.087404536098597e-06,
"loss": 0.5068,
"step": 685
},
{
"epoch": 0.57,
"grad_norm": 1.8739637749458882,
"learning_rate": 4.084877141361254e-06,
"loss": 0.5537,
"step": 686
},
{
"epoch": 0.57,
"grad_norm": 1.9268469960932768,
"learning_rate": 4.082347035500553e-06,
"loss": 0.5875,
"step": 687
},
{
"epoch": 0.57,
"grad_norm": 1.896542320004603,
"learning_rate": 4.079814222844541e-06,
"loss": 0.5314,
"step": 688
},
{
"epoch": 0.57,
"grad_norm": 1.723925126440519,
"learning_rate": 4.077278707725904e-06,
"loss": 0.5009,
"step": 689
},
{
"epoch": 0.57,
"grad_norm": 1.8345210205201996,
"learning_rate": 4.074740494481942e-06,
"loss": 0.5544,
"step": 690
},
{
"epoch": 0.57,
"grad_norm": 1.766819080519227,
"learning_rate": 4.072199587454578e-06,
"loss": 0.5393,
"step": 691
},
{
"epoch": 0.57,
"grad_norm": 1.9577975399484282,
"learning_rate": 4.069655990990337e-06,
"loss": 0.5357,
"step": 692
},
{
"epoch": 0.57,
"grad_norm": 1.8254761359015224,
"learning_rate": 4.06710970944035e-06,
"loss": 0.5797,
"step": 693
},
{
"epoch": 0.58,
"grad_norm": 2.1203973374999214,
"learning_rate": 4.064560747160337e-06,
"loss": 0.5811,
"step": 694
},
{
"epoch": 0.58,
"grad_norm": 1.9066221824053846,
"learning_rate": 4.062009108510605e-06,
"loss": 0.5014,
"step": 695
},
{
"epoch": 0.58,
"grad_norm": 1.951489716071849,
"learning_rate": 4.059454797856039e-06,
"loss": 0.529,
"step": 696
},
{
"epoch": 0.58,
"grad_norm": 1.8402907113209426,
"learning_rate": 4.056897819566096e-06,
"loss": 0.4942,
"step": 697
},
{
"epoch": 0.58,
"grad_norm": 2.0368715640768498,
"learning_rate": 4.0543381780147965e-06,
"loss": 0.5245,
"step": 698
},
{
"epoch": 0.58,
"grad_norm": 1.8154462049772704,
"learning_rate": 4.0517758775807135e-06,
"loss": 0.4979,
"step": 699
},
{
"epoch": 0.58,
"grad_norm": 1.890388895335948,
"learning_rate": 4.049210922646973e-06,
"loss": 0.5212,
"step": 700
},
{
"epoch": 0.58,
"grad_norm": 2.0215900504030166,
"learning_rate": 4.046643317601237e-06,
"loss": 0.5384,
"step": 701
},
{
"epoch": 0.58,
"grad_norm": 1.816997259900234,
"learning_rate": 4.0440730668357076e-06,
"loss": 0.492,
"step": 702
},
{
"epoch": 0.58,
"grad_norm": 1.968633766153865,
"learning_rate": 4.0415001747471036e-06,
"loss": 0.5917,
"step": 703
},
{
"epoch": 0.58,
"grad_norm": 1.8313487810801756,
"learning_rate": 4.0389246457366696e-06,
"loss": 0.5561,
"step": 704
},
{
"epoch": 0.58,
"grad_norm": 1.7954421155528784,
"learning_rate": 4.036346484210159e-06,
"loss": 0.5383,
"step": 705
},
{
"epoch": 0.59,
"grad_norm": 1.8517101217315919,
"learning_rate": 4.033765694577826e-06,
"loss": 0.5368,
"step": 706
},
{
"epoch": 0.59,
"grad_norm": 1.8888441616203875,
"learning_rate": 4.031182281254423e-06,
"loss": 0.5895,
"step": 707
},
{
"epoch": 0.59,
"grad_norm": 1.8131436351862782,
"learning_rate": 4.028596248659191e-06,
"loss": 0.5346,
"step": 708
},
{
"epoch": 0.59,
"grad_norm": 1.8803113487311214,
"learning_rate": 4.0260076012158486e-06,
"loss": 0.4987,
"step": 709
},
{
"epoch": 0.59,
"grad_norm": 1.8989122650791335,
"learning_rate": 4.023416343352589e-06,
"loss": 0.5007,
"step": 710
},
{
"epoch": 0.59,
"grad_norm": 1.9466291969735336,
"learning_rate": 4.020822479502074e-06,
"loss": 0.5868,
"step": 711
},
{
"epoch": 0.59,
"grad_norm": 1.869533367998661,
"learning_rate": 4.018226014101418e-06,
"loss": 0.5995,
"step": 712
},
{
"epoch": 0.59,
"grad_norm": 1.93738608926368,
"learning_rate": 4.015626951592187e-06,
"loss": 0.5625,
"step": 713
},
{
"epoch": 0.59,
"grad_norm": 1.8485080870897803,
"learning_rate": 4.013025296420394e-06,
"loss": 0.5585,
"step": 714
},
{
"epoch": 0.59,
"grad_norm": 1.8099669115387913,
"learning_rate": 4.010421053036481e-06,
"loss": 0.5384,
"step": 715
},
{
"epoch": 0.59,
"grad_norm": 1.8810123612010912,
"learning_rate": 4.007814225895321e-06,
"loss": 0.5589,
"step": 716
},
{
"epoch": 0.59,
"grad_norm": 1.8692823610937885,
"learning_rate": 4.005204819456205e-06,
"loss": 0.5474,
"step": 717
},
{
"epoch": 0.6,
"grad_norm": 1.8120887102918588,
"learning_rate": 4.00259283818284e-06,
"loss": 0.5138,
"step": 718
},
{
"epoch": 0.6,
"grad_norm": 1.7933926935301234,
"learning_rate": 3.999978286543331e-06,
"loss": 0.5235,
"step": 719
},
{
"epoch": 0.6,
"grad_norm": 1.8382360731306235,
"learning_rate": 3.997361169010187e-06,
"loss": 0.5846,
"step": 720
},
{
"epoch": 0.6,
"grad_norm": 1.993925306673069,
"learning_rate": 3.994741490060301e-06,
"loss": 0.5561,
"step": 721
},
{
"epoch": 0.6,
"grad_norm": 1.900088669959918,
"learning_rate": 3.9921192541749505e-06,
"loss": 0.5215,
"step": 722
},
{
"epoch": 0.6,
"grad_norm": 1.9250072769385074,
"learning_rate": 3.989494465839785e-06,
"loss": 0.54,
"step": 723
},
{
"epoch": 0.6,
"grad_norm": 1.7928905908766457,
"learning_rate": 3.986867129544822e-06,
"loss": 0.6066,
"step": 724
},
{
"epoch": 0.6,
"grad_norm": 1.9474900039545116,
"learning_rate": 3.984237249784437e-06,
"loss": 0.5173,
"step": 725
},
{
"epoch": 0.6,
"grad_norm": 1.9004077336349998,
"learning_rate": 3.981604831057357e-06,
"loss": 0.5409,
"step": 726
},
{
"epoch": 0.6,
"grad_norm": 1.7573843693188624,
"learning_rate": 3.97896987786665e-06,
"loss": 0.5239,
"step": 727
},
{
"epoch": 0.6,
"grad_norm": 1.899283660379949,
"learning_rate": 3.976332394719721e-06,
"loss": 0.4977,
"step": 728
},
{
"epoch": 0.6,
"grad_norm": 1.8353476568345033,
"learning_rate": 3.973692386128304e-06,
"loss": 0.5834,
"step": 729
},
{
"epoch": 0.61,
"grad_norm": 2.032325534167748,
"learning_rate": 3.971049856608451e-06,
"loss": 0.5343,
"step": 730
},
{
"epoch": 0.61,
"grad_norm": 1.8161347764383835,
"learning_rate": 3.9684048106805286e-06,
"loss": 0.585,
"step": 731
},
{
"epoch": 0.61,
"grad_norm": 1.836376388525165,
"learning_rate": 3.965757252869204e-06,
"loss": 0.5978,
"step": 732
},
{
"epoch": 0.61,
"grad_norm": 1.889118862096067,
"learning_rate": 3.963107187703446e-06,
"loss": 0.5393,
"step": 733
},
{
"epoch": 0.61,
"grad_norm": 1.7772829607776217,
"learning_rate": 3.96045461971651e-06,
"loss": 0.5164,
"step": 734
},
{
"epoch": 0.61,
"grad_norm": 1.7980410807492582,
"learning_rate": 3.957799553445932e-06,
"loss": 0.5455,
"step": 735
},
{
"epoch": 0.61,
"grad_norm": 1.907936099702467,
"learning_rate": 3.955141993433526e-06,
"loss": 0.532,
"step": 736
},
{
"epoch": 0.61,
"grad_norm": 1.8668064740862462,
"learning_rate": 3.9524819442253645e-06,
"loss": 0.5578,
"step": 737
},
{
"epoch": 0.61,
"grad_norm": 1.838952740378055,
"learning_rate": 3.949819410371785e-06,
"loss": 0.5784,
"step": 738
},
{
"epoch": 0.61,
"grad_norm": 1.9595767898211005,
"learning_rate": 3.947154396427373e-06,
"loss": 0.5213,
"step": 739
},
{
"epoch": 0.61,
"grad_norm": 1.9422968944070973,
"learning_rate": 3.944486906950954e-06,
"loss": 0.5709,
"step": 740
},
{
"epoch": 0.61,
"grad_norm": 1.760556693040696,
"learning_rate": 3.941816946505592e-06,
"loss": 0.5564,
"step": 741
},
{
"epoch": 0.62,
"grad_norm": 1.8054841879427592,
"learning_rate": 3.939144519658575e-06,
"loss": 0.5435,
"step": 742
},
{
"epoch": 0.62,
"grad_norm": 2.1072923992538,
"learning_rate": 3.936469630981412e-06,
"loss": 0.5622,
"step": 743
},
{
"epoch": 0.62,
"grad_norm": 1.711687978027928,
"learning_rate": 3.933792285049821e-06,
"loss": 0.5554,
"step": 744
},
{
"epoch": 0.62,
"grad_norm": 1.8166543944942228,
"learning_rate": 3.931112486443727e-06,
"loss": 0.5079,
"step": 745
},
{
"epoch": 0.62,
"grad_norm": 1.7923405334139695,
"learning_rate": 3.928430239747246e-06,
"loss": 0.5692,
"step": 746
},
{
"epoch": 0.62,
"grad_norm": 1.9611773239667012,
"learning_rate": 3.925745549548687e-06,
"loss": 0.5092,
"step": 747
},
{
"epoch": 0.62,
"grad_norm": 1.8440088039871827,
"learning_rate": 3.923058420440534e-06,
"loss": 0.5369,
"step": 748
},
{
"epoch": 0.62,
"grad_norm": 1.9272316571307881,
"learning_rate": 3.920368857019447e-06,
"loss": 0.5798,
"step": 749
},
{
"epoch": 0.62,
"grad_norm": 1.8248503445199376,
"learning_rate": 3.917676863886246e-06,
"loss": 0.5479,
"step": 750
},
{
"epoch": 0.62,
"grad_norm": 1.9200626612083824,
"learning_rate": 3.914982445645912e-06,
"loss": 0.549,
"step": 751
},
{
"epoch": 0.62,
"grad_norm": 1.8585556832275227,
"learning_rate": 3.91228560690757e-06,
"loss": 0.5283,
"step": 752
},
{
"epoch": 0.62,
"grad_norm": 1.819239895382093,
"learning_rate": 3.90958635228449e-06,
"loss": 0.535,
"step": 753
},
{
"epoch": 0.63,
"grad_norm": 1.7810389942543545,
"learning_rate": 3.90688468639407e-06,
"loss": 0.5125,
"step": 754
},
{
"epoch": 0.63,
"grad_norm": 1.9614453700373935,
"learning_rate": 3.904180613857837e-06,
"loss": 0.5406,
"step": 755
},
{
"epoch": 0.63,
"grad_norm": 1.805104940263808,
"learning_rate": 3.901474139301433e-06,
"loss": 0.5794,
"step": 756
},
{
"epoch": 0.63,
"grad_norm": 1.78756289235025,
"learning_rate": 3.898765267354607e-06,
"loss": 0.569,
"step": 757
},
{
"epoch": 0.63,
"grad_norm": 1.912300438003516,
"learning_rate": 3.896054002651213e-06,
"loss": 0.5565,
"step": 758
},
{
"epoch": 0.63,
"grad_norm": 1.8148356694353722,
"learning_rate": 3.893340349829195e-06,
"loss": 0.5471,
"step": 759
},
{
"epoch": 0.63,
"grad_norm": 1.6836223387492706,
"learning_rate": 3.890624313530583e-06,
"loss": 0.5145,
"step": 760
},
{
"epoch": 0.63,
"grad_norm": 1.8389298216964765,
"learning_rate": 3.887905898401485e-06,
"loss": 0.5441,
"step": 761
},
{
"epoch": 0.63,
"grad_norm": 1.7845754057436856,
"learning_rate": 3.885185109092078e-06,
"loss": 0.5478,
"step": 762
},
{
"epoch": 0.63,
"grad_norm": 1.77076035925993,
"learning_rate": 3.882461950256598e-06,
"loss": 0.5497,
"step": 763
},
{
"epoch": 0.63,
"grad_norm": 1.8011284465286703,
"learning_rate": 3.87973642655334e-06,
"loss": 0.5039,
"step": 764
},
{
"epoch": 0.63,
"grad_norm": 1.7400129481667248,
"learning_rate": 3.877008542644637e-06,
"loss": 0.5243,
"step": 765
},
{
"epoch": 0.64,
"grad_norm": 1.9899565111682327,
"learning_rate": 3.874278303196866e-06,
"loss": 0.5767,
"step": 766
},
{
"epoch": 0.64,
"grad_norm": 1.8345576263874734,
"learning_rate": 3.871545712880429e-06,
"loss": 0.5262,
"step": 767
},
{
"epoch": 0.64,
"grad_norm": 1.8375211207672395,
"learning_rate": 3.8688107763697505e-06,
"loss": 0.5467,
"step": 768
},
{
"epoch": 0.64,
"grad_norm": 1.8068462280574835,
"learning_rate": 3.8660734983432715e-06,
"loss": 0.5256,
"step": 769
},
{
"epoch": 0.64,
"grad_norm": 1.7823522202158735,
"learning_rate": 3.863333883483433e-06,
"loss": 0.5419,
"step": 770
},
{
"epoch": 0.64,
"grad_norm": 1.8881514180214427,
"learning_rate": 3.86059193647668e-06,
"loss": 0.541,
"step": 771
},
{
"epoch": 0.64,
"grad_norm": 1.8311064595650786,
"learning_rate": 3.85784766201344e-06,
"loss": 0.5455,
"step": 772
},
{
"epoch": 0.64,
"grad_norm": 1.9833459774866717,
"learning_rate": 3.855101064788126e-06,
"loss": 0.5723,
"step": 773
},
{
"epoch": 0.64,
"grad_norm": 1.7968096633022903,
"learning_rate": 3.852352149499125e-06,
"loss": 0.5153,
"step": 774
},
{
"epoch": 0.64,
"grad_norm": 1.775423895652992,
"learning_rate": 3.849600920848787e-06,
"loss": 0.5134,
"step": 775
},
{
"epoch": 0.64,
"grad_norm": 1.7262892998825556,
"learning_rate": 3.84684738354342e-06,
"loss": 0.5287,
"step": 776
},
{
"epoch": 0.64,
"grad_norm": 1.7866135638778051,
"learning_rate": 3.84409154229328e-06,
"loss": 0.57,
"step": 777
},
{
"epoch": 0.64,
"grad_norm": 1.787377916112687,
"learning_rate": 3.841333401812569e-06,
"loss": 0.5312,
"step": 778
},
{
"epoch": 0.65,
"grad_norm": 1.684801862246949,
"learning_rate": 3.838572966819416e-06,
"loss": 0.5822,
"step": 779
},
{
"epoch": 0.65,
"grad_norm": 1.79074773131748,
"learning_rate": 3.835810242035879e-06,
"loss": 0.5651,
"step": 780
},
{
"epoch": 0.65,
"grad_norm": 1.9234904827178134,
"learning_rate": 3.8330452321879305e-06,
"loss": 0.5527,
"step": 781
},
{
"epoch": 0.65,
"grad_norm": 2.1733402579018186,
"learning_rate": 3.830277942005455e-06,
"loss": 0.5545,
"step": 782
},
{
"epoch": 0.65,
"grad_norm": 2.112229504682016,
"learning_rate": 3.827508376222233e-06,
"loss": 0.5766,
"step": 783
},
{
"epoch": 0.65,
"grad_norm": 2.087174122744587,
"learning_rate": 3.824736539575944e-06,
"loss": 0.549,
"step": 784
},
{
"epoch": 0.65,
"grad_norm": 1.9570382810890106,
"learning_rate": 3.821962436808145e-06,
"loss": 0.4984,
"step": 785
},
{
"epoch": 0.65,
"grad_norm": 1.94720853153738,
"learning_rate": 3.819186072664277e-06,
"loss": 0.5303,
"step": 786
},
{
"epoch": 0.65,
"grad_norm": 2.21095404069362,
"learning_rate": 3.816407451893643e-06,
"loss": 0.5674,
"step": 787
},
{
"epoch": 0.65,
"grad_norm": 1.7284336698899117,
"learning_rate": 3.8136265792494094e-06,
"loss": 0.5952,
"step": 788
},
{
"epoch": 0.65,
"grad_norm": 1.940869697529687,
"learning_rate": 3.8108434594885934e-06,
"loss": 0.5198,
"step": 789
},
{
"epoch": 0.65,
"grad_norm": 1.9282749931884566,
"learning_rate": 3.808058097372057e-06,
"loss": 0.5499,
"step": 790
},
{
"epoch": 0.66,
"grad_norm": 2.0180195532646983,
"learning_rate": 3.8052704976644984e-06,
"loss": 0.5117,
"step": 791
},
{
"epoch": 0.66,
"grad_norm": 1.8303561179366206,
"learning_rate": 3.8024806651344424e-06,
"loss": 0.5034,
"step": 792
},
{
"epoch": 0.66,
"grad_norm": 2.0584295539484754,
"learning_rate": 3.7996886045542335e-06,
"loss": 0.5391,
"step": 793
},
{
"epoch": 0.66,
"grad_norm": 1.7736893833047733,
"learning_rate": 3.7968943207000284e-06,
"loss": 0.5378,
"step": 794
},
{
"epoch": 0.66,
"grad_norm": 1.7840353008162277,
"learning_rate": 3.794097818351786e-06,
"loss": 0.5091,
"step": 795
},
{
"epoch": 0.66,
"grad_norm": 2.0949100717616225,
"learning_rate": 3.791299102293261e-06,
"loss": 0.5731,
"step": 796
},
{
"epoch": 0.66,
"grad_norm": 2.048353193294094,
"learning_rate": 3.7884981773119943e-06,
"loss": 0.5576,
"step": 797
},
{
"epoch": 0.66,
"grad_norm": 1.9990070284918733,
"learning_rate": 3.7856950481993054e-06,
"loss": 0.5297,
"step": 798
},
{
"epoch": 0.66,
"grad_norm": 1.859560152641746,
"learning_rate": 3.7828897197502856e-06,
"loss": 0.5131,
"step": 799
},
{
"epoch": 0.66,
"grad_norm": 2.0054802770873916,
"learning_rate": 3.780082196763785e-06,
"loss": 0.5428,
"step": 800
},
{
"epoch": 0.66,
"grad_norm": 1.8985367093585213,
"learning_rate": 3.7772724840424126e-06,
"loss": 0.5206,
"step": 801
},
{
"epoch": 0.66,
"grad_norm": 1.9964704653764362,
"learning_rate": 3.774460586392519e-06,
"loss": 0.5929,
"step": 802
},
{
"epoch": 0.67,
"grad_norm": 1.7572936836574113,
"learning_rate": 3.771646508624194e-06,
"loss": 0.5428,
"step": 803
},
{
"epoch": 0.67,
"grad_norm": 1.9623695483620975,
"learning_rate": 3.768830255551258e-06,
"loss": 0.5685,
"step": 804
},
{
"epoch": 0.67,
"grad_norm": 1.9663290616402378,
"learning_rate": 3.76601183199125e-06,
"loss": 0.5351,
"step": 805
},
{
"epoch": 0.67,
"grad_norm": 1.7876590847889615,
"learning_rate": 3.763191242765424e-06,
"loss": 0.567,
"step": 806
},
{
"epoch": 0.67,
"grad_norm": 1.8500820456277005,
"learning_rate": 3.7603684926987383e-06,
"loss": 0.523,
"step": 807
},
{
"epoch": 0.67,
"grad_norm": 2.041973125533567,
"learning_rate": 3.757543586619845e-06,
"loss": 0.5531,
"step": 808
},
{
"epoch": 0.67,
"grad_norm": 1.7440376746222928,
"learning_rate": 3.754716529361089e-06,
"loss": 0.4913,
"step": 809
},
{
"epoch": 0.67,
"grad_norm": 1.7910937306897654,
"learning_rate": 3.7518873257584897e-06,
"loss": 0.5128,
"step": 810
},
{
"epoch": 0.67,
"grad_norm": 1.9334392608388238,
"learning_rate": 3.7490559806517434e-06,
"loss": 0.5861,
"step": 811
},
{
"epoch": 0.67,
"grad_norm": 2.0003597857127673,
"learning_rate": 3.746222498884206e-06,
"loss": 0.5535,
"step": 812
},
{
"epoch": 0.67,
"grad_norm": 1.7964615198133413,
"learning_rate": 3.74338688530289e-06,
"loss": 0.5409,
"step": 813
},
{
"epoch": 0.67,
"grad_norm": 1.7726488990007383,
"learning_rate": 3.740549144758453e-06,
"loss": 0.5714,
"step": 814
},
{
"epoch": 0.68,
"grad_norm": 1.9080323144095523,
"learning_rate": 3.737709282105193e-06,
"loss": 0.5534,
"step": 815
},
{
"epoch": 0.68,
"grad_norm": 1.9612361354867969,
"learning_rate": 3.734867302201038e-06,
"loss": 0.5282,
"step": 816
},
{
"epoch": 0.68,
"grad_norm": 1.873254058551618,
"learning_rate": 3.7320232099075363e-06,
"loss": 0.5422,
"step": 817
},
{
"epoch": 0.68,
"grad_norm": 1.8383882069199007,
"learning_rate": 3.7291770100898508e-06,
"loss": 0.5588,
"step": 818
},
{
"epoch": 0.68,
"grad_norm": 2.0137053963220835,
"learning_rate": 3.726328707616749e-06,
"loss": 0.5895,
"step": 819
},
{
"epoch": 0.68,
"grad_norm": 1.8207549211692964,
"learning_rate": 3.7234783073605957e-06,
"loss": 0.5428,
"step": 820
},
{
"epoch": 0.68,
"grad_norm": 1.7929761418069659,
"learning_rate": 3.7206258141973445e-06,
"loss": 0.555,
"step": 821
},
{
"epoch": 0.68,
"grad_norm": 1.8863691259545465,
"learning_rate": 3.7177712330065285e-06,
"loss": 0.5802,
"step": 822
},
{
"epoch": 0.68,
"grad_norm": 1.8383911000943605,
"learning_rate": 3.714914568671252e-06,
"loss": 0.4986,
"step": 823
},
{
"epoch": 0.68,
"grad_norm": 2.0032777947804044,
"learning_rate": 3.7120558260781846e-06,
"loss": 0.6456,
"step": 824
},
{
"epoch": 0.68,
"grad_norm": 1.733320874844507,
"learning_rate": 3.709195010117551e-06,
"loss": 0.5146,
"step": 825
},
{
"epoch": 0.68,
"grad_norm": 1.7411187007421471,
"learning_rate": 3.7063321256831193e-06,
"loss": 0.5297,
"step": 826
},
{
"epoch": 0.69,
"grad_norm": 1.8334107493901353,
"learning_rate": 3.7034671776722003e-06,
"loss": 0.545,
"step": 827
},
{
"epoch": 0.69,
"grad_norm": 1.931467221651553,
"learning_rate": 3.7006001709856314e-06,
"loss": 0.579,
"step": 828
},
{
"epoch": 0.69,
"grad_norm": 1.799522216655623,
"learning_rate": 3.697731110527774e-06,
"loss": 0.5453,
"step": 829
},
{
"epoch": 0.69,
"grad_norm": 1.8098119388805842,
"learning_rate": 3.6948600012065016e-06,
"loss": 0.5186,
"step": 830
},
{
"epoch": 0.69,
"grad_norm": 1.8419013342395714,
"learning_rate": 3.6919868479331934e-06,
"loss": 0.4833,
"step": 831
},
{
"epoch": 0.69,
"grad_norm": 1.8419148322752323,
"learning_rate": 3.6891116556227234e-06,
"loss": 0.5479,
"step": 832
},
{
"epoch": 0.69,
"grad_norm": 1.7858200344474908,
"learning_rate": 3.6862344291934545e-06,
"loss": 0.5264,
"step": 833
},
{
"epoch": 0.69,
"grad_norm": 1.8057437623830686,
"learning_rate": 3.6833551735672293e-06,
"loss": 0.5208,
"step": 834
},
{
"epoch": 0.69,
"grad_norm": 1.8570584000334132,
"learning_rate": 3.6804738936693617e-06,
"loss": 0.5652,
"step": 835
},
{
"epoch": 0.69,
"grad_norm": 1.7961732805960369,
"learning_rate": 3.677590594428629e-06,
"loss": 0.5693,
"step": 836
},
{
"epoch": 0.69,
"grad_norm": 1.954108513879844,
"learning_rate": 3.6747052807772614e-06,
"loss": 0.5673,
"step": 837
},
{
"epoch": 0.69,
"grad_norm": 1.834152772161213,
"learning_rate": 3.671817957650936e-06,
"loss": 0.5118,
"step": 838
},
{
"epoch": 0.7,
"grad_norm": 1.8035026424969205,
"learning_rate": 3.6689286299887663e-06,
"loss": 0.5778,
"step": 839
},
{
"epoch": 0.7,
"grad_norm": 1.7862771700309947,
"learning_rate": 3.666037302733295e-06,
"loss": 0.5575,
"step": 840
},
{
"epoch": 0.7,
"grad_norm": 1.7398650592861555,
"learning_rate": 3.6631439808304874e-06,
"loss": 0.5323,
"step": 841
},
{
"epoch": 0.7,
"grad_norm": 1.7082885736006344,
"learning_rate": 3.6602486692297183e-06,
"loss": 0.543,
"step": 842
},
{
"epoch": 0.7,
"grad_norm": 1.8242434568233548,
"learning_rate": 3.6573513728837685e-06,
"loss": 0.5579,
"step": 843
},
{
"epoch": 0.7,
"grad_norm": 1.8305967806472925,
"learning_rate": 3.6544520967488108e-06,
"loss": 0.5425,
"step": 844
},
{
"epoch": 0.7,
"grad_norm": 1.7126995402462595,
"learning_rate": 3.651550845784407e-06,
"loss": 0.5399,
"step": 845
},
{
"epoch": 0.7,
"grad_norm": 1.992190051239983,
"learning_rate": 3.648647624953496e-06,
"loss": 0.5951,
"step": 846
},
{
"epoch": 0.7,
"grad_norm": 1.9362402903409848,
"learning_rate": 3.6457424392223885e-06,
"loss": 0.5427,
"step": 847
},
{
"epoch": 0.7,
"grad_norm": 1.7390586845081806,
"learning_rate": 3.642835293560754e-06,
"loss": 0.5269,
"step": 848
},
{
"epoch": 0.7,
"grad_norm": 1.8601747321693383,
"learning_rate": 3.639926192941615e-06,
"loss": 0.5246,
"step": 849
},
{
"epoch": 0.7,
"grad_norm": 1.8305054240762129,
"learning_rate": 3.6370151423413396e-06,
"loss": 0.562,
"step": 850
},
{
"epoch": 0.71,
"grad_norm": 1.8361711553327809,
"learning_rate": 3.6341021467396296e-06,
"loss": 0.5066,
"step": 851
},
{
"epoch": 0.71,
"grad_norm": 1.9202617492772214,
"learning_rate": 3.6311872111195163e-06,
"loss": 0.5755,
"step": 852
},
{
"epoch": 0.71,
"grad_norm": 1.9056266366653432,
"learning_rate": 3.628270340467348e-06,
"loss": 0.5193,
"step": 853
},
{
"epoch": 0.71,
"grad_norm": 1.9700971504271882,
"learning_rate": 3.625351539772783e-06,
"loss": 0.5499,
"step": 854
},
{
"epoch": 0.71,
"grad_norm": 1.7142305580780086,
"learning_rate": 3.6224308140287818e-06,
"loss": 0.5597,
"step": 855
},
{
"epoch": 0.71,
"grad_norm": 1.7897876492593174,
"learning_rate": 3.6195081682315972e-06,
"loss": 0.5347,
"step": 856
},
{
"epoch": 0.71,
"grad_norm": 2.191923699092432,
"learning_rate": 3.616583607380769e-06,
"loss": 0.5251,
"step": 857
},
{
"epoch": 0.71,
"grad_norm": 1.8582876176666503,
"learning_rate": 3.61365713647911e-06,
"loss": 0.5067,
"step": 858
},
{
"epoch": 0.71,
"grad_norm": 1.991617360171558,
"learning_rate": 3.610728760532701e-06,
"loss": 0.6464,
"step": 859
},
{
"epoch": 0.71,
"grad_norm": 1.892621069660817,
"learning_rate": 3.607798484550881e-06,
"loss": 0.5145,
"step": 860
},
{
"epoch": 0.71,
"grad_norm": 1.7592963181570629,
"learning_rate": 3.6048663135462423e-06,
"loss": 0.5297,
"step": 861
},
{
"epoch": 0.71,
"grad_norm": 2.020192040751123,
"learning_rate": 3.6019322525346157e-06,
"loss": 0.5709,
"step": 862
},
{
"epoch": 0.72,
"grad_norm": 1.8575959680616767,
"learning_rate": 3.598996306535067e-06,
"loss": 0.5946,
"step": 863
},
{
"epoch": 0.72,
"grad_norm": 1.9638758131071599,
"learning_rate": 3.5960584805698845e-06,
"loss": 0.4833,
"step": 864
},
{
"epoch": 0.72,
"grad_norm": 1.7517341191956926,
"learning_rate": 3.593118779664574e-06,
"loss": 0.5439,
"step": 865
},
{
"epoch": 0.72,
"grad_norm": 1.7637144330636925,
"learning_rate": 3.590177208847848e-06,
"loss": 0.4898,
"step": 866
},
{
"epoch": 0.72,
"grad_norm": 2.107899096934758,
"learning_rate": 3.5872337731516186e-06,
"loss": 0.5332,
"step": 867
},
{
"epoch": 0.72,
"grad_norm": 2.016493645108941,
"learning_rate": 3.5842884776109875e-06,
"loss": 0.5313,
"step": 868
},
{
"epoch": 0.72,
"grad_norm": 1.8758602544873038,
"learning_rate": 3.581341327264236e-06,
"loss": 0.554,
"step": 869
},
{
"epoch": 0.72,
"grad_norm": 1.8566881639083022,
"learning_rate": 3.5783923271528222e-06,
"loss": 0.5322,
"step": 870
},
{
"epoch": 0.72,
"grad_norm": 1.9151838907738468,
"learning_rate": 3.5754414823213647e-06,
"loss": 0.5306,
"step": 871
},
{
"epoch": 0.72,
"grad_norm": 1.7893407766785276,
"learning_rate": 3.572488797817639e-06,
"loss": 0.5226,
"step": 872
},
{
"epoch": 0.72,
"grad_norm": 1.908122661974681,
"learning_rate": 3.569534278692569e-06,
"loss": 0.5132,
"step": 873
},
{
"epoch": 0.72,
"grad_norm": 1.9052513037253582,
"learning_rate": 3.5665779300002144e-06,
"loss": 0.513,
"step": 874
},
{
"epoch": 0.73,
"grad_norm": 1.7876914527016339,
"learning_rate": 3.563619756797767e-06,
"loss": 0.5627,
"step": 875
},
{
"epoch": 0.73,
"grad_norm": 1.9607045801516068,
"learning_rate": 3.5606597641455387e-06,
"loss": 0.4986,
"step": 876
},
{
"epoch": 0.73,
"grad_norm": 1.701462749441997,
"learning_rate": 3.5576979571069527e-06,
"loss": 0.5306,
"step": 877
},
{
"epoch": 0.73,
"grad_norm": 1.8413701238351416,
"learning_rate": 3.554734340748538e-06,
"loss": 0.5602,
"step": 878
},
{
"epoch": 0.73,
"grad_norm": 1.8762306249541667,
"learning_rate": 3.5517689201399162e-06,
"loss": 0.5663,
"step": 879
},
{
"epoch": 0.73,
"grad_norm": 1.833164968453507,
"learning_rate": 3.5488017003537977e-06,
"loss": 0.5264,
"step": 880
},
{
"epoch": 0.73,
"grad_norm": 1.766302763247428,
"learning_rate": 3.5458326864659687e-06,
"loss": 0.5498,
"step": 881
},
{
"epoch": 0.73,
"grad_norm": 1.821883208129187,
"learning_rate": 3.5428618835552867e-06,
"loss": 0.5468,
"step": 882
},
{
"epoch": 0.73,
"grad_norm": 1.7773758034614335,
"learning_rate": 3.5398892967036674e-06,
"loss": 0.505,
"step": 883
},
{
"epoch": 0.73,
"grad_norm": 1.8248820711070537,
"learning_rate": 3.5369149309960783e-06,
"loss": 0.5679,
"step": 884
},
{
"epoch": 0.73,
"grad_norm": 1.8248114104788378,
"learning_rate": 3.5339387915205305e-06,
"loss": 0.5351,
"step": 885
},
{
"epoch": 0.73,
"grad_norm": 2.00472132505421,
"learning_rate": 3.53096088336807e-06,
"loss": 0.5637,
"step": 886
},
{
"epoch": 0.74,
"grad_norm": 2.0594957277906656,
"learning_rate": 3.5279812116327667e-06,
"loss": 0.567,
"step": 887
},
{
"epoch": 0.74,
"grad_norm": 1.916227169502353,
"learning_rate": 3.5249997814117098e-06,
"loss": 0.5733,
"step": 888
},
{
"epoch": 0.74,
"grad_norm": 1.7595020268824906,
"learning_rate": 3.5220165978049937e-06,
"loss": 0.5512,
"step": 889
},
{
"epoch": 0.74,
"grad_norm": 1.8259487385184114,
"learning_rate": 3.5190316659157126e-06,
"loss": 0.5332,
"step": 890
},
{
"epoch": 0.74,
"grad_norm": 1.8216813752485344,
"learning_rate": 3.5160449908499538e-06,
"loss": 0.5718,
"step": 891
},
{
"epoch": 0.74,
"grad_norm": 1.8497964997952454,
"learning_rate": 3.5130565777167845e-06,
"loss": 0.5179,
"step": 892
},
{
"epoch": 0.74,
"grad_norm": 1.8242356367817554,
"learning_rate": 3.5100664316282464e-06,
"loss": 0.5587,
"step": 893
},
{
"epoch": 0.74,
"grad_norm": 1.7793507179190546,
"learning_rate": 3.5070745576993428e-06,
"loss": 0.5924,
"step": 894
},
{
"epoch": 0.74,
"grad_norm": 1.920176905610262,
"learning_rate": 3.5040809610480364e-06,
"loss": 0.5579,
"step": 895
},
{
"epoch": 0.74,
"grad_norm": 1.954421523744336,
"learning_rate": 3.5010856467952335e-06,
"loss": 0.5496,
"step": 896
},
{
"epoch": 0.74,
"grad_norm": 1.7785169911731862,
"learning_rate": 3.4980886200647817e-06,
"loss": 0.5383,
"step": 897
},
{
"epoch": 0.74,
"grad_norm": 1.853827977546151,
"learning_rate": 3.4950898859834555e-06,
"loss": 0.5501,
"step": 898
},
{
"epoch": 0.75,
"grad_norm": 1.9882198198152168,
"learning_rate": 3.4920894496809515e-06,
"loss": 0.5557,
"step": 899
},
{
"epoch": 0.75,
"grad_norm": 1.98090605107646,
"learning_rate": 3.489087316289877e-06,
"loss": 0.5661,
"step": 900
},
{
"epoch": 0.75,
"grad_norm": 2.0027723691714785,
"learning_rate": 3.486083490945743e-06,
"loss": 0.4791,
"step": 901
},
{
"epoch": 0.75,
"grad_norm": 2.0183911897675015,
"learning_rate": 3.4830779787869555e-06,
"loss": 0.5386,
"step": 902
},
{
"epoch": 0.75,
"grad_norm": 1.9385976919386894,
"learning_rate": 3.480070784954805e-06,
"loss": 0.5351,
"step": 903
},
{
"epoch": 0.75,
"grad_norm": 1.7612550957325825,
"learning_rate": 3.4770619145934586e-06,
"loss": 0.511,
"step": 904
},
{
"epoch": 0.75,
"grad_norm": 1.8677538420589843,
"learning_rate": 3.4740513728499515e-06,
"loss": 0.5942,
"step": 905
},
{
"epoch": 0.75,
"grad_norm": 1.9208446249900946,
"learning_rate": 3.4710391648741787e-06,
"loss": 0.5146,
"step": 906
},
{
"epoch": 0.75,
"grad_norm": 1.8008673055527855,
"learning_rate": 3.468025295818885e-06,
"loss": 0.5909,
"step": 907
},
{
"epoch": 0.75,
"grad_norm": 1.891052390507894,
"learning_rate": 3.465009770839657e-06,
"loss": 0.5527,
"step": 908
},
{
"epoch": 0.75,
"grad_norm": 2.0521048489395435,
"learning_rate": 3.4619925950949126e-06,
"loss": 0.5756,
"step": 909
},
{
"epoch": 0.75,
"grad_norm": 2.003295441830653,
"learning_rate": 3.4589737737458946e-06,
"loss": 0.5299,
"step": 910
},
{
"epoch": 0.76,
"grad_norm": 1.7635851435542724,
"learning_rate": 3.4559533119566612e-06,
"loss": 0.5338,
"step": 911
},
{
"epoch": 0.76,
"grad_norm": 1.834326490517508,
"learning_rate": 3.4529312148940763e-06,
"loss": 0.56,
"step": 912
},
{
"epoch": 0.76,
"grad_norm": 1.8618427761057224,
"learning_rate": 3.4499074877278016e-06,
"loss": 0.5189,
"step": 913
},
{
"epoch": 0.76,
"grad_norm": 2.04459004844406,
"learning_rate": 3.446882135630286e-06,
"loss": 0.5765,
"step": 914
},
{
"epoch": 0.76,
"grad_norm": 1.7467595732765806,
"learning_rate": 3.4438551637767604e-06,
"loss": 0.5512,
"step": 915
},
{
"epoch": 0.76,
"grad_norm": 1.7952035114217406,
"learning_rate": 3.4408265773452226e-06,
"loss": 0.5348,
"step": 916
},
{
"epoch": 0.76,
"grad_norm": 1.8448198186244822,
"learning_rate": 3.4377963815164362e-06,
"loss": 0.5187,
"step": 917
},
{
"epoch": 0.76,
"grad_norm": 1.7738820116169103,
"learning_rate": 3.4347645814739156e-06,
"loss": 0.507,
"step": 918
},
{
"epoch": 0.76,
"grad_norm": 1.9699054774415494,
"learning_rate": 3.4317311824039216e-06,
"loss": 0.5175,
"step": 919
},
{
"epoch": 0.76,
"grad_norm": 1.7482905457169124,
"learning_rate": 3.4286961894954473e-06,
"loss": 0.5188,
"step": 920
},
{
"epoch": 0.76,
"grad_norm": 1.8012194296110113,
"learning_rate": 3.425659607940215e-06,
"loss": 0.5465,
"step": 921
},
{
"epoch": 0.76,
"grad_norm": 1.7978097428012587,
"learning_rate": 3.422621442932662e-06,
"loss": 0.5257,
"step": 922
},
{
"epoch": 0.77,
"grad_norm": 1.8534167116514217,
"learning_rate": 3.419581699669937e-06,
"loss": 0.536,
"step": 923
},
{
"epoch": 0.77,
"grad_norm": 1.7733377878036733,
"learning_rate": 3.416540383351888e-06,
"loss": 0.5632,
"step": 924
},
{
"epoch": 0.77,
"grad_norm": 1.8124786776539388,
"learning_rate": 3.4134974991810503e-06,
"loss": 0.5471,
"step": 925
},
{
"epoch": 0.77,
"grad_norm": 1.8553271859579439,
"learning_rate": 3.4104530523626463e-06,
"loss": 0.538,
"step": 926
},
{
"epoch": 0.77,
"grad_norm": 1.8888926038913822,
"learning_rate": 3.4074070481045683e-06,
"loss": 0.4868,
"step": 927
},
{
"epoch": 0.77,
"grad_norm": 2.0158609319355505,
"learning_rate": 3.404359491617374e-06,
"loss": 0.5757,
"step": 928
},
{
"epoch": 0.77,
"grad_norm": 1.8376639720078027,
"learning_rate": 3.401310388114276e-06,
"loss": 0.5377,
"step": 929
},
{
"epoch": 0.77,
"grad_norm": 2.3651883595335232,
"learning_rate": 3.3982597428111336e-06,
"loss": 0.5536,
"step": 930
},
{
"epoch": 0.77,
"grad_norm": 1.908409388949023,
"learning_rate": 3.3952075609264423e-06,
"loss": 0.5349,
"step": 931
},
{
"epoch": 0.77,
"grad_norm": 1.8261622890952995,
"learning_rate": 3.3921538476813278e-06,
"loss": 0.4991,
"step": 932
},
{
"epoch": 0.77,
"grad_norm": 1.924034720876031,
"learning_rate": 3.3890986082995353e-06,
"loss": 0.536,
"step": 933
},
{
"epoch": 0.77,
"grad_norm": 1.829615974230478,
"learning_rate": 3.3860418480074188e-06,
"loss": 0.5163,
"step": 934
},
{
"epoch": 0.78,
"grad_norm": 1.7812992854973535,
"learning_rate": 3.3829835720339353e-06,
"loss": 0.5412,
"step": 935
},
{
"epoch": 0.78,
"grad_norm": 1.8270515542068861,
"learning_rate": 3.3799237856106348e-06,
"loss": 0.5459,
"step": 936
},
{
"epoch": 0.78,
"grad_norm": 1.8336967909163833,
"learning_rate": 3.3768624939716506e-06,
"loss": 0.5074,
"step": 937
},
{
"epoch": 0.78,
"grad_norm": 1.773892866992307,
"learning_rate": 3.373799702353691e-06,
"loss": 0.5457,
"step": 938
},
{
"epoch": 0.78,
"grad_norm": 1.8605607499004266,
"learning_rate": 3.370735415996031e-06,
"loss": 0.5691,
"step": 939
},
{
"epoch": 0.78,
"grad_norm": 1.7961529805945686,
"learning_rate": 3.3676696401405007e-06,
"loss": 0.5406,
"step": 940
},
{
"epoch": 0.78,
"grad_norm": 1.7406787561376078,
"learning_rate": 3.3646023800314792e-06,
"loss": 0.5297,
"step": 941
},
{
"epoch": 0.78,
"grad_norm": 1.9794693468141764,
"learning_rate": 3.361533640915885e-06,
"loss": 0.4765,
"step": 942
},
{
"epoch": 0.78,
"grad_norm": 1.820632707720892,
"learning_rate": 3.3584634280431657e-06,
"loss": 0.5395,
"step": 943
},
{
"epoch": 0.78,
"grad_norm": 1.8478126164835549,
"learning_rate": 3.3553917466652915e-06,
"loss": 0.5288,
"step": 944
},
{
"epoch": 0.78,
"grad_norm": 1.749509825583459,
"learning_rate": 3.352318602036742e-06,
"loss": 0.5343,
"step": 945
},
{
"epoch": 0.78,
"grad_norm": 1.8034305951190157,
"learning_rate": 3.3492439994145033e-06,
"loss": 0.5536,
"step": 946
},
{
"epoch": 0.79,
"grad_norm": 1.8172591817519397,
"learning_rate": 3.346167944058052e-06,
"loss": 0.5844,
"step": 947
},
{
"epoch": 0.79,
"grad_norm": 1.749562414198837,
"learning_rate": 3.3430904412293526e-06,
"loss": 0.4833,
"step": 948
},
{
"epoch": 0.79,
"grad_norm": 1.7243742428927225,
"learning_rate": 3.3400114961928444e-06,
"loss": 0.4828,
"step": 949
},
{
"epoch": 0.79,
"grad_norm": 1.757242299744874,
"learning_rate": 3.3369311142154337e-06,
"loss": 0.5282,
"step": 950
},
{
"epoch": 0.79,
"grad_norm": 2.036302581700697,
"learning_rate": 3.3338493005664853e-06,
"loss": 0.5315,
"step": 951
},
{
"epoch": 0.79,
"grad_norm": 1.886299636672335,
"learning_rate": 3.330766060517812e-06,
"loss": 0.5244,
"step": 952
},
{
"epoch": 0.79,
"grad_norm": 1.898853787733011,
"learning_rate": 3.3276813993436695e-06,
"loss": 0.5914,
"step": 953
},
{
"epoch": 0.79,
"grad_norm": 1.8359472984671243,
"learning_rate": 3.324595322320741e-06,
"loss": 0.5488,
"step": 954
},
{
"epoch": 0.79,
"grad_norm": 1.8768955168510497,
"learning_rate": 3.321507834728134e-06,
"loss": 0.5871,
"step": 955
},
{
"epoch": 0.79,
"grad_norm": 1.8358033818112791,
"learning_rate": 3.3184189418473674e-06,
"loss": 0.5632,
"step": 956
},
{
"epoch": 0.79,
"grad_norm": 1.792562502385941,
"learning_rate": 3.315328648962364e-06,
"loss": 0.4887,
"step": 957
},
{
"epoch": 0.79,
"grad_norm": 1.8732702930932368,
"learning_rate": 3.312236961359444e-06,
"loss": 0.5313,
"step": 958
},
{
"epoch": 0.8,
"grad_norm": 1.7708047128885986,
"learning_rate": 3.3091438843273115e-06,
"loss": 0.5348,
"step": 959
},
{
"epoch": 0.8,
"grad_norm": 1.9094434763935804,
"learning_rate": 3.3060494231570463e-06,
"loss": 0.5027,
"step": 960
},
{
"epoch": 0.8,
"grad_norm": 1.87927564418864,
"learning_rate": 3.3029535831420977e-06,
"loss": 0.511,
"step": 961
},
{
"epoch": 0.8,
"grad_norm": 1.717365559903535,
"learning_rate": 3.299856369578273e-06,
"loss": 0.5203,
"step": 962
},
{
"epoch": 0.8,
"grad_norm": 1.770779257052532,
"learning_rate": 3.2967577877637296e-06,
"loss": 0.5233,
"step": 963
},
{
"epoch": 0.8,
"grad_norm": 1.7541392466004568,
"learning_rate": 3.2936578429989653e-06,
"loss": 0.5013,
"step": 964
},
{
"epoch": 0.8,
"grad_norm": 1.7840578280891832,
"learning_rate": 3.290556540586809e-06,
"loss": 0.4844,
"step": 965
},
{
"epoch": 0.8,
"grad_norm": 1.7184305413001233,
"learning_rate": 3.287453885832413e-06,
"loss": 0.4694,
"step": 966
},
{
"epoch": 0.8,
"grad_norm": 1.8671517036325307,
"learning_rate": 3.2843498840432403e-06,
"loss": 0.4652,
"step": 967
},
{
"epoch": 0.8,
"grad_norm": 1.9960847871768508,
"learning_rate": 3.2812445405290612e-06,
"loss": 0.5906,
"step": 968
},
{
"epoch": 0.8,
"grad_norm": 1.7535227575839891,
"learning_rate": 3.27813786060194e-06,
"loss": 0.5482,
"step": 969
},
{
"epoch": 0.8,
"grad_norm": 1.929231862440999,
"learning_rate": 3.2750298495762278e-06,
"loss": 0.5334,
"step": 970
},
{
"epoch": 0.8,
"grad_norm": 1.7879676366114814,
"learning_rate": 3.2719205127685505e-06,
"loss": 0.515,
"step": 971
},
{
"epoch": 0.81,
"grad_norm": 1.7817120865072218,
"learning_rate": 3.2688098554978053e-06,
"loss": 0.5045,
"step": 972
},
{
"epoch": 0.81,
"grad_norm": 1.8725673808714274,
"learning_rate": 3.265697883085145e-06,
"loss": 0.5557,
"step": 973
},
{
"epoch": 0.81,
"grad_norm": 1.8554796275037901,
"learning_rate": 3.262584600853973e-06,
"loss": 0.5785,
"step": 974
},
{
"epoch": 0.81,
"grad_norm": 1.77078783324655,
"learning_rate": 3.259470014129936e-06,
"loss": 0.524,
"step": 975
},
{
"epoch": 0.81,
"grad_norm": 1.820843626030818,
"learning_rate": 3.256354128240907e-06,
"loss": 0.5144,
"step": 976
},
{
"epoch": 0.81,
"grad_norm": 1.9330495063889956,
"learning_rate": 3.253236948516987e-06,
"loss": 0.5405,
"step": 977
},
{
"epoch": 0.81,
"grad_norm": 1.9113413794485425,
"learning_rate": 3.2501184802904867e-06,
"loss": 0.5212,
"step": 978
},
{
"epoch": 0.81,
"grad_norm": 1.799188386703558,
"learning_rate": 3.2469987288959208e-06,
"loss": 0.5148,
"step": 979
},
{
"epoch": 0.81,
"grad_norm": 1.8610914183588203,
"learning_rate": 3.2438776996700023e-06,
"loss": 0.5363,
"step": 980
},
{
"epoch": 0.81,
"grad_norm": 1.8245263524947073,
"learning_rate": 3.240755397951625e-06,
"loss": 0.5216,
"step": 981
},
{
"epoch": 0.81,
"grad_norm": 1.7863270641417597,
"learning_rate": 3.2376318290818643e-06,
"loss": 0.5581,
"step": 982
},
{
"epoch": 0.81,
"grad_norm": 1.9266115141469626,
"learning_rate": 3.23450699840396e-06,
"loss": 0.5178,
"step": 983
},
{
"epoch": 0.82,
"grad_norm": 1.8044458399187253,
"learning_rate": 3.2313809112633133e-06,
"loss": 0.5252,
"step": 984
},
{
"epoch": 0.82,
"grad_norm": 1.8809392949423562,
"learning_rate": 3.2282535730074714e-06,
"loss": 0.486,
"step": 985
},
{
"epoch": 0.82,
"grad_norm": 1.9487997548787144,
"learning_rate": 3.2251249889861237e-06,
"loss": 0.5272,
"step": 986
},
{
"epoch": 0.82,
"grad_norm": 2.088279538426057,
"learning_rate": 3.2219951645510907e-06,
"loss": 0.5426,
"step": 987
},
{
"epoch": 0.82,
"grad_norm": 1.8280370745964312,
"learning_rate": 3.218864105056313e-06,
"loss": 0.5545,
"step": 988
},
{
"epoch": 0.82,
"grad_norm": 1.7678201455723743,
"learning_rate": 3.2157318158578473e-06,
"loss": 0.5476,
"step": 989
},
{
"epoch": 0.82,
"grad_norm": 1.708170466024094,
"learning_rate": 3.21259830231385e-06,
"loss": 0.5442,
"step": 990
},
{
"epoch": 0.82,
"grad_norm": 2.0427224573251483,
"learning_rate": 3.209463569784575e-06,
"loss": 0.5501,
"step": 991
},
{
"epoch": 0.82,
"grad_norm": 1.8557413526282036,
"learning_rate": 3.206327623632359e-06,
"loss": 0.5573,
"step": 992
},
{
"epoch": 0.82,
"grad_norm": 1.7138810851622357,
"learning_rate": 3.2031904692216153e-06,
"loss": 0.5267,
"step": 993
},
{
"epoch": 0.82,
"grad_norm": 1.9034028799031073,
"learning_rate": 3.2000521119188267e-06,
"loss": 0.5605,
"step": 994
},
{
"epoch": 0.82,
"grad_norm": 1.994571492675121,
"learning_rate": 3.1969125570925303e-06,
"loss": 0.53,
"step": 995
},
{
"epoch": 0.83,
"grad_norm": 1.771581881704634,
"learning_rate": 3.193771810113313e-06,
"loss": 0.6177,
"step": 996
},
{
"epoch": 0.83,
"grad_norm": 1.7808220445921694,
"learning_rate": 3.1906298763538005e-06,
"loss": 0.5215,
"step": 997
},
{
"epoch": 0.83,
"grad_norm": 1.8069794706642701,
"learning_rate": 3.1874867611886513e-06,
"loss": 0.5444,
"step": 998
},
{
"epoch": 0.83,
"grad_norm": 1.7806867210889854,
"learning_rate": 3.1843424699945403e-06,
"loss": 0.5471,
"step": 999
},
{
"epoch": 0.83,
"grad_norm": 1.7481554024627886,
"learning_rate": 3.1811970081501576e-06,
"loss": 0.5159,
"step": 1000
},
{
"epoch": 0.83,
"grad_norm": 1.8105318680671914,
"learning_rate": 3.1780503810361946e-06,
"loss": 0.4985,
"step": 1001
},
{
"epoch": 0.83,
"grad_norm": 1.7033701950072382,
"learning_rate": 3.1749025940353363e-06,
"loss": 0.5594,
"step": 1002
},
{
"epoch": 0.83,
"grad_norm": 2.3799847532384515,
"learning_rate": 3.1717536525322512e-06,
"loss": 0.5978,
"step": 1003
},
{
"epoch": 0.83,
"grad_norm": 1.7427559432173463,
"learning_rate": 3.1686035619135845e-06,
"loss": 0.5299,
"step": 1004
},
{
"epoch": 0.83,
"grad_norm": 1.7454547855925509,
"learning_rate": 3.1654523275679453e-06,
"loss": 0.5439,
"step": 1005
},
{
"epoch": 0.83,
"grad_norm": 1.7130931472340127,
"learning_rate": 3.162299954885899e-06,
"loss": 0.5379,
"step": 1006
},
{
"epoch": 0.83,
"grad_norm": 1.6940357366272063,
"learning_rate": 3.15914644925996e-06,
"loss": 0.5694,
"step": 1007
},
{
"epoch": 0.84,
"grad_norm": 1.8544220651543013,
"learning_rate": 3.1559918160845787e-06,
"loss": 0.5285,
"step": 1008
},
{
"epoch": 0.84,
"grad_norm": 1.8481774433371347,
"learning_rate": 3.1528360607561358e-06,
"loss": 0.5384,
"step": 1009
},
{
"epoch": 0.84,
"grad_norm": 1.8256828659009958,
"learning_rate": 3.149679188672932e-06,
"loss": 0.4806,
"step": 1010
},
{
"epoch": 0.84,
"grad_norm": 1.9380282822721238,
"learning_rate": 3.1465212052351766e-06,
"loss": 0.543,
"step": 1011
},
{
"epoch": 0.84,
"grad_norm": 1.985943690469791,
"learning_rate": 3.1433621158449807e-06,
"loss": 0.5549,
"step": 1012
},
{
"epoch": 0.84,
"grad_norm": 1.7038398790061953,
"learning_rate": 3.140201925906348e-06,
"loss": 0.4682,
"step": 1013
},
{
"epoch": 0.84,
"grad_norm": 1.8748481620529394,
"learning_rate": 3.1370406408251632e-06,
"loss": 0.5046,
"step": 1014
},
{
"epoch": 0.84,
"grad_norm": 1.7587036990451181,
"learning_rate": 3.133878266009186e-06,
"loss": 0.5203,
"step": 1015
},
{
"epoch": 0.84,
"grad_norm": 1.7503537433041947,
"learning_rate": 3.130714806868041e-06,
"loss": 0.5546,
"step": 1016
},
{
"epoch": 0.84,
"grad_norm": 1.7701505667314001,
"learning_rate": 3.127550268813205e-06,
"loss": 0.531,
"step": 1017
},
{
"epoch": 0.84,
"grad_norm": 1.771371589393474,
"learning_rate": 3.124384657258001e-06,
"loss": 0.5424,
"step": 1018
},
{
"epoch": 0.84,
"grad_norm": 1.8016015279719124,
"learning_rate": 3.1212179776175905e-06,
"loss": 0.5706,
"step": 1019
},
{
"epoch": 0.85,
"grad_norm": 1.810944889002695,
"learning_rate": 3.1180502353089598e-06,
"loss": 0.5502,
"step": 1020
},
{
"epoch": 0.85,
"grad_norm": 1.8062084514449492,
"learning_rate": 3.1148814357509147e-06,
"loss": 0.5337,
"step": 1021
},
{
"epoch": 0.85,
"grad_norm": 1.669643406466654,
"learning_rate": 3.111711584364068e-06,
"loss": 0.4802,
"step": 1022
},
{
"epoch": 0.85,
"grad_norm": 1.6852245083058144,
"learning_rate": 3.1085406865708333e-06,
"loss": 0.532,
"step": 1023
},
{
"epoch": 0.85,
"grad_norm": 1.8463748056800222,
"learning_rate": 3.1053687477954124e-06,
"loss": 0.5112,
"step": 1024
},
{
"epoch": 0.85,
"grad_norm": 1.7302148909577209,
"learning_rate": 3.10219577346379e-06,
"loss": 0.5549,
"step": 1025
},
{
"epoch": 0.85,
"grad_norm": 1.7752983463714818,
"learning_rate": 3.0990217690037206e-06,
"loss": 0.5606,
"step": 1026
},
{
"epoch": 0.85,
"grad_norm": 1.695119975844164,
"learning_rate": 3.09584673984472e-06,
"loss": 0.486,
"step": 1027
},
{
"epoch": 0.85,
"grad_norm": 1.793543444803663,
"learning_rate": 3.0926706914180605e-06,
"loss": 0.6474,
"step": 1028
},
{
"epoch": 0.85,
"grad_norm": 1.6954588940750932,
"learning_rate": 3.089493629156755e-06,
"loss": 0.5208,
"step": 1029
},
{
"epoch": 0.85,
"grad_norm": 1.9045089074493644,
"learning_rate": 3.08631555849555e-06,
"loss": 0.5291,
"step": 1030
},
{
"epoch": 0.85,
"grad_norm": 1.8481217904786489,
"learning_rate": 3.083136484870921e-06,
"loss": 0.5212,
"step": 1031
},
{
"epoch": 0.86,
"grad_norm": 1.6729420221561044,
"learning_rate": 3.0799564137210536e-06,
"loss": 0.5024,
"step": 1032
},
{
"epoch": 0.86,
"grad_norm": 1.8821832248249077,
"learning_rate": 3.076775350485845e-06,
"loss": 0.5459,
"step": 1033
},
{
"epoch": 0.86,
"grad_norm": 1.762473350167322,
"learning_rate": 3.0735933006068863e-06,
"loss": 0.4938,
"step": 1034
},
{
"epoch": 0.86,
"grad_norm": 1.7950707678098703,
"learning_rate": 3.0704102695274573e-06,
"loss": 0.4922,
"step": 1035
},
{
"epoch": 0.86,
"grad_norm": 1.6853644769275375,
"learning_rate": 3.0672262626925174e-06,
"loss": 0.47,
"step": 1036
},
{
"epoch": 0.86,
"grad_norm": 1.809909106997157,
"learning_rate": 3.0640412855486922e-06,
"loss": 0.5545,
"step": 1037
},
{
"epoch": 0.86,
"grad_norm": 2.019472393876661,
"learning_rate": 3.06085534354427e-06,
"loss": 0.5616,
"step": 1038
},
{
"epoch": 0.86,
"grad_norm": 1.7972785887075076,
"learning_rate": 3.057668442129188e-06,
"loss": 0.5269,
"step": 1039
},
{
"epoch": 0.86,
"grad_norm": 1.865555820217107,
"learning_rate": 3.054480586755026e-06,
"loss": 0.5752,
"step": 1040
},
{
"epoch": 0.86,
"grad_norm": 1.792147096098412,
"learning_rate": 3.051291782874995e-06,
"loss": 0.54,
"step": 1041
},
{
"epoch": 0.86,
"grad_norm": 1.8108893550848508,
"learning_rate": 3.048102035943927e-06,
"loss": 0.5367,
"step": 1042
},
{
"epoch": 0.86,
"grad_norm": 2.0966646553454793,
"learning_rate": 3.04491135141827e-06,
"loss": 0.5455,
"step": 1043
},
{
"epoch": 0.87,
"grad_norm": 1.7357403687049695,
"learning_rate": 3.041719734756073e-06,
"loss": 0.502,
"step": 1044
},
{
"epoch": 0.87,
"grad_norm": 1.8033826162723872,
"learning_rate": 3.038527191416982e-06,
"loss": 0.5644,
"step": 1045
},
{
"epoch": 0.87,
"grad_norm": 1.7822928111630525,
"learning_rate": 3.0353337268622267e-06,
"loss": 0.4938,
"step": 1046
},
{
"epoch": 0.87,
"grad_norm": 1.7910319343463081,
"learning_rate": 3.0321393465546134e-06,
"loss": 0.5889,
"step": 1047
},
{
"epoch": 0.87,
"grad_norm": 1.7457160087273953,
"learning_rate": 3.028944055958514e-06,
"loss": 0.5022,
"step": 1048
},
{
"epoch": 0.87,
"grad_norm": 1.691379648176161,
"learning_rate": 3.0257478605398595e-06,
"loss": 0.4841,
"step": 1049
},
{
"epoch": 0.87,
"grad_norm": 1.7452186987943483,
"learning_rate": 3.0225507657661257e-06,
"loss": 0.5626,
"step": 1050
},
{
"epoch": 0.87,
"grad_norm": 1.7578678635930594,
"learning_rate": 3.0193527771063297e-06,
"loss": 0.5115,
"step": 1051
},
{
"epoch": 0.87,
"grad_norm": 1.7879798898209605,
"learning_rate": 3.016153900031016e-06,
"loss": 0.5296,
"step": 1052
},
{
"epoch": 0.87,
"grad_norm": 1.6745604796677231,
"learning_rate": 3.0129541400122492e-06,
"loss": 0.5089,
"step": 1053
},
{
"epoch": 0.87,
"grad_norm": 1.8484438696306678,
"learning_rate": 3.0097535025236045e-06,
"loss": 0.6124,
"step": 1054
},
{
"epoch": 0.87,
"grad_norm": 1.8023880068850882,
"learning_rate": 3.0065519930401595e-06,
"loss": 0.4983,
"step": 1055
},
{
"epoch": 0.88,
"grad_norm": 1.743901583565096,
"learning_rate": 3.0033496170384803e-06,
"loss": 0.4998,
"step": 1056
},
{
"epoch": 0.88,
"grad_norm": 1.9494472820876043,
"learning_rate": 3.000146379996617e-06,
"loss": 0.537,
"step": 1057
},
{
"epoch": 0.88,
"grad_norm": 1.6992995489648048,
"learning_rate": 2.996942287394093e-06,
"loss": 0.5822,
"step": 1058
},
{
"epoch": 0.88,
"grad_norm": 1.8498288139189643,
"learning_rate": 2.993737344711895e-06,
"loss": 0.5651,
"step": 1059
},
{
"epoch": 0.88,
"grad_norm": 1.755920633785882,
"learning_rate": 2.990531557432464e-06,
"loss": 0.496,
"step": 1060
},
{
"epoch": 0.88,
"grad_norm": 1.7876484928074277,
"learning_rate": 2.9873249310396853e-06,
"loss": 0.5224,
"step": 1061
},
{
"epoch": 0.88,
"grad_norm": 1.7573987279473129,
"learning_rate": 2.98411747101888e-06,
"loss": 0.5228,
"step": 1062
},
{
"epoch": 0.88,
"grad_norm": 1.6995721104857204,
"learning_rate": 2.980909182856794e-06,
"loss": 0.4758,
"step": 1063
},
{
"epoch": 0.88,
"grad_norm": 1.907464743607936,
"learning_rate": 2.9777000720415916e-06,
"loss": 0.5254,
"step": 1064
},
{
"epoch": 0.88,
"grad_norm": 1.7921365259203703,
"learning_rate": 2.974490144062844e-06,
"loss": 0.5116,
"step": 1065
},
{
"epoch": 0.88,
"grad_norm": 1.9010192849593792,
"learning_rate": 2.9712794044115196e-06,
"loss": 0.5136,
"step": 1066
},
{
"epoch": 0.88,
"grad_norm": 1.742881813035793,
"learning_rate": 2.968067858579975e-06,
"loss": 0.5436,
"step": 1067
},
{
"epoch": 0.89,
"grad_norm": 1.7135933558215708,
"learning_rate": 2.964855512061947e-06,
"loss": 0.5268,
"step": 1068
},
{
"epoch": 0.89,
"grad_norm": 1.8360025545734582,
"learning_rate": 2.9616423703525414e-06,
"loss": 0.5238,
"step": 1069
},
{
"epoch": 0.89,
"grad_norm": 1.7090421713960848,
"learning_rate": 2.9584284389482237e-06,
"loss": 0.5051,
"step": 1070
},
{
"epoch": 0.89,
"grad_norm": 1.7462732547158757,
"learning_rate": 2.9552137233468113e-06,
"loss": 0.4838,
"step": 1071
},
{
"epoch": 0.89,
"grad_norm": 1.9336108910937513,
"learning_rate": 2.951998229047464e-06,
"loss": 0.5576,
"step": 1072
},
{
"epoch": 0.89,
"grad_norm": 1.784092660568157,
"learning_rate": 2.9487819615506702e-06,
"loss": 0.5349,
"step": 1073
},
{
"epoch": 0.89,
"grad_norm": 1.772640354616067,
"learning_rate": 2.945564926358245e-06,
"loss": 0.5423,
"step": 1074
},
{
"epoch": 0.89,
"grad_norm": 1.8491968859591044,
"learning_rate": 2.9423471289733125e-06,
"loss": 0.5453,
"step": 1075
},
{
"epoch": 0.89,
"grad_norm": 1.8283172103770493,
"learning_rate": 2.9391285749003046e-06,
"loss": 0.5318,
"step": 1076
},
{
"epoch": 0.89,
"grad_norm": 1.7802483696828226,
"learning_rate": 2.935909269644946e-06,
"loss": 0.4954,
"step": 1077
},
{
"epoch": 0.89,
"grad_norm": 1.8687809173149,
"learning_rate": 2.9326892187142457e-06,
"loss": 0.5428,
"step": 1078
},
{
"epoch": 0.89,
"grad_norm": 1.9218917868616974,
"learning_rate": 2.9294684276164888e-06,
"loss": 0.5125,
"step": 1079
},
{
"epoch": 0.9,
"grad_norm": 1.8406300824318225,
"learning_rate": 2.9262469018612278e-06,
"loss": 0.5186,
"step": 1080
},
{
"epoch": 0.9,
"grad_norm": 1.8153319034513924,
"learning_rate": 2.9230246469592695e-06,
"loss": 0.4878,
"step": 1081
},
{
"epoch": 0.9,
"grad_norm": 1.8381190525343576,
"learning_rate": 2.91980166842267e-06,
"loss": 0.5455,
"step": 1082
},
{
"epoch": 0.9,
"grad_norm": 1.7941629060330144,
"learning_rate": 2.9165779717647212e-06,
"loss": 0.5425,
"step": 1083
},
{
"epoch": 0.9,
"grad_norm": 1.755950985861856,
"learning_rate": 2.9133535624999466e-06,
"loss": 0.4992,
"step": 1084
},
{
"epoch": 0.9,
"grad_norm": 1.8065716401418646,
"learning_rate": 2.9101284461440853e-06,
"loss": 0.5569,
"step": 1085
},
{
"epoch": 0.9,
"grad_norm": 1.8487073865649808,
"learning_rate": 2.9069026282140887e-06,
"loss": 0.5352,
"step": 1086
},
{
"epoch": 0.9,
"grad_norm": 1.877024524581134,
"learning_rate": 2.903676114228107e-06,
"loss": 0.5584,
"step": 1087
},
{
"epoch": 0.9,
"grad_norm": 1.812931375367902,
"learning_rate": 2.9004489097054807e-06,
"loss": 0.5154,
"step": 1088
},
{
"epoch": 0.9,
"grad_norm": 1.7729938020658174,
"learning_rate": 2.897221020166732e-06,
"loss": 0.5386,
"step": 1089
},
{
"epoch": 0.9,
"grad_norm": 1.6991898958250629,
"learning_rate": 2.8939924511335555e-06,
"loss": 0.5467,
"step": 1090
},
{
"epoch": 0.9,
"grad_norm": 1.7298323860671052,
"learning_rate": 2.890763208128807e-06,
"loss": 0.5506,
"step": 1091
},
{
"epoch": 0.91,
"grad_norm": 1.9718362378496106,
"learning_rate": 2.887533296676497e-06,
"loss": 0.5453,
"step": 1092
},
{
"epoch": 0.91,
"grad_norm": 1.7003897379752575,
"learning_rate": 2.8843027223017767e-06,
"loss": 0.5016,
"step": 1093
},
{
"epoch": 0.91,
"grad_norm": 1.7604846690613096,
"learning_rate": 2.8810714905309346e-06,
"loss": 0.5206,
"step": 1094
},
{
"epoch": 0.91,
"grad_norm": 1.868522047775135,
"learning_rate": 2.8778396068913807e-06,
"loss": 0.5152,
"step": 1095
},
{
"epoch": 0.91,
"grad_norm": 1.8080911269766844,
"learning_rate": 2.874607076911642e-06,
"loss": 0.4966,
"step": 1096
},
{
"epoch": 0.91,
"grad_norm": 1.7767037245003534,
"learning_rate": 2.871373906121351e-06,
"loss": 0.5081,
"step": 1097
},
{
"epoch": 0.91,
"grad_norm": 1.733045586658075,
"learning_rate": 2.8681401000512356e-06,
"loss": 0.5031,
"step": 1098
},
{
"epoch": 0.91,
"grad_norm": 1.6767478479637847,
"learning_rate": 2.8649056642331103e-06,
"loss": 0.4856,
"step": 1099
},
{
"epoch": 0.91,
"grad_norm": 1.6820690185704608,
"learning_rate": 2.8616706041998686e-06,
"loss": 0.5151,
"step": 1100
},
{
"epoch": 0.91,
"grad_norm": 1.840181264549285,
"learning_rate": 2.8584349254854693e-06,
"loss": 0.5393,
"step": 1101
},
{
"epoch": 0.91,
"grad_norm": 1.827807570004724,
"learning_rate": 2.8551986336249322e-06,
"loss": 0.5572,
"step": 1102
},
{
"epoch": 0.91,
"grad_norm": 1.711815265099016,
"learning_rate": 2.8519617341543233e-06,
"loss": 0.5184,
"step": 1103
},
{
"epoch": 0.92,
"grad_norm": 1.7460018389221874,
"learning_rate": 2.8487242326107495e-06,
"loss": 0.5374,
"step": 1104
},
{
"epoch": 0.92,
"grad_norm": 1.985067366728648,
"learning_rate": 2.8454861345323475e-06,
"loss": 0.538,
"step": 1105
},
{
"epoch": 0.92,
"grad_norm": 1.8044567576569952,
"learning_rate": 2.8422474454582754e-06,
"loss": 0.4947,
"step": 1106
},
{
"epoch": 0.92,
"grad_norm": 1.7648712890692506,
"learning_rate": 2.8390081709286997e-06,
"loss": 0.5584,
"step": 1107
},
{
"epoch": 0.92,
"grad_norm": 1.7544905722043518,
"learning_rate": 2.8357683164847903e-06,
"loss": 0.5696,
"step": 1108
},
{
"epoch": 0.92,
"grad_norm": 1.7923136846837993,
"learning_rate": 2.8325278876687084e-06,
"loss": 0.5502,
"step": 1109
},
{
"epoch": 0.92,
"grad_norm": 2.077195937792951,
"learning_rate": 2.8292868900235986e-06,
"loss": 0.543,
"step": 1110
},
{
"epoch": 0.92,
"grad_norm": 1.7675854046933754,
"learning_rate": 2.826045329093578e-06,
"loss": 0.5422,
"step": 1111
},
{
"epoch": 0.92,
"grad_norm": 1.8457239401392898,
"learning_rate": 2.822803210423727e-06,
"loss": 0.5334,
"step": 1112
},
{
"epoch": 0.92,
"grad_norm": 1.7426929121470698,
"learning_rate": 2.8195605395600804e-06,
"loss": 0.4972,
"step": 1113
},
{
"epoch": 0.92,
"grad_norm": 1.7675216264197045,
"learning_rate": 2.8163173220496175e-06,
"loss": 0.5442,
"step": 1114
},
{
"epoch": 0.92,
"grad_norm": 1.7483102565661375,
"learning_rate": 2.8130735634402527e-06,
"loss": 0.5425,
"step": 1115
},
{
"epoch": 0.93,
"grad_norm": 1.692036399159914,
"learning_rate": 2.8098292692808253e-06,
"loss": 0.521,
"step": 1116
},
{
"epoch": 0.93,
"grad_norm": 1.799980213437577,
"learning_rate": 2.8065844451210933e-06,
"loss": 0.5597,
"step": 1117
},
{
"epoch": 0.93,
"grad_norm": 1.7666190830884467,
"learning_rate": 2.803339096511718e-06,
"loss": 0.5612,
"step": 1118
},
{
"epoch": 0.93,
"grad_norm": 1.792129515845057,
"learning_rate": 2.8000932290042597e-06,
"loss": 0.5334,
"step": 1119
},
{
"epoch": 0.93,
"grad_norm": 1.7395715578516604,
"learning_rate": 2.7968468481511663e-06,
"loss": 0.5545,
"step": 1120
},
{
"epoch": 0.93,
"grad_norm": 1.6843830287676704,
"learning_rate": 2.7935999595057623e-06,
"loss": 0.5659,
"step": 1121
},
{
"epoch": 0.93,
"grad_norm": 1.6432688824199502,
"learning_rate": 2.790352568622244e-06,
"loss": 0.4926,
"step": 1122
},
{
"epoch": 0.93,
"grad_norm": 1.7430642435954644,
"learning_rate": 2.787104681055663e-06,
"loss": 0.4666,
"step": 1123
},
{
"epoch": 0.93,
"grad_norm": 1.8067789882264202,
"learning_rate": 2.783856302361923e-06,
"loss": 0.5233,
"step": 1124
},
{
"epoch": 0.93,
"grad_norm": 1.7685143281757654,
"learning_rate": 2.780607438097769e-06,
"loss": 0.5506,
"step": 1125
},
{
"epoch": 0.93,
"grad_norm": 1.7163110868931304,
"learning_rate": 2.7773580938207717e-06,
"loss": 0.5044,
"step": 1126
},
{
"epoch": 0.93,
"grad_norm": 1.809036270322799,
"learning_rate": 2.7741082750893284e-06,
"loss": 0.5206,
"step": 1127
},
{
"epoch": 0.94,
"grad_norm": 1.8193898978325846,
"learning_rate": 2.770857987462645e-06,
"loss": 0.6064,
"step": 1128
},
{
"epoch": 0.94,
"grad_norm": 1.765826426309075,
"learning_rate": 2.76760723650073e-06,
"loss": 0.4914,
"step": 1129
},
{
"epoch": 0.94,
"grad_norm": 2.046345230237298,
"learning_rate": 2.764356027764385e-06,
"loss": 0.5938,
"step": 1130
},
{
"epoch": 0.94,
"grad_norm": 1.8264697696225647,
"learning_rate": 2.7611043668151948e-06,
"loss": 0.5476,
"step": 1131
},
{
"epoch": 0.94,
"grad_norm": 1.7776043318415495,
"learning_rate": 2.7578522592155166e-06,
"loss": 0.5318,
"step": 1132
},
{
"epoch": 0.94,
"grad_norm": 1.767284538432005,
"learning_rate": 2.7545997105284735e-06,
"loss": 0.5197,
"step": 1133
},
{
"epoch": 0.94,
"grad_norm": 1.831190014066027,
"learning_rate": 2.75134672631794e-06,
"loss": 0.4939,
"step": 1134
},
{
"epoch": 0.94,
"grad_norm": 1.7727769641989948,
"learning_rate": 2.7480933121485394e-06,
"loss": 0.5542,
"step": 1135
},
{
"epoch": 0.94,
"grad_norm": 1.7599576706599651,
"learning_rate": 2.7448394735856275e-06,
"loss": 0.5102,
"step": 1136
},
{
"epoch": 0.94,
"grad_norm": 1.7526987759875383,
"learning_rate": 2.7415852161952893e-06,
"loss": 0.5357,
"step": 1137
},
{
"epoch": 0.94,
"grad_norm": 1.7478180377944075,
"learning_rate": 2.7383305455443223e-06,
"loss": 0.552,
"step": 1138
},
{
"epoch": 0.94,
"grad_norm": 1.8026983878339322,
"learning_rate": 2.7350754672002334e-06,
"loss": 0.5324,
"step": 1139
},
{
"epoch": 0.95,
"grad_norm": 1.7539604119960455,
"learning_rate": 2.7318199867312267e-06,
"loss": 0.4951,
"step": 1140
},
{
"epoch": 0.95,
"grad_norm": 1.7060714376533908,
"learning_rate": 2.728564109706193e-06,
"loss": 0.5044,
"step": 1141
},
{
"epoch": 0.95,
"grad_norm": 1.896732668736906,
"learning_rate": 2.725307841694704e-06,
"loss": 0.5272,
"step": 1142
},
{
"epoch": 0.95,
"grad_norm": 1.9094037542829962,
"learning_rate": 2.722051188266998e-06,
"loss": 0.5036,
"step": 1143
},
{
"epoch": 0.95,
"grad_norm": 1.7529900591353695,
"learning_rate": 2.7187941549939723e-06,
"loss": 0.4962,
"step": 1144
},
{
"epoch": 0.95,
"grad_norm": 1.7652784724721573,
"learning_rate": 2.7155367474471763e-06,
"loss": 0.5159,
"step": 1145
},
{
"epoch": 0.95,
"grad_norm": 1.9070275680276054,
"learning_rate": 2.7122789711987964e-06,
"loss": 0.5269,
"step": 1146
},
{
"epoch": 0.95,
"grad_norm": 1.7630505518040367,
"learning_rate": 2.709020831821652e-06,
"loss": 0.5286,
"step": 1147
},
{
"epoch": 0.95,
"grad_norm": 1.7410138974922291,
"learning_rate": 2.7057623348891846e-06,
"loss": 0.4902,
"step": 1148
},
{
"epoch": 0.95,
"grad_norm": 1.745842560539345,
"learning_rate": 2.7025034859754446e-06,
"loss": 0.5178,
"step": 1149
},
{
"epoch": 0.95,
"grad_norm": 1.8498982578771728,
"learning_rate": 2.699244290655086e-06,
"loss": 0.55,
"step": 1150
},
{
"epoch": 0.95,
"grad_norm": 1.6360369924184164,
"learning_rate": 2.6959847545033558e-06,
"loss": 0.4988,
"step": 1151
},
{
"epoch": 0.96,
"grad_norm": 1.6784833460211517,
"learning_rate": 2.692724883096082e-06,
"loss": 0.5303,
"step": 1152
},
{
"epoch": 0.96,
"grad_norm": 1.7888637226825195,
"learning_rate": 2.68946468200967e-06,
"loss": 0.542,
"step": 1153
},
{
"epoch": 0.96,
"grad_norm": 1.7156031503954616,
"learning_rate": 2.686204156821084e-06,
"loss": 0.499,
"step": 1154
},
{
"epoch": 0.96,
"grad_norm": 1.802618839032982,
"learning_rate": 2.6829433131078464e-06,
"loss": 0.5095,
"step": 1155
},
{
"epoch": 0.96,
"grad_norm": 1.7018673816457677,
"learning_rate": 2.6796821564480237e-06,
"loss": 0.4911,
"step": 1156
},
{
"epoch": 0.96,
"grad_norm": 1.939833859373507,
"learning_rate": 2.6764206924202173e-06,
"loss": 0.5965,
"step": 1157
},
{
"epoch": 0.96,
"grad_norm": 1.757462214596805,
"learning_rate": 2.673158926603554e-06,
"loss": 0.5119,
"step": 1158
},
{
"epoch": 0.96,
"grad_norm": 1.824906787992325,
"learning_rate": 2.669896864577678e-06,
"loss": 0.4995,
"step": 1159
},
{
"epoch": 0.96,
"grad_norm": 1.6963319988581682,
"learning_rate": 2.666634511922739e-06,
"loss": 0.499,
"step": 1160
},
{
"epoch": 0.96,
"grad_norm": 1.7490967555131538,
"learning_rate": 2.6633718742193837e-06,
"loss": 0.5045,
"step": 1161
},
{
"epoch": 0.96,
"grad_norm": 1.7295387040616608,
"learning_rate": 2.660108957048749e-06,
"loss": 0.48,
"step": 1162
},
{
"epoch": 0.96,
"grad_norm": 1.7062936128447537,
"learning_rate": 2.656845765992447e-06,
"loss": 0.5024,
"step": 1163
},
{
"epoch": 0.96,
"grad_norm": 1.7291223687738257,
"learning_rate": 2.6535823066325594e-06,
"loss": 0.4965,
"step": 1164
},
{
"epoch": 0.97,
"grad_norm": 1.7660018876230184,
"learning_rate": 2.650318584551626e-06,
"loss": 0.6289,
"step": 1165
},
{
"epoch": 0.97,
"grad_norm": 1.6875948695046943,
"learning_rate": 2.6470546053326375e-06,
"loss": 0.5099,
"step": 1166
},
{
"epoch": 0.97,
"grad_norm": 1.7055862895950586,
"learning_rate": 2.643790374559023e-06,
"loss": 0.4748,
"step": 1167
},
{
"epoch": 0.97,
"grad_norm": 1.8397810404769834,
"learning_rate": 2.6405258978146443e-06,
"loss": 0.5547,
"step": 1168
},
{
"epoch": 0.97,
"grad_norm": 1.6780759297615608,
"learning_rate": 2.6372611806837804e-06,
"loss": 0.4696,
"step": 1169
},
{
"epoch": 0.97,
"grad_norm": 1.7463193906158438,
"learning_rate": 2.633996228751125e-06,
"loss": 0.5167,
"step": 1170
},
{
"epoch": 0.97,
"grad_norm": 1.7682737157303552,
"learning_rate": 2.6307310476017705e-06,
"loss": 0.5178,
"step": 1171
},
{
"epoch": 0.97,
"grad_norm": 1.7759532350573655,
"learning_rate": 2.627465642821203e-06,
"loss": 0.5411,
"step": 1172
},
{
"epoch": 0.97,
"grad_norm": 1.741742707150691,
"learning_rate": 2.624200019995293e-06,
"loss": 0.5357,
"step": 1173
},
{
"epoch": 0.97,
"grad_norm": 1.7638181255611864,
"learning_rate": 2.6209341847102787e-06,
"loss": 0.5598,
"step": 1174
},
{
"epoch": 0.97,
"grad_norm": 1.6585763596592404,
"learning_rate": 2.6176681425527663e-06,
"loss": 0.4891,
"step": 1175
},
{
"epoch": 0.97,
"grad_norm": 1.7652514703885578,
"learning_rate": 2.614401899109716e-06,
"loss": 0.5412,
"step": 1176
},
{
"epoch": 0.98,
"grad_norm": 1.7646286601286296,
"learning_rate": 2.6111354599684287e-06,
"loss": 0.4753,
"step": 1177
},
{
"epoch": 0.98,
"grad_norm": 1.7933546923906454,
"learning_rate": 2.6078688307165436e-06,
"loss": 0.5159,
"step": 1178
},
{
"epoch": 0.98,
"grad_norm": 1.8474498352431208,
"learning_rate": 2.6046020169420223e-06,
"loss": 0.4786,
"step": 1179
},
{
"epoch": 0.98,
"grad_norm": 1.816609500392057,
"learning_rate": 2.601335024233145e-06,
"loss": 0.5821,
"step": 1180
},
{
"epoch": 0.98,
"grad_norm": 1.7603922858788037,
"learning_rate": 2.598067858178495e-06,
"loss": 0.4749,
"step": 1181
},
{
"epoch": 0.98,
"grad_norm": 1.771168764538133,
"learning_rate": 2.594800524366956e-06,
"loss": 0.5221,
"step": 1182
},
{
"epoch": 0.98,
"grad_norm": 1.7428386931770696,
"learning_rate": 2.591533028387694e-06,
"loss": 0.5243,
"step": 1183
},
{
"epoch": 0.98,
"grad_norm": 1.7354647623517858,
"learning_rate": 2.588265375830155e-06,
"loss": 0.4665,
"step": 1184
},
{
"epoch": 0.98,
"grad_norm": 1.7757829783254058,
"learning_rate": 2.5849975722840537e-06,
"loss": 0.4713,
"step": 1185
},
{
"epoch": 0.98,
"grad_norm": 1.7660698291034924,
"learning_rate": 2.58172962333936e-06,
"loss": 0.5198,
"step": 1186
},
{
"epoch": 0.98,
"grad_norm": 1.7071465020770178,
"learning_rate": 2.5784615345862963e-06,
"loss": 0.5355,
"step": 1187
},
{
"epoch": 0.98,
"grad_norm": 1.6994920599655763,
"learning_rate": 2.5751933116153215e-06,
"loss": 0.4867,
"step": 1188
},
{
"epoch": 0.99,
"grad_norm": 1.7891977115774562,
"learning_rate": 2.5719249600171247e-06,
"loss": 0.5071,
"step": 1189
},
{
"epoch": 0.99,
"grad_norm": 1.6866451169084888,
"learning_rate": 2.568656485382616e-06,
"loss": 0.4767,
"step": 1190
},
{
"epoch": 0.99,
"grad_norm": 1.9106444693405875,
"learning_rate": 2.5653878933029134e-06,
"loss": 0.5063,
"step": 1191
},
{
"epoch": 0.99,
"grad_norm": 1.7546015951107552,
"learning_rate": 2.56211918936934e-06,
"loss": 0.5536,
"step": 1192
},
{
"epoch": 0.99,
"grad_norm": 1.7866083346923656,
"learning_rate": 2.5588503791734053e-06,
"loss": 0.4738,
"step": 1193
},
{
"epoch": 0.99,
"grad_norm": 1.6678313975517949,
"learning_rate": 2.5555814683068058e-06,
"loss": 0.5095,
"step": 1194
},
{
"epoch": 0.99,
"grad_norm": 1.694690087625629,
"learning_rate": 2.552312462361405e-06,
"loss": 0.5711,
"step": 1195
},
{
"epoch": 0.99,
"grad_norm": 1.7583066556547233,
"learning_rate": 2.5490433669292337e-06,
"loss": 0.5183,
"step": 1196
},
{
"epoch": 0.99,
"grad_norm": 1.8259327544569408,
"learning_rate": 2.5457741876024716e-06,
"loss": 0.5129,
"step": 1197
},
{
"epoch": 0.99,
"grad_norm": 1.743709458286742,
"learning_rate": 2.542504929973445e-06,
"loss": 0.509,
"step": 1198
},
{
"epoch": 0.99,
"grad_norm": 1.8551037168096902,
"learning_rate": 2.5392355996346134e-06,
"loss": 0.4874,
"step": 1199
},
{
"epoch": 0.99,
"grad_norm": 1.7705896553689628,
"learning_rate": 2.5359662021785596e-06,
"loss": 0.5102,
"step": 1200
},
{
"epoch": 1.0,
"grad_norm": 1.8456154073029885,
"learning_rate": 2.532696743197982e-06,
"loss": 0.5363,
"step": 1201
},
{
"epoch": 1.0,
"grad_norm": 1.7341454202963031,
"learning_rate": 2.529427228285686e-06,
"loss": 0.5013,
"step": 1202
},
{
"epoch": 1.0,
"grad_norm": 1.7923147732329405,
"learning_rate": 2.526157663034568e-06,
"loss": 0.5191,
"step": 1203
},
{
"epoch": 1.0,
"grad_norm": 1.731262319220837,
"learning_rate": 2.522888053037616e-06,
"loss": 0.4889,
"step": 1204
},
{
"epoch": 1.0,
"grad_norm": 1.797800368847369,
"learning_rate": 2.5196184038878895e-06,
"loss": 0.4868,
"step": 1205
},
{
"epoch": 1.0,
"grad_norm": 1.8182272292135089,
"learning_rate": 2.5163487211785194e-06,
"loss": 0.5159,
"step": 1206
}
],
"logging_steps": 1,
"max_steps": 2412,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 603,
"total_flos": 568033919631360.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
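
The trace above follows the standard Hugging Face Trainer layout: one "log_history" entry per optimizer step (with "step", "loss", "learning_rate", "grad_norm"), followed by run-level fields such as "max_steps", "num_train_epochs", and "save_steps". Purely as an illustrative sketch, not part of the checkpoint itself: the snippet below assumes this file is saved locally as trainer_state.json and summarizes the logged loss curve using only the Python standard library.

import json

# Assumed local path; the trainer state normally sits next to the
# checkpoint files written by the Trainer / axolotl run.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (eval entries, if any, differ).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

print(f"logged steps: {len(steps)} (last logged step {steps[-1]} of {state['max_steps']})")
print(f"final loss: {losses[-1]:.4f}  min loss: {min(losses):.4f}")
print(f"last learning rate: {entries[-1]['learning_rate']:.3e}")
print(f"epochs planned: {state['num_train_epochs']}, checkpoint every {state['save_steps']} steps")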