{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.4859676831490706,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0002429838415745353,
"grad_norm": 0.9283741116523743,
"learning_rate": 8e-05,
"loss": 1.9395,
"step": 1
},
{
"epoch": 0.0004859676831490706,
"grad_norm": 1.3144824504852295,
"learning_rate": 8e-05,
"loss": 1.9039,
"step": 2
},
{
"epoch": 0.0007289515247236059,
"grad_norm": 0.5672217011451721,
"learning_rate": 8e-05,
"loss": 1.8385,
"step": 3
},
{
"epoch": 0.0009719353662981412,
"grad_norm": 0.6657972931861877,
"learning_rate": 8e-05,
"loss": 1.9482,
"step": 4
},
{
"epoch": 0.0012149192078726764,
"grad_norm": 0.512556791305542,
"learning_rate": 8e-05,
"loss": 1.9696,
"step": 5
},
{
"epoch": 0.0014579030494472117,
"grad_norm": 0.5714458227157593,
"learning_rate": 8e-05,
"loss": 1.8831,
"step": 6
},
{
"epoch": 0.001700886891021747,
"grad_norm": 0.6022382378578186,
"learning_rate": 8e-05,
"loss": 1.9216,
"step": 7
},
{
"epoch": 0.0019438707325962824,
"grad_norm": 0.5293871164321899,
"learning_rate": 8e-05,
"loss": 1.8539,
"step": 8
},
{
"epoch": 0.0021868545741708175,
"grad_norm": 0.5279130339622498,
"learning_rate": 8e-05,
"loss": 1.7731,
"step": 9
},
{
"epoch": 0.0024298384157453528,
"grad_norm": 0.4825509190559387,
"learning_rate": 8e-05,
"loss": 1.7685,
"step": 10
},
{
"epoch": 0.002672822257319888,
"grad_norm": 0.464072048664093,
"learning_rate": 8e-05,
"loss": 1.7744,
"step": 11
},
{
"epoch": 0.0029158060988944234,
"grad_norm": 0.514319658279419,
"learning_rate": 8e-05,
"loss": 1.9682,
"step": 12
},
{
"epoch": 0.0031587899404689587,
"grad_norm": 0.4752178490161896,
"learning_rate": 8e-05,
"loss": 1.7164,
"step": 13
},
{
"epoch": 0.003401773782043494,
"grad_norm": 0.5416842103004456,
"learning_rate": 8e-05,
"loss": 1.8342,
"step": 14
},
{
"epoch": 0.0036447576236180294,
"grad_norm": 0.513598620891571,
"learning_rate": 8e-05,
"loss": 1.9164,
"step": 15
},
{
"epoch": 0.0038877414651925647,
"grad_norm": 0.4487161934375763,
"learning_rate": 8e-05,
"loss": 1.7595,
"step": 16
},
{
"epoch": 0.0041307253067671,
"grad_norm": 0.5007575750350952,
"learning_rate": 8e-05,
"loss": 1.9731,
"step": 17
},
{
"epoch": 0.004373709148341635,
"grad_norm": 0.4565035402774811,
"learning_rate": 8e-05,
"loss": 1.7485,
"step": 18
},
{
"epoch": 0.00461669298991617,
"grad_norm": 0.4516490399837494,
"learning_rate": 8e-05,
"loss": 1.7877,
"step": 19
},
{
"epoch": 0.0048596768314907056,
"grad_norm": 0.4720084071159363,
"learning_rate": 8e-05,
"loss": 1.8198,
"step": 20
},
{
"epoch": 0.005102660673065241,
"grad_norm": 0.46458321809768677,
"learning_rate": 8e-05,
"loss": 1.7779,
"step": 21
},
{
"epoch": 0.005345644514639776,
"grad_norm": 0.46023452281951904,
"learning_rate": 8e-05,
"loss": 1.8233,
"step": 22
},
{
"epoch": 0.0055886283562143115,
"grad_norm": 0.45890012383461,
"learning_rate": 8e-05,
"loss": 1.8359,
"step": 23
},
{
"epoch": 0.005831612197788847,
"grad_norm": 0.4736693799495697,
"learning_rate": 8e-05,
"loss": 1.8834,
"step": 24
},
{
"epoch": 0.006074596039363382,
"grad_norm": 0.47544634342193604,
"learning_rate": 8e-05,
"loss": 1.8574,
"step": 25
},
{
"epoch": 0.0063175798809379175,
"grad_norm": 0.48038774728775024,
"learning_rate": 8e-05,
"loss": 1.9607,
"step": 26
},
{
"epoch": 0.006560563722512453,
"grad_norm": 0.4817793369293213,
"learning_rate": 8e-05,
"loss": 1.8488,
"step": 27
},
{
"epoch": 0.006803547564086988,
"grad_norm": 0.42538994550704956,
"learning_rate": 8e-05,
"loss": 1.7048,
"step": 28
},
{
"epoch": 0.0070465314056615234,
"grad_norm": 0.4736614525318146,
"learning_rate": 8e-05,
"loss": 1.882,
"step": 29
},
{
"epoch": 0.007289515247236059,
"grad_norm": 0.42797160148620605,
"learning_rate": 8e-05,
"loss": 1.7167,
"step": 30
},
{
"epoch": 0.007532499088810594,
"grad_norm": 0.4591984450817108,
"learning_rate": 8e-05,
"loss": 1.8623,
"step": 31
},
{
"epoch": 0.007775482930385129,
"grad_norm": 0.4495055377483368,
"learning_rate": 8e-05,
"loss": 1.8061,
"step": 32
},
{
"epoch": 0.008018466771959665,
"grad_norm": 0.44170689582824707,
"learning_rate": 8e-05,
"loss": 1.8883,
"step": 33
},
{
"epoch": 0.0082614506135342,
"grad_norm": 0.4610385298728943,
"learning_rate": 8e-05,
"loss": 1.9759,
"step": 34
},
{
"epoch": 0.008504434455108735,
"grad_norm": 0.4396963119506836,
"learning_rate": 8e-05,
"loss": 1.7499,
"step": 35
},
{
"epoch": 0.00874741829668327,
"grad_norm": 0.4422125220298767,
"learning_rate": 8e-05,
"loss": 1.8818,
"step": 36
},
{
"epoch": 0.008990402138257806,
"grad_norm": 0.42213165760040283,
"learning_rate": 8e-05,
"loss": 1.7612,
"step": 37
},
{
"epoch": 0.00923338597983234,
"grad_norm": 0.4536469280719757,
"learning_rate": 8e-05,
"loss": 1.8168,
"step": 38
},
{
"epoch": 0.009476369821406877,
"grad_norm": 0.4328950345516205,
"learning_rate": 8e-05,
"loss": 1.8431,
"step": 39
},
{
"epoch": 0.009719353662981411,
"grad_norm": 0.4586104452610016,
"learning_rate": 8e-05,
"loss": 1.9572,
"step": 40
},
{
"epoch": 0.009962337504555947,
"grad_norm": 0.4931277632713318,
"learning_rate": 8e-05,
"loss": 2.0045,
"step": 41
},
{
"epoch": 0.010205321346130482,
"grad_norm": 0.4379693269729614,
"learning_rate": 8e-05,
"loss": 1.7844,
"step": 42
},
{
"epoch": 0.010448305187705018,
"grad_norm": 0.4282945394515991,
"learning_rate": 8e-05,
"loss": 1.7884,
"step": 43
},
{
"epoch": 0.010691289029279552,
"grad_norm": 0.4561641812324524,
"learning_rate": 8e-05,
"loss": 1.6689,
"step": 44
},
{
"epoch": 0.010934272870854089,
"grad_norm": 0.4802621901035309,
"learning_rate": 8e-05,
"loss": 1.8825,
"step": 45
},
{
"epoch": 0.011177256712428623,
"grad_norm": 0.45682236552238464,
"learning_rate": 8e-05,
"loss": 1.9142,
"step": 46
},
{
"epoch": 0.01142024055400316,
"grad_norm": 0.4659646451473236,
"learning_rate": 8e-05,
"loss": 1.9928,
"step": 47
},
{
"epoch": 0.011663224395577694,
"grad_norm": 0.46792858839035034,
"learning_rate": 8e-05,
"loss": 1.7975,
"step": 48
},
{
"epoch": 0.01190620823715223,
"grad_norm": 0.42964687943458557,
"learning_rate": 8e-05,
"loss": 1.7913,
"step": 49
},
{
"epoch": 0.012149192078726764,
"grad_norm": 0.4474690854549408,
"learning_rate": 8e-05,
"loss": 1.8821,
"step": 50
},
{
"epoch": 0.0123921759203013,
"grad_norm": 0.43518373370170593,
"learning_rate": 8e-05,
"loss": 1.815,
"step": 51
},
{
"epoch": 0.012635159761875835,
"grad_norm": 0.41885894536972046,
"learning_rate": 8e-05,
"loss": 1.7571,
"step": 52
},
{
"epoch": 0.012878143603450371,
"grad_norm": 0.42882251739501953,
"learning_rate": 8e-05,
"loss": 1.8297,
"step": 53
},
{
"epoch": 0.013121127445024906,
"grad_norm": 0.4267278015613556,
"learning_rate": 8e-05,
"loss": 1.8158,
"step": 54
},
{
"epoch": 0.013364111286599442,
"grad_norm": 0.4405764937400818,
"learning_rate": 8e-05,
"loss": 1.9295,
"step": 55
},
{
"epoch": 0.013607095128173976,
"grad_norm": 0.45413410663604736,
"learning_rate": 8e-05,
"loss": 1.795,
"step": 56
},
{
"epoch": 0.013850078969748512,
"grad_norm": 0.4386683404445648,
"learning_rate": 8e-05,
"loss": 1.8841,
"step": 57
},
{
"epoch": 0.014093062811323047,
"grad_norm": 0.4813304841518402,
"learning_rate": 8e-05,
"loss": 1.9317,
"step": 58
},
{
"epoch": 0.014336046652897583,
"grad_norm": 0.4510192573070526,
"learning_rate": 8e-05,
"loss": 1.7689,
"step": 59
},
{
"epoch": 0.014579030494472118,
"grad_norm": 0.42841628193855286,
"learning_rate": 8e-05,
"loss": 1.7183,
"step": 60
},
{
"epoch": 0.014822014336046654,
"grad_norm": 0.4288236200809479,
"learning_rate": 8e-05,
"loss": 1.8038,
"step": 61
},
{
"epoch": 0.015064998177621188,
"grad_norm": 0.41781896352767944,
"learning_rate": 8e-05,
"loss": 1.8636,
"step": 62
},
{
"epoch": 0.015307982019195723,
"grad_norm": 0.4112628996372223,
"learning_rate": 8e-05,
"loss": 1.7913,
"step": 63
},
{
"epoch": 0.015550965860770259,
"grad_norm": 0.4400574564933777,
"learning_rate": 8e-05,
"loss": 1.78,
"step": 64
},
{
"epoch": 0.015793949702344795,
"grad_norm": 0.4312531352043152,
"learning_rate": 8e-05,
"loss": 1.6216,
"step": 65
},
{
"epoch": 0.01603693354391933,
"grad_norm": 0.4226854145526886,
"learning_rate": 8e-05,
"loss": 1.8446,
"step": 66
},
{
"epoch": 0.016279917385493864,
"grad_norm": 0.4536939561367035,
"learning_rate": 8e-05,
"loss": 1.8761,
"step": 67
},
{
"epoch": 0.0165229012270684,
"grad_norm": 0.44555526971817017,
"learning_rate": 8e-05,
"loss": 1.7852,
"step": 68
},
{
"epoch": 0.016765885068642936,
"grad_norm": 0.44413813948631287,
"learning_rate": 8e-05,
"loss": 1.7404,
"step": 69
},
{
"epoch": 0.01700886891021747,
"grad_norm": 0.4578434228897095,
"learning_rate": 8e-05,
"loss": 1.8257,
"step": 70
},
{
"epoch": 0.017251852751792005,
"grad_norm": 0.4213903546333313,
"learning_rate": 8e-05,
"loss": 1.7382,
"step": 71
},
{
"epoch": 0.01749483659336654,
"grad_norm": 0.4393966495990753,
"learning_rate": 8e-05,
"loss": 1.8235,
"step": 72
},
{
"epoch": 0.017737820434941078,
"grad_norm": 0.43298789858818054,
"learning_rate": 8e-05,
"loss": 1.9501,
"step": 73
},
{
"epoch": 0.017980804276515612,
"grad_norm": 0.43117642402648926,
"learning_rate": 8e-05,
"loss": 1.8529,
"step": 74
},
{
"epoch": 0.018223788118090146,
"grad_norm": 0.4453331530094147,
"learning_rate": 8e-05,
"loss": 1.8208,
"step": 75
},
{
"epoch": 0.01846677195966468,
"grad_norm": 0.4348840117454529,
"learning_rate": 8e-05,
"loss": 1.7738,
"step": 76
},
{
"epoch": 0.01870975580123922,
"grad_norm": 0.4274575412273407,
"learning_rate": 8e-05,
"loss": 1.6352,
"step": 77
},
{
"epoch": 0.018952739642813753,
"grad_norm": 0.45439624786376953,
"learning_rate": 8e-05,
"loss": 1.8004,
"step": 78
},
{
"epoch": 0.019195723484388288,
"grad_norm": 0.431682288646698,
"learning_rate": 8e-05,
"loss": 1.7191,
"step": 79
},
{
"epoch": 0.019438707325962822,
"grad_norm": 0.43740177154541016,
"learning_rate": 8e-05,
"loss": 1.7335,
"step": 80
},
{
"epoch": 0.01968169116753736,
"grad_norm": 0.44585227966308594,
"learning_rate": 8e-05,
"loss": 1.8385,
"step": 81
},
{
"epoch": 0.019924675009111895,
"grad_norm": 0.41222020983695984,
"learning_rate": 8e-05,
"loss": 1.8581,
"step": 82
},
{
"epoch": 0.02016765885068643,
"grad_norm": 0.4409661889076233,
"learning_rate": 8e-05,
"loss": 1.7644,
"step": 83
},
{
"epoch": 0.020410642692260964,
"grad_norm": 0.4678371846675873,
"learning_rate": 8e-05,
"loss": 1.9272,
"step": 84
},
{
"epoch": 0.0206536265338355,
"grad_norm": 0.43374109268188477,
"learning_rate": 8e-05,
"loss": 1.7207,
"step": 85
},
{
"epoch": 0.020896610375410036,
"grad_norm": 0.4561867117881775,
"learning_rate": 8e-05,
"loss": 1.7123,
"step": 86
},
{
"epoch": 0.02113959421698457,
"grad_norm": 0.4501771628856659,
"learning_rate": 8e-05,
"loss": 1.791,
"step": 87
},
{
"epoch": 0.021382578058559105,
"grad_norm": 0.4467450678348541,
"learning_rate": 8e-05,
"loss": 1.6327,
"step": 88
},
{
"epoch": 0.021625561900133643,
"grad_norm": 0.4093148112297058,
"learning_rate": 8e-05,
"loss": 1.682,
"step": 89
},
{
"epoch": 0.021868545741708177,
"grad_norm": 0.4533080458641052,
"learning_rate": 8e-05,
"loss": 1.761,
"step": 90
},
{
"epoch": 0.02211152958328271,
"grad_norm": 0.48888301849365234,
"learning_rate": 8e-05,
"loss": 1.7748,
"step": 91
},
{
"epoch": 0.022354513424857246,
"grad_norm": 0.43746045231819153,
"learning_rate": 8e-05,
"loss": 1.868,
"step": 92
},
{
"epoch": 0.022597497266431784,
"grad_norm": 0.4325760304927826,
"learning_rate": 8e-05,
"loss": 1.9195,
"step": 93
},
{
"epoch": 0.02284048110800632,
"grad_norm": 0.4253198802471161,
"learning_rate": 8e-05,
"loss": 1.7801,
"step": 94
},
{
"epoch": 0.023083464949580853,
"grad_norm": 0.44341036677360535,
"learning_rate": 8e-05,
"loss": 1.7678,
"step": 95
},
{
"epoch": 0.023326448791155387,
"grad_norm": 0.40490806102752686,
"learning_rate": 8e-05,
"loss": 1.7993,
"step": 96
},
{
"epoch": 0.023569432632729922,
"grad_norm": 0.4402480125427246,
"learning_rate": 8e-05,
"loss": 1.8517,
"step": 97
},
{
"epoch": 0.02381241647430446,
"grad_norm": 0.4148694574832916,
"learning_rate": 8e-05,
"loss": 1.8051,
"step": 98
},
{
"epoch": 0.024055400315878994,
"grad_norm": 0.4118325412273407,
"learning_rate": 8e-05,
"loss": 1.6258,
"step": 99
},
{
"epoch": 0.02429838415745353,
"grad_norm": 0.4318928122520447,
"learning_rate": 8e-05,
"loss": 1.9232,
"step": 100
},
{
"epoch": 0.024541367999028063,
"grad_norm": 0.43065184354782104,
"learning_rate": 8e-05,
"loss": 1.7095,
"step": 101
},
{
"epoch": 0.0247843518406026,
"grad_norm": 0.4622381627559662,
"learning_rate": 8e-05,
"loss": 1.7769,
"step": 102
},
{
"epoch": 0.025027335682177135,
"grad_norm": 0.4534037411212921,
"learning_rate": 8e-05,
"loss": 1.9383,
"step": 103
},
{
"epoch": 0.02527031952375167,
"grad_norm": 0.4303297996520996,
"learning_rate": 8e-05,
"loss": 1.8807,
"step": 104
},
{
"epoch": 0.025513303365326204,
"grad_norm": 0.44953134655952454,
"learning_rate": 8e-05,
"loss": 1.9548,
"step": 105
},
{
"epoch": 0.025756287206900742,
"grad_norm": 0.4983407258987427,
"learning_rate": 8e-05,
"loss": 1.8985,
"step": 106
},
{
"epoch": 0.025999271048475277,
"grad_norm": 0.4147990942001343,
"learning_rate": 8e-05,
"loss": 1.74,
"step": 107
},
{
"epoch": 0.02624225489004981,
"grad_norm": 0.41655194759368896,
"learning_rate": 8e-05,
"loss": 1.752,
"step": 108
},
{
"epoch": 0.026485238731624346,
"grad_norm": 0.43446749448776245,
"learning_rate": 8e-05,
"loss": 1.7903,
"step": 109
},
{
"epoch": 0.026728222573198884,
"grad_norm": 0.43117472529411316,
"learning_rate": 8e-05,
"loss": 1.8309,
"step": 110
},
{
"epoch": 0.026971206414773418,
"grad_norm": 0.4523254334926605,
"learning_rate": 8e-05,
"loss": 1.7257,
"step": 111
},
{
"epoch": 0.027214190256347952,
"grad_norm": 0.495615690946579,
"learning_rate": 8e-05,
"loss": 2.0216,
"step": 112
},
{
"epoch": 0.027457174097922487,
"grad_norm": 0.437045693397522,
"learning_rate": 8e-05,
"loss": 1.81,
"step": 113
},
{
"epoch": 0.027700157939497025,
"grad_norm": 0.44665664434432983,
"learning_rate": 8e-05,
"loss": 1.8155,
"step": 114
},
{
"epoch": 0.02794314178107156,
"grad_norm": 0.3989410102367401,
"learning_rate": 8e-05,
"loss": 1.699,
"step": 115
},
{
"epoch": 0.028186125622646094,
"grad_norm": 0.4348151981830597,
"learning_rate": 8e-05,
"loss": 1.6835,
"step": 116
},
{
"epoch": 0.028429109464220628,
"grad_norm": 0.4325806498527527,
"learning_rate": 8e-05,
"loss": 1.5317,
"step": 117
},
{
"epoch": 0.028672093305795166,
"grad_norm": 0.44066253304481506,
"learning_rate": 8e-05,
"loss": 1.7281,
"step": 118
},
{
"epoch": 0.0289150771473697,
"grad_norm": 0.4325154423713684,
"learning_rate": 8e-05,
"loss": 1.8724,
"step": 119
},
{
"epoch": 0.029158060988944235,
"grad_norm": 0.4403970539569855,
"learning_rate": 8e-05,
"loss": 1.7267,
"step": 120
},
{
"epoch": 0.02940104483051877,
"grad_norm": 0.44233599305152893,
"learning_rate": 8e-05,
"loss": 1.8199,
"step": 121
},
{
"epoch": 0.029644028672093307,
"grad_norm": 0.41565191745758057,
"learning_rate": 8e-05,
"loss": 1.6504,
"step": 122
},
{
"epoch": 0.029887012513667842,
"grad_norm": 0.4531630277633667,
"learning_rate": 8e-05,
"loss": 1.9043,
"step": 123
},
{
"epoch": 0.030129996355242376,
"grad_norm": 0.3977494239807129,
"learning_rate": 8e-05,
"loss": 1.6552,
"step": 124
},
{
"epoch": 0.03037298019681691,
"grad_norm": 0.40935423970222473,
"learning_rate": 8e-05,
"loss": 1.7107,
"step": 125
},
{
"epoch": 0.030615964038391445,
"grad_norm": 0.4547923505306244,
"learning_rate": 8e-05,
"loss": 1.8269,
"step": 126
},
{
"epoch": 0.030858947879965983,
"grad_norm": 0.4384150505065918,
"learning_rate": 8e-05,
"loss": 1.9686,
"step": 127
},
{
"epoch": 0.031101931721540518,
"grad_norm": 0.4189033508300781,
"learning_rate": 8e-05,
"loss": 1.5945,
"step": 128
},
{
"epoch": 0.031344915563115056,
"grad_norm": 0.4254000186920166,
"learning_rate": 8e-05,
"loss": 1.8124,
"step": 129
},
{
"epoch": 0.03158789940468959,
"grad_norm": 0.44288694858551025,
"learning_rate": 8e-05,
"loss": 1.8364,
"step": 130
},
{
"epoch": 0.031830883246264124,
"grad_norm": 0.4456717073917389,
"learning_rate": 8e-05,
"loss": 1.8809,
"step": 131
},
{
"epoch": 0.03207386708783866,
"grad_norm": 0.41588082909584045,
"learning_rate": 8e-05,
"loss": 1.7629,
"step": 132
},
{
"epoch": 0.03231685092941319,
"grad_norm": 0.43358904123306274,
"learning_rate": 8e-05,
"loss": 1.6689,
"step": 133
},
{
"epoch": 0.03255983477098773,
"grad_norm": 0.4369245171546936,
"learning_rate": 8e-05,
"loss": 1.8364,
"step": 134
},
{
"epoch": 0.03280281861256226,
"grad_norm": 0.4606565237045288,
"learning_rate": 8e-05,
"loss": 1.9564,
"step": 135
},
{
"epoch": 0.0330458024541368,
"grad_norm": 0.44513610005378723,
"learning_rate": 8e-05,
"loss": 1.8491,
"step": 136
},
{
"epoch": 0.03328878629571134,
"grad_norm": 0.41755104064941406,
"learning_rate": 8e-05,
"loss": 1.7142,
"step": 137
},
{
"epoch": 0.03353177013728587,
"grad_norm": 0.42471590638160706,
"learning_rate": 8e-05,
"loss": 1.6703,
"step": 138
},
{
"epoch": 0.03377475397886041,
"grad_norm": 0.43392664194107056,
"learning_rate": 8e-05,
"loss": 1.771,
"step": 139
},
{
"epoch": 0.03401773782043494,
"grad_norm": 0.44673675298690796,
"learning_rate": 8e-05,
"loss": 1.8783,
"step": 140
},
{
"epoch": 0.034260721662009476,
"grad_norm": 0.43131935596466064,
"learning_rate": 8e-05,
"loss": 1.7653,
"step": 141
},
{
"epoch": 0.03450370550358401,
"grad_norm": 0.39308279752731323,
"learning_rate": 8e-05,
"loss": 1.6689,
"step": 142
},
{
"epoch": 0.034746689345158545,
"grad_norm": 0.42498987913131714,
"learning_rate": 8e-05,
"loss": 1.7597,
"step": 143
},
{
"epoch": 0.03498967318673308,
"grad_norm": 0.43941235542297363,
"learning_rate": 8e-05,
"loss": 1.7055,
"step": 144
},
{
"epoch": 0.03523265702830762,
"grad_norm": 0.4112749695777893,
"learning_rate": 8e-05,
"loss": 1.6473,
"step": 145
},
{
"epoch": 0.035475640869882155,
"grad_norm": 0.39101311564445496,
"learning_rate": 8e-05,
"loss": 1.5963,
"step": 146
},
{
"epoch": 0.03571862471145669,
"grad_norm": 0.429481565952301,
"learning_rate": 8e-05,
"loss": 1.6638,
"step": 147
},
{
"epoch": 0.035961608553031224,
"grad_norm": 0.40940847992897034,
"learning_rate": 8e-05,
"loss": 1.7589,
"step": 148
},
{
"epoch": 0.03620459239460576,
"grad_norm": 0.42588290572166443,
"learning_rate": 8e-05,
"loss": 1.8461,
"step": 149
},
{
"epoch": 0.03644757623618029,
"grad_norm": 0.4170544743537903,
"learning_rate": 8e-05,
"loss": 1.6811,
"step": 150
},
{
"epoch": 0.03669056007775483,
"grad_norm": 0.41155344247817993,
"learning_rate": 8e-05,
"loss": 1.7645,
"step": 151
},
{
"epoch": 0.03693354391932936,
"grad_norm": 0.4302254617214203,
"learning_rate": 8e-05,
"loss": 1.7034,
"step": 152
},
{
"epoch": 0.0371765277609039,
"grad_norm": 0.43057015538215637,
"learning_rate": 8e-05,
"loss": 1.7479,
"step": 153
},
{
"epoch": 0.03741951160247844,
"grad_norm": 0.41998055577278137,
"learning_rate": 8e-05,
"loss": 1.8225,
"step": 154
},
{
"epoch": 0.03766249544405297,
"grad_norm": 0.41916316747665405,
"learning_rate": 8e-05,
"loss": 1.6778,
"step": 155
},
{
"epoch": 0.03790547928562751,
"grad_norm": 0.42347627878189087,
"learning_rate": 8e-05,
"loss": 1.8304,
"step": 156
},
{
"epoch": 0.03814846312720204,
"grad_norm": 0.4198334515094757,
"learning_rate": 8e-05,
"loss": 1.7716,
"step": 157
},
{
"epoch": 0.038391446968776576,
"grad_norm": 0.4451964497566223,
"learning_rate": 8e-05,
"loss": 1.8368,
"step": 158
},
{
"epoch": 0.03863443081035111,
"grad_norm": 0.4325767755508423,
"learning_rate": 8e-05,
"loss": 1.7495,
"step": 159
},
{
"epoch": 0.038877414651925644,
"grad_norm": 0.43794333934783936,
"learning_rate": 8e-05,
"loss": 1.858,
"step": 160
},
{
"epoch": 0.03912039849350018,
"grad_norm": 0.4283236861228943,
"learning_rate": 8e-05,
"loss": 1.7569,
"step": 161
},
{
"epoch": 0.03936338233507472,
"grad_norm": 0.4178604185581207,
"learning_rate": 8e-05,
"loss": 1.6726,
"step": 162
},
{
"epoch": 0.039606366176649255,
"grad_norm": 0.43650540709495544,
"learning_rate": 8e-05,
"loss": 1.8255,
"step": 163
},
{
"epoch": 0.03984935001822379,
"grad_norm": 0.40848660469055176,
"learning_rate": 8e-05,
"loss": 1.6742,
"step": 164
},
{
"epoch": 0.040092333859798324,
"grad_norm": 0.4333600699901581,
"learning_rate": 8e-05,
"loss": 1.7795,
"step": 165
},
{
"epoch": 0.04033531770137286,
"grad_norm": 0.43268805742263794,
"learning_rate": 8e-05,
"loss": 1.7503,
"step": 166
},
{
"epoch": 0.04057830154294739,
"grad_norm": 0.39446377754211426,
"learning_rate": 8e-05,
"loss": 1.6284,
"step": 167
},
{
"epoch": 0.04082128538452193,
"grad_norm": 0.40882980823516846,
"learning_rate": 8e-05,
"loss": 1.6174,
"step": 168
},
{
"epoch": 0.04106426922609646,
"grad_norm": 0.43151581287384033,
"learning_rate": 8e-05,
"loss": 1.8183,
"step": 169
},
{
"epoch": 0.041307253067671,
"grad_norm": 0.43037307262420654,
"learning_rate": 8e-05,
"loss": 1.7799,
"step": 170
},
{
"epoch": 0.04155023690924554,
"grad_norm": 0.42919039726257324,
"learning_rate": 8e-05,
"loss": 1.7856,
"step": 171
},
{
"epoch": 0.04179322075082007,
"grad_norm": 0.41983699798583984,
"learning_rate": 8e-05,
"loss": 1.7359,
"step": 172
},
{
"epoch": 0.042036204592394606,
"grad_norm": 0.432204931974411,
"learning_rate": 8e-05,
"loss": 1.6861,
"step": 173
},
{
"epoch": 0.04227918843396914,
"grad_norm": 0.4395346939563751,
"learning_rate": 8e-05,
"loss": 1.7703,
"step": 174
},
{
"epoch": 0.042522172275543675,
"grad_norm": 0.4153292775154114,
"learning_rate": 8e-05,
"loss": 1.7165,
"step": 175
},
{
"epoch": 0.04276515611711821,
"grad_norm": 0.39776793122291565,
"learning_rate": 8e-05,
"loss": 1.6566,
"step": 176
},
{
"epoch": 0.043008139958692744,
"grad_norm": 0.4581263065338135,
"learning_rate": 8e-05,
"loss": 1.7415,
"step": 177
},
{
"epoch": 0.043251123800267285,
"grad_norm": 0.4424501657485962,
"learning_rate": 8e-05,
"loss": 1.8035,
"step": 178
},
{
"epoch": 0.04349410764184182,
"grad_norm": 0.480151891708374,
"learning_rate": 8e-05,
"loss": 1.7629,
"step": 179
},
{
"epoch": 0.043737091483416354,
"grad_norm": 0.4894304871559143,
"learning_rate": 8e-05,
"loss": 1.7972,
"step": 180
},
{
"epoch": 0.04398007532499089,
"grad_norm": 0.41660112142562866,
"learning_rate": 8e-05,
"loss": 1.7438,
"step": 181
},
{
"epoch": 0.04422305916656542,
"grad_norm": 0.41020017862319946,
"learning_rate": 8e-05,
"loss": 1.6237,
"step": 182
},
{
"epoch": 0.04446604300813996,
"grad_norm": 0.4876617193222046,
"learning_rate": 8e-05,
"loss": 1.9507,
"step": 183
},
{
"epoch": 0.04470902684971449,
"grad_norm": 0.4428721070289612,
"learning_rate": 8e-05,
"loss": 1.7743,
"step": 184
},
{
"epoch": 0.04495201069128903,
"grad_norm": 0.40488573908805847,
"learning_rate": 8e-05,
"loss": 1.6729,
"step": 185
},
{
"epoch": 0.04519499453286357,
"grad_norm": 0.4551525413990021,
"learning_rate": 8e-05,
"loss": 1.7132,
"step": 186
},
{
"epoch": 0.0454379783744381,
"grad_norm": 0.4685042202472687,
"learning_rate": 8e-05,
"loss": 1.8417,
"step": 187
},
{
"epoch": 0.04568096221601264,
"grad_norm": 0.43833059072494507,
"learning_rate": 8e-05,
"loss": 1.8575,
"step": 188
},
{
"epoch": 0.04592394605758717,
"grad_norm": 0.44860225915908813,
"learning_rate": 8e-05,
"loss": 1.8212,
"step": 189
},
{
"epoch": 0.046166929899161706,
"grad_norm": 0.4465605914592743,
"learning_rate": 8e-05,
"loss": 1.7541,
"step": 190
},
{
"epoch": 0.04640991374073624,
"grad_norm": 0.4442957639694214,
"learning_rate": 8e-05,
"loss": 1.9162,
"step": 191
},
{
"epoch": 0.046652897582310775,
"grad_norm": 0.4390282928943634,
"learning_rate": 8e-05,
"loss": 1.7281,
"step": 192
},
{
"epoch": 0.04689588142388531,
"grad_norm": 0.454426646232605,
"learning_rate": 8e-05,
"loss": 1.8302,
"step": 193
},
{
"epoch": 0.047138865265459844,
"grad_norm": 0.43619468808174133,
"learning_rate": 8e-05,
"loss": 1.6894,
"step": 194
},
{
"epoch": 0.047381849107034385,
"grad_norm": 0.40817612409591675,
"learning_rate": 8e-05,
"loss": 1.7071,
"step": 195
},
{
"epoch": 0.04762483294860892,
"grad_norm": 0.42603886127471924,
"learning_rate": 8e-05,
"loss": 1.8151,
"step": 196
},
{
"epoch": 0.047867816790183454,
"grad_norm": 0.4507157504558563,
"learning_rate": 8e-05,
"loss": 1.7738,
"step": 197
},
{
"epoch": 0.04811080063175799,
"grad_norm": 0.4105568826198578,
"learning_rate": 8e-05,
"loss": 1.7082,
"step": 198
},
{
"epoch": 0.04835378447333252,
"grad_norm": 0.40932849049568176,
"learning_rate": 8e-05,
"loss": 1.6927,
"step": 199
},
{
"epoch": 0.04859676831490706,
"grad_norm": 0.4452005624771118,
"learning_rate": 8e-05,
"loss": 1.676,
"step": 200
},
{
"epoch": 0.04883975215648159,
"grad_norm": 0.44663047790527344,
"learning_rate": 8e-05,
"loss": 1.7947,
"step": 201
},
{
"epoch": 0.049082735998056126,
"grad_norm": 0.4227569103240967,
"learning_rate": 8e-05,
"loss": 1.8643,
"step": 202
},
{
"epoch": 0.04932571983963067,
"grad_norm": 0.42726418375968933,
"learning_rate": 8e-05,
"loss": 1.7908,
"step": 203
},
{
"epoch": 0.0495687036812052,
"grad_norm": 0.44869542121887207,
"learning_rate": 8e-05,
"loss": 1.8512,
"step": 204
},
{
"epoch": 0.049811687522779736,
"grad_norm": 0.43665412068367004,
"learning_rate": 8e-05,
"loss": 1.7925,
"step": 205
},
{
"epoch": 0.05005467136435427,
"grad_norm": 0.4294425845146179,
"learning_rate": 8e-05,
"loss": 1.7406,
"step": 206
},
{
"epoch": 0.050297655205928805,
"grad_norm": 0.4532032310962677,
"learning_rate": 8e-05,
"loss": 1.8743,
"step": 207
},
{
"epoch": 0.05054063904750334,
"grad_norm": 0.4643804132938385,
"learning_rate": 8e-05,
"loss": 1.7961,
"step": 208
},
{
"epoch": 0.050783622889077874,
"grad_norm": 0.412218302488327,
"learning_rate": 8e-05,
"loss": 1.6942,
"step": 209
},
{
"epoch": 0.05102660673065241,
"grad_norm": 0.45216429233551025,
"learning_rate": 8e-05,
"loss": 1.8609,
"step": 210
},
{
"epoch": 0.05126959057222695,
"grad_norm": 0.42403680086135864,
"learning_rate": 8e-05,
"loss": 1.7602,
"step": 211
},
{
"epoch": 0.051512574413801485,
"grad_norm": 0.4293997883796692,
"learning_rate": 8e-05,
"loss": 1.6597,
"step": 212
},
{
"epoch": 0.05175555825537602,
"grad_norm": 0.4341531991958618,
"learning_rate": 8e-05,
"loss": 1.8109,
"step": 213
},
{
"epoch": 0.051998542096950554,
"grad_norm": 0.40185922384262085,
"learning_rate": 8e-05,
"loss": 1.6082,
"step": 214
},
{
"epoch": 0.05224152593852509,
"grad_norm": 0.4393584728240967,
"learning_rate": 8e-05,
"loss": 1.8533,
"step": 215
},
{
"epoch": 0.05248450978009962,
"grad_norm": 0.42899441719055176,
"learning_rate": 8e-05,
"loss": 1.7643,
"step": 216
},
{
"epoch": 0.05272749362167416,
"grad_norm": 0.4237852692604065,
"learning_rate": 8e-05,
"loss": 1.7457,
"step": 217
},
{
"epoch": 0.05297047746324869,
"grad_norm": 0.42249593138694763,
"learning_rate": 8e-05,
"loss": 1.7127,
"step": 218
},
{
"epoch": 0.05321346130482323,
"grad_norm": 0.421320378780365,
"learning_rate": 8e-05,
"loss": 1.7894,
"step": 219
},
{
"epoch": 0.05345644514639777,
"grad_norm": 0.4289996325969696,
"learning_rate": 8e-05,
"loss": 1.6778,
"step": 220
},
{
"epoch": 0.0536994289879723,
"grad_norm": 0.4348432421684265,
"learning_rate": 8e-05,
"loss": 1.8268,
"step": 221
},
{
"epoch": 0.053942412829546836,
"grad_norm": 0.41037532687187195,
"learning_rate": 8e-05,
"loss": 1.8001,
"step": 222
},
{
"epoch": 0.05418539667112137,
"grad_norm": 0.4165085256099701,
"learning_rate": 8e-05,
"loss": 1.7297,
"step": 223
},
{
"epoch": 0.054428380512695905,
"grad_norm": 0.44498410820961,
"learning_rate": 8e-05,
"loss": 1.8597,
"step": 224
},
{
"epoch": 0.05467136435427044,
"grad_norm": 0.40958845615386963,
"learning_rate": 8e-05,
"loss": 1.6655,
"step": 225
},
{
"epoch": 0.054914348195844974,
"grad_norm": 0.4312472343444824,
"learning_rate": 8e-05,
"loss": 1.7577,
"step": 226
},
{
"epoch": 0.05515733203741951,
"grad_norm": 0.4492066204547882,
"learning_rate": 8e-05,
"loss": 1.84,
"step": 227
},
{
"epoch": 0.05540031587899405,
"grad_norm": 0.40647533535957336,
"learning_rate": 8e-05,
"loss": 1.6446,
"step": 228
},
{
"epoch": 0.055643299720568584,
"grad_norm": 0.429390549659729,
"learning_rate": 8e-05,
"loss": 1.7794,
"step": 229
},
{
"epoch": 0.05588628356214312,
"grad_norm": 0.41633883118629456,
"learning_rate": 8e-05,
"loss": 1.8394,
"step": 230
},
{
"epoch": 0.05612926740371765,
"grad_norm": 0.40792158246040344,
"learning_rate": 8e-05,
"loss": 1.6387,
"step": 231
},
{
"epoch": 0.05637225124529219,
"grad_norm": 0.41727787256240845,
"learning_rate": 8e-05,
"loss": 1.7214,
"step": 232
},
{
"epoch": 0.05661523508686672,
"grad_norm": 0.4122094213962555,
"learning_rate": 8e-05,
"loss": 1.7498,
"step": 233
},
{
"epoch": 0.056858218928441256,
"grad_norm": 0.43074655532836914,
"learning_rate": 8e-05,
"loss": 1.8505,
"step": 234
},
{
"epoch": 0.05710120277001579,
"grad_norm": 0.4109654426574707,
"learning_rate": 8e-05,
"loss": 1.6622,
"step": 235
},
{
"epoch": 0.05734418661159033,
"grad_norm": 0.40236973762512207,
"learning_rate": 8e-05,
"loss": 1.6639,
"step": 236
},
{
"epoch": 0.05758717045316487,
"grad_norm": 0.43539148569107056,
"learning_rate": 8e-05,
"loss": 1.8052,
"step": 237
},
{
"epoch": 0.0578301542947394,
"grad_norm": 0.41733142733573914,
"learning_rate": 8e-05,
"loss": 1.7533,
"step": 238
},
{
"epoch": 0.058073138136313936,
"grad_norm": 0.4331442415714264,
"learning_rate": 8e-05,
"loss": 1.7759,
"step": 239
},
{
"epoch": 0.05831612197788847,
"grad_norm": 0.4151133894920349,
"learning_rate": 8e-05,
"loss": 1.8155,
"step": 240
},
{
"epoch": 0.058559105819463005,
"grad_norm": 0.42025041580200195,
"learning_rate": 8e-05,
"loss": 1.7744,
"step": 241
},
{
"epoch": 0.05880208966103754,
"grad_norm": 0.4147380590438843,
"learning_rate": 8e-05,
"loss": 1.7317,
"step": 242
},
{
"epoch": 0.059045073502612073,
"grad_norm": 0.3916904330253601,
"learning_rate": 8e-05,
"loss": 1.7501,
"step": 243
},
{
"epoch": 0.059288057344186615,
"grad_norm": 0.4432324469089508,
"learning_rate": 8e-05,
"loss": 1.8159,
"step": 244
},
{
"epoch": 0.05953104118576115,
"grad_norm": 0.4161258935928345,
"learning_rate": 8e-05,
"loss": 1.7629,
"step": 245
},
{
"epoch": 0.059774025027335684,
"grad_norm": 0.4148889482021332,
"learning_rate": 8e-05,
"loss": 1.7103,
"step": 246
},
{
"epoch": 0.06001700886891022,
"grad_norm": 0.4157002866268158,
"learning_rate": 8e-05,
"loss": 1.7166,
"step": 247
},
{
"epoch": 0.06025999271048475,
"grad_norm": 0.39465364813804626,
"learning_rate": 8e-05,
"loss": 1.6737,
"step": 248
},
{
"epoch": 0.06050297655205929,
"grad_norm": 0.41584497690200806,
"learning_rate": 8e-05,
"loss": 1.744,
"step": 249
},
{
"epoch": 0.06074596039363382,
"grad_norm": 0.4148780107498169,
"learning_rate": 8e-05,
"loss": 1.7409,
"step": 250
},
{
"epoch": 0.060988944235208356,
"grad_norm": 0.43678727746009827,
"learning_rate": 8e-05,
"loss": 1.8074,
"step": 251
},
{
"epoch": 0.06123192807678289,
"grad_norm": 0.4008216857910156,
"learning_rate": 8e-05,
"loss": 1.6168,
"step": 252
},
{
"epoch": 0.06147491191835743,
"grad_norm": 0.4190528094768524,
"learning_rate": 8e-05,
"loss": 1.6,
"step": 253
},
{
"epoch": 0.061717895759931966,
"grad_norm": 0.4158279001712799,
"learning_rate": 8e-05,
"loss": 1.7602,
"step": 254
},
{
"epoch": 0.0619608796015065,
"grad_norm": 0.4088147282600403,
"learning_rate": 8e-05,
"loss": 1.7789,
"step": 255
},
{
"epoch": 0.062203863443081035,
"grad_norm": 0.41537022590637207,
"learning_rate": 8e-05,
"loss": 1.669,
"step": 256
},
{
"epoch": 0.06244684728465557,
"grad_norm": 0.3931873142719269,
"learning_rate": 8e-05,
"loss": 1.5828,
"step": 257
},
{
"epoch": 0.06268983112623011,
"grad_norm": 0.4231928884983063,
"learning_rate": 8e-05,
"loss": 1.7634,
"step": 258
},
{
"epoch": 0.06293281496780465,
"grad_norm": 0.396416574716568,
"learning_rate": 8e-05,
"loss": 1.6611,
"step": 259
},
{
"epoch": 0.06317579880937918,
"grad_norm": 0.39217904210090637,
"learning_rate": 8e-05,
"loss": 1.6942,
"step": 260
},
{
"epoch": 0.06341878265095371,
"grad_norm": 0.4122065305709839,
"learning_rate": 8e-05,
"loss": 1.874,
"step": 261
},
{
"epoch": 0.06366176649252825,
"grad_norm": 0.4038335680961609,
"learning_rate": 8e-05,
"loss": 1.7264,
"step": 262
},
{
"epoch": 0.06390475033410278,
"grad_norm": 0.4242023229598999,
"learning_rate": 8e-05,
"loss": 1.8315,
"step": 263
},
{
"epoch": 0.06414773417567732,
"grad_norm": 0.41710829734802246,
"learning_rate": 8e-05,
"loss": 1.6869,
"step": 264
},
{
"epoch": 0.06439071801725185,
"grad_norm": 0.4024234116077423,
"learning_rate": 8e-05,
"loss": 1.6267,
"step": 265
},
{
"epoch": 0.06463370185882639,
"grad_norm": 0.43158861994743347,
"learning_rate": 8e-05,
"loss": 1.8,
"step": 266
},
{
"epoch": 0.06487668570040092,
"grad_norm": 0.4473307132720947,
"learning_rate": 8e-05,
"loss": 1.8952,
"step": 267
},
{
"epoch": 0.06511966954197546,
"grad_norm": 0.4519525170326233,
"learning_rate": 8e-05,
"loss": 1.9715,
"step": 268
},
{
"epoch": 0.06536265338354999,
"grad_norm": 0.42093324661254883,
"learning_rate": 8e-05,
"loss": 1.848,
"step": 269
},
{
"epoch": 0.06560563722512452,
"grad_norm": 0.44148561358451843,
"learning_rate": 8e-05,
"loss": 1.7559,
"step": 270
},
{
"epoch": 0.06584862106669906,
"grad_norm": 0.4208815097808838,
"learning_rate": 8e-05,
"loss": 1.8728,
"step": 271
},
{
"epoch": 0.0660916049082736,
"grad_norm": 0.4542776942253113,
"learning_rate": 8e-05,
"loss": 1.7405,
"step": 272
},
{
"epoch": 0.06633458874984814,
"grad_norm": 0.4561680257320404,
"learning_rate": 8e-05,
"loss": 1.8184,
"step": 273
},
{
"epoch": 0.06657757259142268,
"grad_norm": 0.4309070408344269,
"learning_rate": 8e-05,
"loss": 1.7915,
"step": 274
},
{
"epoch": 0.06682055643299721,
"grad_norm": 0.4592060446739197,
"learning_rate": 8e-05,
"loss": 1.8771,
"step": 275
},
{
"epoch": 0.06706354027457175,
"grad_norm": 0.4334331452846527,
"learning_rate": 8e-05,
"loss": 1.7187,
"step": 276
},
{
"epoch": 0.06730652411614628,
"grad_norm": 0.41513165831565857,
"learning_rate": 8e-05,
"loss": 1.7187,
"step": 277
},
{
"epoch": 0.06754950795772081,
"grad_norm": 0.42725256085395813,
"learning_rate": 8e-05,
"loss": 1.6586,
"step": 278
},
{
"epoch": 0.06779249179929535,
"grad_norm": 0.42010682821273804,
"learning_rate": 8e-05,
"loss": 1.7787,
"step": 279
},
{
"epoch": 0.06803547564086988,
"grad_norm": 0.41037890315055847,
"learning_rate": 8e-05,
"loss": 1.7413,
"step": 280
},
{
"epoch": 0.06827845948244442,
"grad_norm": 0.4202909469604492,
"learning_rate": 8e-05,
"loss": 1.8023,
"step": 281
},
{
"epoch": 0.06852144332401895,
"grad_norm": 0.38481223583221436,
"learning_rate": 8e-05,
"loss": 1.6706,
"step": 282
},
{
"epoch": 0.06876442716559349,
"grad_norm": 0.4297863245010376,
"learning_rate": 8e-05,
"loss": 1.743,
"step": 283
},
{
"epoch": 0.06900741100716802,
"grad_norm": 0.448222279548645,
"learning_rate": 8e-05,
"loss": 1.9037,
"step": 284
},
{
"epoch": 0.06925039484874256,
"grad_norm": 0.42773011326789856,
"learning_rate": 8e-05,
"loss": 1.6506,
"step": 285
},
{
"epoch": 0.06949337869031709,
"grad_norm": 0.425235778093338,
"learning_rate": 8e-05,
"loss": 1.8267,
"step": 286
},
{
"epoch": 0.06973636253189162,
"grad_norm": 0.42985713481903076,
"learning_rate": 8e-05,
"loss": 1.8545,
"step": 287
},
{
"epoch": 0.06997934637346616,
"grad_norm": 0.4162665009498596,
"learning_rate": 8e-05,
"loss": 1.8069,
"step": 288
},
{
"epoch": 0.07022233021504069,
"grad_norm": 0.4128991961479187,
"learning_rate": 8e-05,
"loss": 1.6796,
"step": 289
},
{
"epoch": 0.07046531405661524,
"grad_norm": 0.41411450505256653,
"learning_rate": 8e-05,
"loss": 1.8225,
"step": 290
},
{
"epoch": 0.07070829789818978,
"grad_norm": 0.4319620132446289,
"learning_rate": 8e-05,
"loss": 1.7133,
"step": 291
},
{
"epoch": 0.07095128173976431,
"grad_norm": 0.42714792490005493,
"learning_rate": 8e-05,
"loss": 1.7042,
"step": 292
},
{
"epoch": 0.07119426558133884,
"grad_norm": 0.41195017099380493,
"learning_rate": 8e-05,
"loss": 1.747,
"step": 293
},
{
"epoch": 0.07143724942291338,
"grad_norm": 0.411868155002594,
"learning_rate": 8e-05,
"loss": 1.7709,
"step": 294
},
{
"epoch": 0.07168023326448791,
"grad_norm": 2.195003032684326,
"learning_rate": 8e-05,
"loss": 1.7605,
"step": 295
},
{
"epoch": 0.07192321710606245,
"grad_norm": 0.4210272431373596,
"learning_rate": 8e-05,
"loss": 1.7884,
"step": 296
},
{
"epoch": 0.07216620094763698,
"grad_norm": 0.41573718190193176,
"learning_rate": 8e-05,
"loss": 1.7787,
"step": 297
},
{
"epoch": 0.07240918478921152,
"grad_norm": 0.42812490463256836,
"learning_rate": 8e-05,
"loss": 1.7738,
"step": 298
},
{
"epoch": 0.07265216863078605,
"grad_norm": 0.4160097539424896,
"learning_rate": 8e-05,
"loss": 1.6746,
"step": 299
},
{
"epoch": 0.07289515247236059,
"grad_norm": 0.4339464008808136,
"learning_rate": 8e-05,
"loss": 1.7024,
"step": 300
},
{
"epoch": 0.07313813631393512,
"grad_norm": 0.40688636898994446,
"learning_rate": 8e-05,
"loss": 1.7263,
"step": 301
},
{
"epoch": 0.07338112015550965,
"grad_norm": 0.4286927282810211,
"learning_rate": 8e-05,
"loss": 1.8026,
"step": 302
},
{
"epoch": 0.07362410399708419,
"grad_norm": 0.4497995972633362,
"learning_rate": 8e-05,
"loss": 1.8929,
"step": 303
},
{
"epoch": 0.07386708783865872,
"grad_norm": 0.39630329608917236,
"learning_rate": 8e-05,
"loss": 1.7391,
"step": 304
},
{
"epoch": 0.07411007168023326,
"grad_norm": 0.43036988377571106,
"learning_rate": 8e-05,
"loss": 1.7689,
"step": 305
},
{
"epoch": 0.0743530555218078,
"grad_norm": 0.41594815254211426,
"learning_rate": 8e-05,
"loss": 1.7756,
"step": 306
},
{
"epoch": 0.07459603936338234,
"grad_norm": 0.42958587408065796,
"learning_rate": 8e-05,
"loss": 1.8022,
"step": 307
},
{
"epoch": 0.07483902320495688,
"grad_norm": 0.41847291588783264,
"learning_rate": 8e-05,
"loss": 1.6651,
"step": 308
},
{
"epoch": 0.07508200704653141,
"grad_norm": 0.4142318665981293,
"learning_rate": 8e-05,
"loss": 1.8017,
"step": 309
},
{
"epoch": 0.07532499088810594,
"grad_norm": 0.41507381200790405,
"learning_rate": 8e-05,
"loss": 1.6409,
"step": 310
},
{
"epoch": 0.07556797472968048,
"grad_norm": 0.4372384548187256,
"learning_rate": 8e-05,
"loss": 1.9386,
"step": 311
},
{
"epoch": 0.07581095857125501,
"grad_norm": 0.4396492838859558,
"learning_rate": 8e-05,
"loss": 1.8556,
"step": 312
},
{
"epoch": 0.07605394241282955,
"grad_norm": 0.40758559107780457,
"learning_rate": 8e-05,
"loss": 1.809,
"step": 313
},
{
"epoch": 0.07629692625440408,
"grad_norm": 0.4153490662574768,
"learning_rate": 8e-05,
"loss": 1.6373,
"step": 314
},
{
"epoch": 0.07653991009597862,
"grad_norm": 0.43232935667037964,
"learning_rate": 8e-05,
"loss": 1.6996,
"step": 315
},
{
"epoch": 0.07678289393755315,
"grad_norm": 0.4399256706237793,
"learning_rate": 8e-05,
"loss": 1.7999,
"step": 316
},
{
"epoch": 0.07702587777912769,
"grad_norm": 0.4185955822467804,
"learning_rate": 8e-05,
"loss": 1.8538,
"step": 317
},
{
"epoch": 0.07726886162070222,
"grad_norm": 0.42307454347610474,
"learning_rate": 8e-05,
"loss": 1.7355,
"step": 318
},
{
"epoch": 0.07751184546227675,
"grad_norm": 0.42181700468063354,
"learning_rate": 8e-05,
"loss": 1.5743,
"step": 319
},
{
"epoch": 0.07775482930385129,
"grad_norm": 0.42582136392593384,
"learning_rate": 8e-05,
"loss": 1.642,
"step": 320
},
{
"epoch": 0.07799781314542582,
"grad_norm": 0.40768539905548096,
"learning_rate": 8e-05,
"loss": 1.779,
"step": 321
},
{
"epoch": 0.07824079698700036,
"grad_norm": 0.42627450823783875,
"learning_rate": 8e-05,
"loss": 1.6912,
"step": 322
},
{
"epoch": 0.0784837808285749,
"grad_norm": 0.4287555515766144,
"learning_rate": 8e-05,
"loss": 1.6869,
"step": 323
},
{
"epoch": 0.07872676467014944,
"grad_norm": 0.4250035285949707,
"learning_rate": 8e-05,
"loss": 1.7786,
"step": 324
},
{
"epoch": 0.07896974851172398,
"grad_norm": 0.42147719860076904,
"learning_rate": 8e-05,
"loss": 1.7021,
"step": 325
},
{
"epoch": 0.07921273235329851,
"grad_norm": 0.4258410334587097,
"learning_rate": 8e-05,
"loss": 1.7664,
"step": 326
},
{
"epoch": 0.07945571619487304,
"grad_norm": 0.42165327072143555,
"learning_rate": 8e-05,
"loss": 1.857,
"step": 327
},
{
"epoch": 0.07969870003644758,
"grad_norm": 0.3965833783149719,
"learning_rate": 8e-05,
"loss": 1.712,
"step": 328
},
{
"epoch": 0.07994168387802211,
"grad_norm": 0.4351164698600769,
"learning_rate": 8e-05,
"loss": 1.8151,
"step": 329
},
{
"epoch": 0.08018466771959665,
"grad_norm": 0.41587620973587036,
"learning_rate": 8e-05,
"loss": 1.7757,
"step": 330
},
{
"epoch": 0.08042765156117118,
"grad_norm": 0.422070175409317,
"learning_rate": 8e-05,
"loss": 1.7251,
"step": 331
},
{
"epoch": 0.08067063540274572,
"grad_norm": 0.4227633476257324,
"learning_rate": 8e-05,
"loss": 1.7727,
"step": 332
},
{
"epoch": 0.08091361924432025,
"grad_norm": 0.42356976866722107,
"learning_rate": 8e-05,
"loss": 1.7924,
"step": 333
},
{
"epoch": 0.08115660308589479,
"grad_norm": 0.4465808868408203,
"learning_rate": 8e-05,
"loss": 1.8332,
"step": 334
},
{
"epoch": 0.08139958692746932,
"grad_norm": 0.4090607762336731,
"learning_rate": 8e-05,
"loss": 1.7735,
"step": 335
},
{
"epoch": 0.08164257076904385,
"grad_norm": 0.4155711233615875,
"learning_rate": 8e-05,
"loss": 1.8427,
"step": 336
},
{
"epoch": 0.08188555461061839,
"grad_norm": 0.42290425300598145,
"learning_rate": 8e-05,
"loss": 1.7565,
"step": 337
},
{
"epoch": 0.08212853845219292,
"grad_norm": 0.427473783493042,
"learning_rate": 8e-05,
"loss": 1.8271,
"step": 338
},
{
"epoch": 0.08237152229376747,
"grad_norm": 0.41899219155311584,
"learning_rate": 8e-05,
"loss": 1.866,
"step": 339
},
{
"epoch": 0.082614506135342,
"grad_norm": 0.4216976463794708,
"learning_rate": 8e-05,
"loss": 1.8554,
"step": 340
},
{
"epoch": 0.08285748997691654,
"grad_norm": 0.40997281670570374,
"learning_rate": 8e-05,
"loss": 1.6829,
"step": 341
},
{
"epoch": 0.08310047381849107,
"grad_norm": 0.4109158217906952,
"learning_rate": 8e-05,
"loss": 1.7338,
"step": 342
},
{
"epoch": 0.08334345766006561,
"grad_norm": 0.4242565333843231,
"learning_rate": 8e-05,
"loss": 1.7397,
"step": 343
},
{
"epoch": 0.08358644150164014,
"grad_norm": 0.4627560079097748,
"learning_rate": 8e-05,
"loss": 1.8587,
"step": 344
},
{
"epoch": 0.08382942534321468,
"grad_norm": 0.39504504203796387,
"learning_rate": 8e-05,
"loss": 1.5618,
"step": 345
},
{
"epoch": 0.08407240918478921,
"grad_norm": 0.5971431136131287,
"learning_rate": 8e-05,
"loss": 1.984,
"step": 346
},
{
"epoch": 0.08431539302636375,
"grad_norm": 0.43111053109169006,
"learning_rate": 8e-05,
"loss": 1.6815,
"step": 347
},
{
"epoch": 0.08455837686793828,
"grad_norm": 0.44461020827293396,
"learning_rate": 8e-05,
"loss": 1.9002,
"step": 348
},
{
"epoch": 0.08480136070951282,
"grad_norm": 0.40719398856163025,
"learning_rate": 8e-05,
"loss": 1.6941,
"step": 349
},
{
"epoch": 0.08504434455108735,
"grad_norm": 0.41791296005249023,
"learning_rate": 8e-05,
"loss": 1.7223,
"step": 350
},
{
"epoch": 0.08528732839266188,
"grad_norm": 0.43790993094444275,
"learning_rate": 8e-05,
"loss": 1.7557,
"step": 351
},
{
"epoch": 0.08553031223423642,
"grad_norm": 0.3951578438282013,
"learning_rate": 8e-05,
"loss": 1.6533,
"step": 352
},
{
"epoch": 0.08577329607581095,
"grad_norm": 0.4710818827152252,
"learning_rate": 8e-05,
"loss": 1.8857,
"step": 353
},
{
"epoch": 0.08601627991738549,
"grad_norm": 0.4196699857711792,
"learning_rate": 8e-05,
"loss": 1.7204,
"step": 354
},
{
"epoch": 0.08625926375896002,
"grad_norm": 0.4194652736186981,
"learning_rate": 8e-05,
"loss": 1.6968,
"step": 355
},
{
"epoch": 0.08650224760053457,
"grad_norm": 0.438880980014801,
"learning_rate": 8e-05,
"loss": 1.6351,
"step": 356
},
{
"epoch": 0.0867452314421091,
"grad_norm": 0.41452550888061523,
"learning_rate": 8e-05,
"loss": 1.6648,
"step": 357
},
{
"epoch": 0.08698821528368364,
"grad_norm": 0.4164050221443176,
"learning_rate": 8e-05,
"loss": 1.7089,
"step": 358
},
{
"epoch": 0.08723119912525817,
"grad_norm": 0.4484345018863678,
"learning_rate": 8e-05,
"loss": 1.8237,
"step": 359
},
{
"epoch": 0.08747418296683271,
"grad_norm": 0.43570855259895325,
"learning_rate": 8e-05,
"loss": 1.632,
"step": 360
},
{
"epoch": 0.08771716680840724,
"grad_norm": 0.4059920907020569,
"learning_rate": 8e-05,
"loss": 1.7481,
"step": 361
},
{
"epoch": 0.08796015064998178,
"grad_norm": 0.4080146253108978,
"learning_rate": 8e-05,
"loss": 1.641,
"step": 362
},
{
"epoch": 0.08820313449155631,
"grad_norm": 0.39081236720085144,
"learning_rate": 8e-05,
"loss": 1.6033,
"step": 363
},
{
"epoch": 0.08844611833313085,
"grad_norm": 0.4051390290260315,
"learning_rate": 8e-05,
"loss": 1.7319,
"step": 364
},
{
"epoch": 0.08868910217470538,
"grad_norm": 0.4073815643787384,
"learning_rate": 8e-05,
"loss": 1.8466,
"step": 365
},
{
"epoch": 0.08893208601627992,
"grad_norm": 0.4258662760257721,
"learning_rate": 8e-05,
"loss": 1.9736,
"step": 366
},
{
"epoch": 0.08917506985785445,
"grad_norm": 0.4329228401184082,
"learning_rate": 8e-05,
"loss": 1.7617,
"step": 367
},
{
"epoch": 0.08941805369942898,
"grad_norm": 0.4149377644062042,
"learning_rate": 8e-05,
"loss": 1.7838,
"step": 368
},
{
"epoch": 0.08966103754100352,
"grad_norm": 0.4258325397968292,
"learning_rate": 8e-05,
"loss": 1.7024,
"step": 369
},
{
"epoch": 0.08990402138257805,
"grad_norm": 0.4353642761707306,
"learning_rate": 8e-05,
"loss": 1.6774,
"step": 370
},
{
"epoch": 0.09014700522415259,
"grad_norm": 0.4435424506664276,
"learning_rate": 8e-05,
"loss": 1.8068,
"step": 371
},
{
"epoch": 0.09038998906572714,
"grad_norm": 0.43207329511642456,
"learning_rate": 8e-05,
"loss": 1.8337,
"step": 372
},
{
"epoch": 0.09063297290730167,
"grad_norm": 0.4225546717643738,
"learning_rate": 8e-05,
"loss": 1.8111,
"step": 373
},
{
"epoch": 0.0908759567488762,
"grad_norm": 0.41650712490081787,
"learning_rate": 8e-05,
"loss": 1.6998,
"step": 374
},
{
"epoch": 0.09111894059045074,
"grad_norm": 0.44362857937812805,
"learning_rate": 8e-05,
"loss": 1.8781,
"step": 375
},
{
"epoch": 0.09136192443202527,
"grad_norm": 0.4244385063648224,
"learning_rate": 8e-05,
"loss": 1.7318,
"step": 376
},
{
"epoch": 0.09160490827359981,
"grad_norm": 0.4230808615684509,
"learning_rate": 8e-05,
"loss": 1.868,
"step": 377
},
{
"epoch": 0.09184789211517434,
"grad_norm": 0.4087420403957367,
"learning_rate": 8e-05,
"loss": 1.5235,
"step": 378
},
{
"epoch": 0.09209087595674888,
"grad_norm": 0.4241844117641449,
"learning_rate": 8e-05,
"loss": 1.6388,
"step": 379
},
{
"epoch": 0.09233385979832341,
"grad_norm": 0.4437425136566162,
"learning_rate": 8e-05,
"loss": 1.8141,
"step": 380
},
{
"epoch": 0.09257684363989795,
"grad_norm": 0.43334922194480896,
"learning_rate": 8e-05,
"loss": 1.7747,
"step": 381
},
{
"epoch": 0.09281982748147248,
"grad_norm": 0.4382667541503906,
"learning_rate": 8e-05,
"loss": 1.7946,
"step": 382
},
{
"epoch": 0.09306281132304701,
"grad_norm": 0.4285142719745636,
"learning_rate": 8e-05,
"loss": 1.7562,
"step": 383
},
{
"epoch": 0.09330579516462155,
"grad_norm": 0.4092921018600464,
"learning_rate": 8e-05,
"loss": 1.5453,
"step": 384
},
{
"epoch": 0.09354877900619608,
"grad_norm": 0.4280427396297455,
"learning_rate": 8e-05,
"loss": 1.8365,
"step": 385
},
{
"epoch": 0.09379176284777062,
"grad_norm": 0.41424787044525146,
"learning_rate": 8e-05,
"loss": 1.6779,
"step": 386
},
{
"epoch": 0.09403474668934515,
"grad_norm": 0.4082753658294678,
"learning_rate": 8e-05,
"loss": 1.6822,
"step": 387
},
{
"epoch": 0.09427773053091969,
"grad_norm": 0.4185827374458313,
"learning_rate": 8e-05,
"loss": 1.7599,
"step": 388
},
{
"epoch": 0.09452071437249424,
"grad_norm": 0.4068378508090973,
"learning_rate": 8e-05,
"loss": 1.6452,
"step": 389
},
{
"epoch": 0.09476369821406877,
"grad_norm": 0.4201984405517578,
"learning_rate": 8e-05,
"loss": 1.7303,
"step": 390
},
{
"epoch": 0.0950066820556433,
"grad_norm": 0.4467129707336426,
"learning_rate": 8e-05,
"loss": 1.9124,
"step": 391
},
{
"epoch": 0.09524966589721784,
"grad_norm": 0.4684138596057892,
"learning_rate": 8e-05,
"loss": 1.8845,
"step": 392
},
{
"epoch": 0.09549264973879237,
"grad_norm": 0.391348659992218,
"learning_rate": 8e-05,
"loss": 1.6064,
"step": 393
},
{
"epoch": 0.09573563358036691,
"grad_norm": 0.43737274408340454,
"learning_rate": 8e-05,
"loss": 1.7695,
"step": 394
},
{
"epoch": 0.09597861742194144,
"grad_norm": 0.4272935688495636,
"learning_rate": 8e-05,
"loss": 1.6379,
"step": 395
},
{
"epoch": 0.09622160126351598,
"grad_norm": 0.41889452934265137,
"learning_rate": 8e-05,
"loss": 1.674,
"step": 396
},
{
"epoch": 0.09646458510509051,
"grad_norm": 0.4118969440460205,
"learning_rate": 8e-05,
"loss": 1.6895,
"step": 397
},
{
"epoch": 0.09670756894666505,
"grad_norm": 0.440357506275177,
"learning_rate": 8e-05,
"loss": 1.7683,
"step": 398
},
{
"epoch": 0.09695055278823958,
"grad_norm": 0.40678104758262634,
"learning_rate": 8e-05,
"loss": 1.6479,
"step": 399
},
{
"epoch": 0.09719353662981411,
"grad_norm": 0.42315101623535156,
"learning_rate": 8e-05,
"loss": 1.7731,
"step": 400
},
{
"epoch": 0.09743652047138865,
"grad_norm": 0.44656410813331604,
"learning_rate": 8e-05,
"loss": 1.7927,
"step": 401
},
{
"epoch": 0.09767950431296318,
"grad_norm": 0.4181327819824219,
"learning_rate": 8e-05,
"loss": 1.6638,
"step": 402
},
{
"epoch": 0.09792248815453772,
"grad_norm": 0.4143099784851074,
"learning_rate": 8e-05,
"loss": 1.7421,
"step": 403
},
{
"epoch": 0.09816547199611225,
"grad_norm": 0.399997353553772,
"learning_rate": 8e-05,
"loss": 1.732,
"step": 404
},
{
"epoch": 0.0984084558376868,
"grad_norm": 0.41090577840805054,
"learning_rate": 8e-05,
"loss": 1.8132,
"step": 405
},
{
"epoch": 0.09865143967926134,
"grad_norm": 0.4159490764141083,
"learning_rate": 8e-05,
"loss": 1.6796,
"step": 406
},
{
"epoch": 0.09889442352083587,
"grad_norm": 0.4340929687023163,
"learning_rate": 8e-05,
"loss": 1.7595,
"step": 407
},
{
"epoch": 0.0991374073624104,
"grad_norm": 0.41250815987586975,
"learning_rate": 8e-05,
"loss": 1.7695,
"step": 408
},
{
"epoch": 0.09938039120398494,
"grad_norm": 0.3895319402217865,
"learning_rate": 8e-05,
"loss": 1.6261,
"step": 409
},
{
"epoch": 0.09962337504555947,
"grad_norm": 0.42065054178237915,
"learning_rate": 8e-05,
"loss": 1.7685,
"step": 410
},
{
"epoch": 0.09986635888713401,
"grad_norm": 0.4263005554676056,
"learning_rate": 8e-05,
"loss": 1.8352,
"step": 411
},
{
"epoch": 0.10010934272870854,
"grad_norm": 0.4143153131008148,
"learning_rate": 8e-05,
"loss": 1.6148,
"step": 412
},
{
"epoch": 0.10035232657028308,
"grad_norm": 0.4837665259838104,
"learning_rate": 8e-05,
"loss": 1.5294,
"step": 413
},
{
"epoch": 0.10059531041185761,
"grad_norm": 0.4138377606868744,
"learning_rate": 8e-05,
"loss": 1.6781,
"step": 414
},
{
"epoch": 0.10083829425343215,
"grad_norm": 0.5110868215560913,
"learning_rate": 8e-05,
"loss": 1.7925,
"step": 415
},
{
"epoch": 0.10108127809500668,
"grad_norm": 0.40476730465888977,
"learning_rate": 8e-05,
"loss": 1.6946,
"step": 416
},
{
"epoch": 0.10132426193658121,
"grad_norm": 0.42230334877967834,
"learning_rate": 8e-05,
"loss": 1.8064,
"step": 417
},
{
"epoch": 0.10156724577815575,
"grad_norm": 0.4212943911552429,
"learning_rate": 8e-05,
"loss": 1.7424,
"step": 418
},
{
"epoch": 0.10181022961973028,
"grad_norm": 0.4509720206260681,
"learning_rate": 8e-05,
"loss": 1.8692,
"step": 419
},
{
"epoch": 0.10205321346130482,
"grad_norm": 0.39649084210395813,
"learning_rate": 8e-05,
"loss": 1.7349,
"step": 420
},
{
"epoch": 0.10229619730287935,
"grad_norm": 0.4376465678215027,
"learning_rate": 8e-05,
"loss": 1.6797,
"step": 421
},
{
"epoch": 0.1025391811444539,
"grad_norm": 0.4412459135055542,
"learning_rate": 8e-05,
"loss": 1.769,
"step": 422
},
{
"epoch": 0.10278216498602843,
"grad_norm": 0.4239305853843689,
"learning_rate": 8e-05,
"loss": 1.7809,
"step": 423
},
{
"epoch": 0.10302514882760297,
"grad_norm": 0.4495208263397217,
"learning_rate": 8e-05,
"loss": 1.8249,
"step": 424
},
{
"epoch": 0.1032681326691775,
"grad_norm": 0.4457133710384369,
"learning_rate": 8e-05,
"loss": 1.8118,
"step": 425
},
{
"epoch": 0.10351111651075204,
"grad_norm": 0.4061901569366455,
"learning_rate": 8e-05,
"loss": 1.7299,
"step": 426
},
{
"epoch": 0.10375410035232657,
"grad_norm": 0.41500920057296753,
"learning_rate": 8e-05,
"loss": 1.7372,
"step": 427
},
{
"epoch": 0.10399708419390111,
"grad_norm": 0.46426844596862793,
"learning_rate": 8e-05,
"loss": 1.7675,
"step": 428
},
{
"epoch": 0.10424006803547564,
"grad_norm": 0.4445520043373108,
"learning_rate": 8e-05,
"loss": 1.7938,
"step": 429
},
{
"epoch": 0.10448305187705018,
"grad_norm": 0.42322006821632385,
"learning_rate": 8e-05,
"loss": 1.8434,
"step": 430
},
{
"epoch": 0.10472603571862471,
"grad_norm": 0.4529503881931305,
"learning_rate": 8e-05,
"loss": 1.7494,
"step": 431
},
{
"epoch": 0.10496901956019924,
"grad_norm": 0.4172574579715729,
"learning_rate": 8e-05,
"loss": 1.7325,
"step": 432
},
{
"epoch": 0.10521200340177378,
"grad_norm": 0.4366316795349121,
"learning_rate": 8e-05,
"loss": 1.7598,
"step": 433
},
{
"epoch": 0.10545498724334831,
"grad_norm": 0.4232217073440552,
"learning_rate": 8e-05,
"loss": 1.7331,
"step": 434
},
{
"epoch": 0.10569797108492285,
"grad_norm": 0.4281679093837738,
"learning_rate": 8e-05,
"loss": 1.8115,
"step": 435
},
{
"epoch": 0.10594095492649738,
"grad_norm": 0.4335026144981384,
"learning_rate": 8e-05,
"loss": 1.7353,
"step": 436
},
{
"epoch": 0.10618393876807192,
"grad_norm": 0.4126567244529724,
"learning_rate": 8e-05,
"loss": 1.7173,
"step": 437
},
{
"epoch": 0.10642692260964647,
"grad_norm": 0.4086419343948364,
"learning_rate": 8e-05,
"loss": 1.6366,
"step": 438
},
{
"epoch": 0.106669906451221,
"grad_norm": 0.4197182357311249,
"learning_rate": 8e-05,
"loss": 1.8021,
"step": 439
},
{
"epoch": 0.10691289029279553,
"grad_norm": 0.3942872881889343,
"learning_rate": 8e-05,
"loss": 1.4548,
"step": 440
},
{
"epoch": 0.10715587413437007,
"grad_norm": 0.41451960802078247,
"learning_rate": 8e-05,
"loss": 1.5907,
"step": 441
},
{
"epoch": 0.1073988579759446,
"grad_norm": 0.4235744774341583,
"learning_rate": 8e-05,
"loss": 1.9298,
"step": 442
},
{
"epoch": 0.10764184181751914,
"grad_norm": 0.44768843054771423,
"learning_rate": 8e-05,
"loss": 1.8039,
"step": 443
},
{
"epoch": 0.10788482565909367,
"grad_norm": 0.40303924679756165,
"learning_rate": 8e-05,
"loss": 1.6959,
"step": 444
},
{
"epoch": 0.1081278095006682,
"grad_norm": 0.42123013734817505,
"learning_rate": 8e-05,
"loss": 1.7,
"step": 445
},
{
"epoch": 0.10837079334224274,
"grad_norm": 0.4180941581726074,
"learning_rate": 8e-05,
"loss": 1.7982,
"step": 446
},
{
"epoch": 0.10861377718381728,
"grad_norm": 0.41173458099365234,
"learning_rate": 8e-05,
"loss": 1.6573,
"step": 447
},
{
"epoch": 0.10885676102539181,
"grad_norm": 0.41863924264907837,
"learning_rate": 8e-05,
"loss": 1.766,
"step": 448
},
{
"epoch": 0.10909974486696634,
"grad_norm": 0.4276777505874634,
"learning_rate": 8e-05,
"loss": 1.7029,
"step": 449
},
{
"epoch": 0.10934272870854088,
"grad_norm": 0.42458993196487427,
"learning_rate": 8e-05,
"loss": 1.6814,
"step": 450
},
{
"epoch": 0.10958571255011541,
"grad_norm": 0.41878148913383484,
"learning_rate": 8e-05,
"loss": 1.6958,
"step": 451
},
{
"epoch": 0.10982869639168995,
"grad_norm": 0.4032799303531647,
"learning_rate": 8e-05,
"loss": 1.6357,
"step": 452
},
{
"epoch": 0.11007168023326448,
"grad_norm": 0.41101089119911194,
"learning_rate": 8e-05,
"loss": 1.6992,
"step": 453
},
{
"epoch": 0.11031466407483902,
"grad_norm": 0.41978752613067627,
"learning_rate": 8e-05,
"loss": 1.8756,
"step": 454
},
{
"epoch": 0.11055764791641357,
"grad_norm": 0.40360498428344727,
"learning_rate": 8e-05,
"loss": 1.6878,
"step": 455
},
{
"epoch": 0.1108006317579881,
"grad_norm": 0.4039525091648102,
"learning_rate": 8e-05,
"loss": 1.7367,
"step": 456
},
{
"epoch": 0.11104361559956263,
"grad_norm": 0.43515491485595703,
"learning_rate": 8e-05,
"loss": 1.8582,
"step": 457
},
{
"epoch": 0.11128659944113717,
"grad_norm": 0.4115177392959595,
"learning_rate": 8e-05,
"loss": 1.7892,
"step": 458
},
{
"epoch": 0.1115295832827117,
"grad_norm": 0.4229578673839569,
"learning_rate": 8e-05,
"loss": 1.8702,
"step": 459
},
{
"epoch": 0.11177256712428624,
"grad_norm": 0.4015786945819855,
"learning_rate": 8e-05,
"loss": 1.7798,
"step": 460
},
{
"epoch": 0.11201555096586077,
"grad_norm": 0.40968620777130127,
"learning_rate": 8e-05,
"loss": 1.6602,
"step": 461
},
{
"epoch": 0.1122585348074353,
"grad_norm": 0.4068968594074249,
"learning_rate": 8e-05,
"loss": 1.7319,
"step": 462
},
{
"epoch": 0.11250151864900984,
"grad_norm": 0.40810808539390564,
"learning_rate": 8e-05,
"loss": 1.8082,
"step": 463
},
{
"epoch": 0.11274450249058438,
"grad_norm": 0.4733920097351074,
"learning_rate": 8e-05,
"loss": 1.69,
"step": 464
},
{
"epoch": 0.11298748633215891,
"grad_norm": 0.45223650336265564,
"learning_rate": 8e-05,
"loss": 1.7298,
"step": 465
},
{
"epoch": 0.11323047017373344,
"grad_norm": 0.4065145254135132,
"learning_rate": 8e-05,
"loss": 1.6551,
"step": 466
},
{
"epoch": 0.11347345401530798,
"grad_norm": 0.43968337774276733,
"learning_rate": 8e-05,
"loss": 1.8832,
"step": 467
},
{
"epoch": 0.11371643785688251,
"grad_norm": 0.4068659245967865,
"learning_rate": 8e-05,
"loss": 1.6833,
"step": 468
},
{
"epoch": 0.11395942169845705,
"grad_norm": 0.44198158383369446,
"learning_rate": 8e-05,
"loss": 1.6901,
"step": 469
},
{
"epoch": 0.11420240554003158,
"grad_norm": 0.4050433039665222,
"learning_rate": 8e-05,
"loss": 1.7219,
"step": 470
},
{
"epoch": 0.11444538938160612,
"grad_norm": 0.423566609621048,
"learning_rate": 8e-05,
"loss": 1.7293,
"step": 471
},
{
"epoch": 0.11468837322318066,
"grad_norm": 0.4198954105377197,
"learning_rate": 8e-05,
"loss": 1.6615,
"step": 472
},
{
"epoch": 0.1149313570647552,
"grad_norm": 0.43905210494995117,
"learning_rate": 8e-05,
"loss": 1.8544,
"step": 473
},
{
"epoch": 0.11517434090632973,
"grad_norm": 0.403835654258728,
"learning_rate": 8e-05,
"loss": 1.7008,
"step": 474
},
{
"epoch": 0.11541732474790427,
"grad_norm": 0.4185623228549957,
"learning_rate": 8e-05,
"loss": 1.7348,
"step": 475
},
{
"epoch": 0.1156603085894788,
"grad_norm": 0.3955186605453491,
"learning_rate": 8e-05,
"loss": 1.6089,
"step": 476
},
{
"epoch": 0.11590329243105334,
"grad_norm": 0.40133073925971985,
"learning_rate": 8e-05,
"loss": 1.5766,
"step": 477
},
{
"epoch": 0.11614627627262787,
"grad_norm": 0.4133303165435791,
"learning_rate": 8e-05,
"loss": 1.7371,
"step": 478
},
{
"epoch": 0.1163892601142024,
"grad_norm": 0.4125986397266388,
"learning_rate": 8e-05,
"loss": 1.6643,
"step": 479
},
{
"epoch": 0.11663224395577694,
"grad_norm": 0.467380553483963,
"learning_rate": 8e-05,
"loss": 1.753,
"step": 480
},
{
"epoch": 0.11687522779735147,
"grad_norm": 0.4279719293117523,
"learning_rate": 8e-05,
"loss": 1.7712,
"step": 481
},
{
"epoch": 0.11711821163892601,
"grad_norm": 0.4266670048236847,
"learning_rate": 8e-05,
"loss": 1.5892,
"step": 482
},
{
"epoch": 0.11736119548050054,
"grad_norm": 0.42196038365364075,
"learning_rate": 8e-05,
"loss": 1.7975,
"step": 483
},
{
"epoch": 0.11760417932207508,
"grad_norm": 0.4362437129020691,
"learning_rate": 8e-05,
"loss": 1.8205,
"step": 484
},
{
"epoch": 0.11784716316364961,
"grad_norm": 0.4456392824649811,
"learning_rate": 8e-05,
"loss": 1.9124,
"step": 485
},
{
"epoch": 0.11809014700522415,
"grad_norm": 0.40454182028770447,
"learning_rate": 8e-05,
"loss": 1.7391,
"step": 486
},
{
"epoch": 0.11833313084679868,
"grad_norm": 0.4022403061389923,
"learning_rate": 8e-05,
"loss": 1.6956,
"step": 487
},
{
"epoch": 0.11857611468837323,
"grad_norm": 0.40457379817962646,
"learning_rate": 8e-05,
"loss": 1.6685,
"step": 488
},
{
"epoch": 0.11881909852994776,
"grad_norm": 0.4198242127895355,
"learning_rate": 8e-05,
"loss": 1.7777,
"step": 489
},
{
"epoch": 0.1190620823715223,
"grad_norm": 0.40101826190948486,
"learning_rate": 8e-05,
"loss": 1.621,
"step": 490
},
{
"epoch": 0.11930506621309683,
"grad_norm": 0.438236266374588,
"learning_rate": 8e-05,
"loss": 1.777,
"step": 491
},
{
"epoch": 0.11954805005467137,
"grad_norm": 0.4342082738876343,
"learning_rate": 8e-05,
"loss": 1.7479,
"step": 492
},
{
"epoch": 0.1197910338962459,
"grad_norm": 0.4013380706310272,
"learning_rate": 8e-05,
"loss": 1.5459,
"step": 493
},
{
"epoch": 0.12003401773782044,
"grad_norm": 0.4234047830104828,
"learning_rate": 8e-05,
"loss": 1.6803,
"step": 494
},
{
"epoch": 0.12027700157939497,
"grad_norm": 0.4206213653087616,
"learning_rate": 8e-05,
"loss": 1.7764,
"step": 495
},
{
"epoch": 0.1205199854209695,
"grad_norm": 0.4329131543636322,
"learning_rate": 8e-05,
"loss": 1.7233,
"step": 496
},
{
"epoch": 0.12076296926254404,
"grad_norm": 0.4249413013458252,
"learning_rate": 8e-05,
"loss": 1.6537,
"step": 497
},
{
"epoch": 0.12100595310411857,
"grad_norm": 0.4211755096912384,
"learning_rate": 8e-05,
"loss": 1.683,
"step": 498
},
{
"epoch": 0.12124893694569311,
"grad_norm": 0.4254203140735626,
"learning_rate": 8e-05,
"loss": 1.704,
"step": 499
},
{
"epoch": 0.12149192078726764,
"grad_norm": 0.41103148460388184,
"learning_rate": 8e-05,
"loss": 1.7606,
"step": 500
},
{
"epoch": 0.12173490462884218,
"grad_norm": 0.4685738980770111,
"learning_rate": 8e-05,
"loss": 1.7807,
"step": 501
},
{
"epoch": 0.12197788847041671,
"grad_norm": 0.4338212013244629,
"learning_rate": 8e-05,
"loss": 1.7965,
"step": 502
},
{
"epoch": 0.12222087231199125,
"grad_norm": 0.44578397274017334,
"learning_rate": 8e-05,
"loss": 1.907,
"step": 503
},
{
"epoch": 0.12246385615356578,
"grad_norm": 0.4282798171043396,
"learning_rate": 8e-05,
"loss": 1.8234,
"step": 504
},
{
"epoch": 0.12270683999514033,
"grad_norm": 0.42436984181404114,
"learning_rate": 8e-05,
"loss": 1.8776,
"step": 505
},
{
"epoch": 0.12294982383671486,
"grad_norm": 0.4542418420314789,
"learning_rate": 8e-05,
"loss": 1.7844,
"step": 506
},
{
"epoch": 0.1231928076782894,
"grad_norm": 0.43623366951942444,
"learning_rate": 8e-05,
"loss": 1.7513,
"step": 507
},
{
"epoch": 0.12343579151986393,
"grad_norm": 0.41631045937538147,
"learning_rate": 8e-05,
"loss": 1.6828,
"step": 508
},
{
"epoch": 0.12367877536143847,
"grad_norm": 0.4436653256416321,
"learning_rate": 8e-05,
"loss": 1.81,
"step": 509
},
{
"epoch": 0.123921759203013,
"grad_norm": 0.4137571454048157,
"learning_rate": 8e-05,
"loss": 1.6762,
"step": 510
},
{
"epoch": 0.12416474304458754,
"grad_norm": 0.4194542467594147,
"learning_rate": 8e-05,
"loss": 1.6571,
"step": 511
},
{
"epoch": 0.12440772688616207,
"grad_norm": 0.43399301171302795,
"learning_rate": 8e-05,
"loss": 1.6839,
"step": 512
},
{
"epoch": 0.1246507107277366,
"grad_norm": 0.4126221537590027,
"learning_rate": 8e-05,
"loss": 1.6251,
"step": 513
},
{
"epoch": 0.12489369456931114,
"grad_norm": 0.44673866033554077,
"learning_rate": 8e-05,
"loss": 1.8053,
"step": 514
},
{
"epoch": 0.1251366784108857,
"grad_norm": 0.417203813791275,
"learning_rate": 8e-05,
"loss": 1.6503,
"step": 515
},
{
"epoch": 0.12537966225246022,
"grad_norm": 0.43842193484306335,
"learning_rate": 8e-05,
"loss": 1.6617,
"step": 516
},
{
"epoch": 0.12562264609403476,
"grad_norm": 0.40844860672950745,
"learning_rate": 8e-05,
"loss": 1.7281,
"step": 517
},
{
"epoch": 0.1258656299356093,
"grad_norm": 0.46497642993927,
"learning_rate": 8e-05,
"loss": 1.8363,
"step": 518
},
{
"epoch": 0.12610861377718383,
"grad_norm": 0.4338991343975067,
"learning_rate": 8e-05,
"loss": 1.8075,
"step": 519
},
{
"epoch": 0.12635159761875836,
"grad_norm": 0.42398321628570557,
"learning_rate": 8e-05,
"loss": 1.7608,
"step": 520
},
{
"epoch": 0.1265945814603329,
"grad_norm": 0.4285203218460083,
"learning_rate": 8e-05,
"loss": 1.7244,
"step": 521
},
{
"epoch": 0.12683756530190743,
"grad_norm": 0.43014469742774963,
"learning_rate": 8e-05,
"loss": 1.8067,
"step": 522
},
{
"epoch": 0.12708054914348196,
"grad_norm": 0.43716058135032654,
"learning_rate": 8e-05,
"loss": 1.8106,
"step": 523
},
{
"epoch": 0.1273235329850565,
"grad_norm": 0.43253129720687866,
"learning_rate": 8e-05,
"loss": 1.8653,
"step": 524
},
{
"epoch": 0.12756651682663103,
"grad_norm": 0.4163695275783539,
"learning_rate": 8e-05,
"loss": 1.6138,
"step": 525
},
{
"epoch": 0.12780950066820557,
"grad_norm": 0.39770838618278503,
"learning_rate": 8e-05,
"loss": 1.7391,
"step": 526
},
{
"epoch": 0.1280524845097801,
"grad_norm": 0.416168212890625,
"learning_rate": 8e-05,
"loss": 1.7219,
"step": 527
},
{
"epoch": 0.12829546835135464,
"grad_norm": 0.393230140209198,
"learning_rate": 8e-05,
"loss": 1.76,
"step": 528
},
{
"epoch": 0.12853845219292917,
"grad_norm": 0.4247177839279175,
"learning_rate": 8e-05,
"loss": 1.7419,
"step": 529
},
{
"epoch": 0.1287814360345037,
"grad_norm": 0.41639310121536255,
"learning_rate": 8e-05,
"loss": 1.7591,
"step": 530
},
{
"epoch": 0.12902441987607824,
"grad_norm": 0.4266749322414398,
"learning_rate": 8e-05,
"loss": 1.7281,
"step": 531
},
{
"epoch": 0.12926740371765277,
"grad_norm": 0.40807127952575684,
"learning_rate": 8e-05,
"loss": 1.6978,
"step": 532
},
{
"epoch": 0.1295103875592273,
"grad_norm": 0.426586389541626,
"learning_rate": 8e-05,
"loss": 1.756,
"step": 533
},
{
"epoch": 0.12975337140080184,
"grad_norm": 0.41720592975616455,
"learning_rate": 8e-05,
"loss": 1.6427,
"step": 534
},
{
"epoch": 0.12999635524237638,
"grad_norm": 0.4057789146900177,
"learning_rate": 8e-05,
"loss": 1.7339,
"step": 535
},
{
"epoch": 0.1302393390839509,
"grad_norm": 0.43273788690567017,
"learning_rate": 8e-05,
"loss": 1.8462,
"step": 536
},
{
"epoch": 0.13048232292552545,
"grad_norm": 0.42253121733665466,
"learning_rate": 8e-05,
"loss": 1.8726,
"step": 537
},
{
"epoch": 0.13072530676709998,
"grad_norm": 0.4509675204753876,
"learning_rate": 8e-05,
"loss": 1.8426,
"step": 538
},
{
"epoch": 0.13096829060867451,
"grad_norm": 0.4126783311367035,
"learning_rate": 8e-05,
"loss": 1.8826,
"step": 539
},
{
"epoch": 0.13121127445024905,
"grad_norm": 0.4048231840133667,
"learning_rate": 8e-05,
"loss": 1.5729,
"step": 540
},
{
"epoch": 0.13145425829182358,
"grad_norm": 0.4085652828216553,
"learning_rate": 8e-05,
"loss": 1.7383,
"step": 541
},
{
"epoch": 0.13169724213339812,
"grad_norm": 0.41886651515960693,
"learning_rate": 8e-05,
"loss": 1.7015,
"step": 542
},
{
"epoch": 0.13194022597497265,
"grad_norm": 0.4263283610343933,
"learning_rate": 8e-05,
"loss": 1.7303,
"step": 543
},
{
"epoch": 0.1321832098165472,
"grad_norm": 0.409944623708725,
"learning_rate": 8e-05,
"loss": 1.7645,
"step": 544
},
{
"epoch": 0.13242619365812172,
"grad_norm": 0.45405811071395874,
"learning_rate": 8e-05,
"loss": 1.8067,
"step": 545
},
{
"epoch": 0.13266917749969628,
"grad_norm": 0.4124011695384979,
"learning_rate": 8e-05,
"loss": 1.6965,
"step": 546
},
{
"epoch": 0.13291216134127082,
"grad_norm": 0.4117543399333954,
"learning_rate": 8e-05,
"loss": 1.6651,
"step": 547
},
{
"epoch": 0.13315514518284535,
"grad_norm": 0.4148860275745392,
"learning_rate": 8e-05,
"loss": 1.7024,
"step": 548
},
{
"epoch": 0.1333981290244199,
"grad_norm": 0.39591357111930847,
"learning_rate": 8e-05,
"loss": 1.7578,
"step": 549
},
{
"epoch": 0.13364111286599442,
"grad_norm": 0.41340890526771545,
"learning_rate": 8e-05,
"loss": 1.6683,
"step": 550
},
{
"epoch": 0.13388409670756896,
"grad_norm": 0.41868317127227783,
"learning_rate": 8e-05,
"loss": 1.6794,
"step": 551
},
{
"epoch": 0.1341270805491435,
"grad_norm": 0.392217218875885,
"learning_rate": 8e-05,
"loss": 1.6007,
"step": 552
},
{
"epoch": 0.13437006439071802,
"grad_norm": 0.41015952825546265,
"learning_rate": 8e-05,
"loss": 1.669,
"step": 553
},
{
"epoch": 0.13461304823229256,
"grad_norm": 0.4273151457309723,
"learning_rate": 8e-05,
"loss": 1.8276,
"step": 554
},
{
"epoch": 0.1348560320738671,
"grad_norm": 0.4185205101966858,
"learning_rate": 8e-05,
"loss": 1.7621,
"step": 555
},
{
"epoch": 0.13509901591544163,
"grad_norm": 0.3908994197845459,
"learning_rate": 8e-05,
"loss": 1.6895,
"step": 556
},
{
"epoch": 0.13534199975701616,
"grad_norm": 0.3928578794002533,
"learning_rate": 8e-05,
"loss": 1.6038,
"step": 557
},
{
"epoch": 0.1355849835985907,
"grad_norm": 0.4071027636528015,
"learning_rate": 8e-05,
"loss": 1.7764,
"step": 558
},
{
"epoch": 0.13582796744016523,
"grad_norm": 0.4001491963863373,
"learning_rate": 8e-05,
"loss": 1.6241,
"step": 559
},
{
"epoch": 0.13607095128173977,
"grad_norm": 0.42042168974876404,
"learning_rate": 8e-05,
"loss": 1.7249,
"step": 560
},
{
"epoch": 0.1363139351233143,
"grad_norm": 0.4091740846633911,
"learning_rate": 8e-05,
"loss": 1.7422,
"step": 561
},
{
"epoch": 0.13655691896488883,
"grad_norm": 0.4089222848415375,
"learning_rate": 8e-05,
"loss": 1.8531,
"step": 562
},
{
"epoch": 0.13679990280646337,
"grad_norm": 0.39700132608413696,
"learning_rate": 8e-05,
"loss": 1.6705,
"step": 563
},
{
"epoch": 0.1370428866480379,
"grad_norm": 0.3812037408351898,
"learning_rate": 8e-05,
"loss": 1.5709,
"step": 564
},
{
"epoch": 0.13728587048961244,
"grad_norm": 0.4124666750431061,
"learning_rate": 8e-05,
"loss": 1.7889,
"step": 565
},
{
"epoch": 0.13752885433118697,
"grad_norm": 0.4233843982219696,
"learning_rate": 8e-05,
"loss": 1.8359,
"step": 566
},
{
"epoch": 0.1377718381727615,
"grad_norm": 0.40482261776924133,
"learning_rate": 8e-05,
"loss": 1.6611,
"step": 567
},
{
"epoch": 0.13801482201433604,
"grad_norm": 0.40571871399879456,
"learning_rate": 8e-05,
"loss": 1.6503,
"step": 568
},
{
"epoch": 0.13825780585591058,
"grad_norm": 0.4105265140533447,
"learning_rate": 8e-05,
"loss": 1.6609,
"step": 569
},
{
"epoch": 0.1385007896974851,
"grad_norm": 0.3944711685180664,
"learning_rate": 8e-05,
"loss": 1.7262,
"step": 570
},
{
"epoch": 0.13874377353905964,
"grad_norm": 0.415972501039505,
"learning_rate": 8e-05,
"loss": 1.797,
"step": 571
},
{
"epoch": 0.13898675738063418,
"grad_norm": 0.41803696751594543,
"learning_rate": 8e-05,
"loss": 1.7422,
"step": 572
},
{
"epoch": 0.1392297412222087,
"grad_norm": 0.42807522416114807,
"learning_rate": 8e-05,
"loss": 1.8867,
"step": 573
},
{
"epoch": 0.13947272506378325,
"grad_norm": 0.4009261727333069,
"learning_rate": 8e-05,
"loss": 1.7295,
"step": 574
},
{
"epoch": 0.13971570890535778,
"grad_norm": 0.4152818024158478,
"learning_rate": 8e-05,
"loss": 1.7609,
"step": 575
},
{
"epoch": 0.13995869274693232,
"grad_norm": 0.47868460416793823,
"learning_rate": 8e-05,
"loss": 1.8627,
"step": 576
},
{
"epoch": 0.14020167658850685,
"grad_norm": 0.409762442111969,
"learning_rate": 8e-05,
"loss": 1.8045,
"step": 577
},
{
"epoch": 0.14044466043008139,
"grad_norm": 0.6949756145477295,
"learning_rate": 8e-05,
"loss": 1.7074,
"step": 578
},
{
"epoch": 0.14068764427165595,
"grad_norm": 0.45087161660194397,
"learning_rate": 8e-05,
"loss": 1.7881,
"step": 579
},
{
"epoch": 0.14093062811323048,
"grad_norm": 0.46107062697410583,
"learning_rate": 8e-05,
"loss": 1.8546,
"step": 580
},
{
"epoch": 0.14117361195480502,
"grad_norm": 0.39177265763282776,
"learning_rate": 8e-05,
"loss": 1.604,
"step": 581
},
{
"epoch": 0.14141659579637955,
"grad_norm": 0.4238806962966919,
"learning_rate": 8e-05,
"loss": 1.6979,
"step": 582
},
{
"epoch": 0.1416595796379541,
"grad_norm": 0.4344601333141327,
"learning_rate": 8e-05,
"loss": 1.7946,
"step": 583
},
{
"epoch": 0.14190256347952862,
"grad_norm": 0.39927345514297485,
"learning_rate": 8e-05,
"loss": 1.7305,
"step": 584
},
{
"epoch": 0.14214554732110316,
"grad_norm": 0.44042500853538513,
"learning_rate": 8e-05,
"loss": 1.6398,
"step": 585
},
{
"epoch": 0.1423885311626777,
"grad_norm": 0.41562238335609436,
"learning_rate": 8e-05,
"loss": 1.7691,
"step": 586
},
{
"epoch": 0.14263151500425222,
"grad_norm": 0.41256847977638245,
"learning_rate": 8e-05,
"loss": 1.7209,
"step": 587
},
{
"epoch": 0.14287449884582676,
"grad_norm": 0.4304940104484558,
"learning_rate": 8e-05,
"loss": 1.8157,
"step": 588
},
{
"epoch": 0.1431174826874013,
"grad_norm": 0.4045521914958954,
"learning_rate": 8e-05,
"loss": 1.5891,
"step": 589
},
{
"epoch": 0.14336046652897583,
"grad_norm": 0.41248899698257446,
"learning_rate": 8e-05,
"loss": 1.8079,
"step": 590
},
{
"epoch": 0.14360345037055036,
"grad_norm": 0.40122056007385254,
"learning_rate": 8e-05,
"loss": 1.6265,
"step": 591
},
{
"epoch": 0.1438464342121249,
"grad_norm": 0.4546814560890198,
"learning_rate": 8e-05,
"loss": 1.922,
"step": 592
},
{
"epoch": 0.14408941805369943,
"grad_norm": 0.4200947880744934,
"learning_rate": 8e-05,
"loss": 1.7937,
"step": 593
},
{
"epoch": 0.14433240189527397,
"grad_norm": 0.42049646377563477,
"learning_rate": 8e-05,
"loss": 1.7916,
"step": 594
},
{
"epoch": 0.1445753857368485,
"grad_norm": 0.4661625921726227,
"learning_rate": 8e-05,
"loss": 1.8344,
"step": 595
},
{
"epoch": 0.14481836957842303,
"grad_norm": 0.4149019718170166,
"learning_rate": 8e-05,
"loss": 1.7602,
"step": 596
},
{
"epoch": 0.14506135341999757,
"grad_norm": 0.41684791445732117,
"learning_rate": 8e-05,
"loss": 1.644,
"step": 597
},
{
"epoch": 0.1453043372615721,
"grad_norm": 0.4358704686164856,
"learning_rate": 8e-05,
"loss": 1.6737,
"step": 598
},
{
"epoch": 0.14554732110314664,
"grad_norm": 0.4250600337982178,
"learning_rate": 8e-05,
"loss": 1.6269,
"step": 599
},
{
"epoch": 0.14579030494472117,
"grad_norm": 0.42459315061569214,
"learning_rate": 8e-05,
"loss": 1.6598,
"step": 600
},
{
"epoch": 0.1460332887862957,
"grad_norm": 0.4363904893398285,
"learning_rate": 8e-05,
"loss": 1.8471,
"step": 601
},
{
"epoch": 0.14627627262787024,
"grad_norm": 0.41599100828170776,
"learning_rate": 8e-05,
"loss": 1.7264,
"step": 602
},
{
"epoch": 0.14651925646944478,
"grad_norm": 0.3999607264995575,
"learning_rate": 8e-05,
"loss": 1.6048,
"step": 603
},
{
"epoch": 0.1467622403110193,
"grad_norm": 0.4132905900478363,
"learning_rate": 8e-05,
"loss": 1.784,
"step": 604
},
{
"epoch": 0.14700522415259384,
"grad_norm": 0.40192580223083496,
"learning_rate": 8e-05,
"loss": 1.5974,
"step": 605
},
{
"epoch": 0.14724820799416838,
"grad_norm": 0.4473113715648651,
"learning_rate": 8e-05,
"loss": 1.7678,
"step": 606
},
{
"epoch": 0.1474911918357429,
"grad_norm": 0.4752279222011566,
"learning_rate": 8e-05,
"loss": 1.7444,
"step": 607
},
{
"epoch": 0.14773417567731745,
"grad_norm": 0.4193718135356903,
"learning_rate": 8e-05,
"loss": 1.6125,
"step": 608
},
{
"epoch": 0.14797715951889198,
"grad_norm": 0.4258435368537903,
"learning_rate": 8e-05,
"loss": 1.8557,
"step": 609
},
{
"epoch": 0.14822014336046652,
"grad_norm": 0.4286644756793976,
"learning_rate": 8e-05,
"loss": 1.6663,
"step": 610
},
{
"epoch": 0.14846312720204105,
"grad_norm": 0.41051027178764343,
"learning_rate": 8e-05,
"loss": 1.6868,
"step": 611
},
{
"epoch": 0.1487061110436156,
"grad_norm": 0.4174746870994568,
"learning_rate": 8e-05,
"loss": 1.7451,
"step": 612
},
{
"epoch": 0.14894909488519015,
"grad_norm": 0.41176313161849976,
"learning_rate": 8e-05,
"loss": 1.6725,
"step": 613
},
{
"epoch": 0.14919207872676468,
"grad_norm": 0.4219014644622803,
"learning_rate": 8e-05,
"loss": 1.7,
"step": 614
},
{
"epoch": 0.14943506256833922,
"grad_norm": 0.41391685605049133,
"learning_rate": 8e-05,
"loss": 1.5921,
"step": 615
},
{
"epoch": 0.14967804640991375,
"grad_norm": 0.4116523563861847,
"learning_rate": 8e-05,
"loss": 1.8255,
"step": 616
},
{
"epoch": 0.14992103025148829,
"grad_norm": 0.4185222089290619,
"learning_rate": 8e-05,
"loss": 1.7303,
"step": 617
},
{
"epoch": 0.15016401409306282,
"grad_norm": 0.4165772497653961,
"learning_rate": 8e-05,
"loss": 1.7455,
"step": 618
},
{
"epoch": 0.15040699793463735,
"grad_norm": 0.42195072770118713,
"learning_rate": 8e-05,
"loss": 1.8882,
"step": 619
},
{
"epoch": 0.1506499817762119,
"grad_norm": 0.43403923511505127,
"learning_rate": 8e-05,
"loss": 1.7437,
"step": 620
},
{
"epoch": 0.15089296561778642,
"grad_norm": 0.422005832195282,
"learning_rate": 8e-05,
"loss": 1.6995,
"step": 621
},
{
"epoch": 0.15113594945936096,
"grad_norm": 0.4126865565776825,
"learning_rate": 8e-05,
"loss": 1.6739,
"step": 622
},
{
"epoch": 0.1513789333009355,
"grad_norm": 0.42506495118141174,
"learning_rate": 8e-05,
"loss": 1.6437,
"step": 623
},
{
"epoch": 0.15162191714251003,
"grad_norm": 0.4353766441345215,
"learning_rate": 8e-05,
"loss": 1.8077,
"step": 624
},
{
"epoch": 0.15186490098408456,
"grad_norm": 0.43494346737861633,
"learning_rate": 8e-05,
"loss": 1.8059,
"step": 625
},
{
"epoch": 0.1521078848256591,
"grad_norm": 0.4109300374984741,
"learning_rate": 8e-05,
"loss": 1.7252,
"step": 626
},
{
"epoch": 0.15235086866723363,
"grad_norm": 0.4274158477783203,
"learning_rate": 8e-05,
"loss": 1.877,
"step": 627
},
{
"epoch": 0.15259385250880816,
"grad_norm": 0.4044157862663269,
"learning_rate": 8e-05,
"loss": 1.6925,
"step": 628
},
{
"epoch": 0.1528368363503827,
"grad_norm": 0.41327598690986633,
"learning_rate": 8e-05,
"loss": 1.6944,
"step": 629
},
{
"epoch": 0.15307982019195723,
"grad_norm": 0.4174918830394745,
"learning_rate": 8e-05,
"loss": 1.7306,
"step": 630
},
{
"epoch": 0.15332280403353177,
"grad_norm": 0.4256085157394409,
"learning_rate": 8e-05,
"loss": 1.7158,
"step": 631
},
{
"epoch": 0.1535657878751063,
"grad_norm": 0.40840238332748413,
"learning_rate": 8e-05,
"loss": 1.7003,
"step": 632
},
{
"epoch": 0.15380877171668084,
"grad_norm": 0.43425703048706055,
"learning_rate": 8e-05,
"loss": 1.7694,
"step": 633
},
{
"epoch": 0.15405175555825537,
"grad_norm": 0.4183935821056366,
"learning_rate": 8e-05,
"loss": 1.7218,
"step": 634
},
{
"epoch": 0.1542947393998299,
"grad_norm": 0.40338075160980225,
"learning_rate": 8e-05,
"loss": 1.6867,
"step": 635
},
{
"epoch": 0.15453772324140444,
"grad_norm": 0.4134522080421448,
"learning_rate": 8e-05,
"loss": 1.6338,
"step": 636
},
{
"epoch": 0.15478070708297897,
"grad_norm": 0.4205785393714905,
"learning_rate": 8e-05,
"loss": 1.7742,
"step": 637
},
{
"epoch": 0.1550236909245535,
"grad_norm": 0.39972954988479614,
"learning_rate": 8e-05,
"loss": 1.718,
"step": 638
},
{
"epoch": 0.15526667476612804,
"grad_norm": 0.4296969473361969,
"learning_rate": 8e-05,
"loss": 1.9122,
"step": 639
},
{
"epoch": 0.15550965860770258,
"grad_norm": 0.45631009340286255,
"learning_rate": 8e-05,
"loss": 1.7137,
"step": 640
},
{
"epoch": 0.1557526424492771,
"grad_norm": 0.4262036979198456,
"learning_rate": 8e-05,
"loss": 1.7179,
"step": 641
},
{
"epoch": 0.15599562629085165,
"grad_norm": 0.4598737955093384,
"learning_rate": 8e-05,
"loss": 1.697,
"step": 642
},
{
"epoch": 0.15623861013242618,
"grad_norm": 0.4666830897331238,
"learning_rate": 8e-05,
"loss": 1.7301,
"step": 643
},
{
"epoch": 0.15648159397400072,
"grad_norm": 0.41235294938087463,
"learning_rate": 8e-05,
"loss": 1.6335,
"step": 644
},
{
"epoch": 0.15672457781557528,
"grad_norm": 0.45842328667640686,
"learning_rate": 8e-05,
"loss": 1.72,
"step": 645
},
{
"epoch": 0.1569675616571498,
"grad_norm": 0.448718786239624,
"learning_rate": 8e-05,
"loss": 1.9079,
"step": 646
},
{
"epoch": 0.15721054549872435,
"grad_norm": 0.42486071586608887,
"learning_rate": 8e-05,
"loss": 1.7585,
"step": 647
},
{
"epoch": 0.15745352934029888,
"grad_norm": 0.45865485072135925,
"learning_rate": 8e-05,
"loss": 1.786,
"step": 648
},
{
"epoch": 0.15769651318187342,
"grad_norm": 0.4131222367286682,
"learning_rate": 8e-05,
"loss": 1.7231,
"step": 649
},
{
"epoch": 0.15793949702344795,
"grad_norm": 0.42305582761764526,
"learning_rate": 8e-05,
"loss": 1.6925,
"step": 650
},
{
"epoch": 0.15818248086502248,
"grad_norm": 0.42484596371650696,
"learning_rate": 8e-05,
"loss": 1.6765,
"step": 651
},
{
"epoch": 0.15842546470659702,
"grad_norm": 0.41841161251068115,
"learning_rate": 8e-05,
"loss": 1.6919,
"step": 652
},
{
"epoch": 0.15866844854817155,
"grad_norm": 0.42979368567466736,
"learning_rate": 8e-05,
"loss": 1.7638,
"step": 653
},
{
"epoch": 0.1589114323897461,
"grad_norm": 0.4110826551914215,
"learning_rate": 8e-05,
"loss": 1.6365,
"step": 654
},
{
"epoch": 0.15915441623132062,
"grad_norm": 0.4090655446052551,
"learning_rate": 8e-05,
"loss": 1.7052,
"step": 655
},
{
"epoch": 0.15939740007289516,
"grad_norm": 0.4239189624786377,
"learning_rate": 8e-05,
"loss": 1.9034,
"step": 656
},
{
"epoch": 0.1596403839144697,
"grad_norm": 0.4068964421749115,
"learning_rate": 8e-05,
"loss": 1.7131,
"step": 657
},
{
"epoch": 0.15988336775604423,
"grad_norm": 0.4201562702655792,
"learning_rate": 8e-05,
"loss": 1.7107,
"step": 658
},
{
"epoch": 0.16012635159761876,
"grad_norm": 0.4384630620479584,
"learning_rate": 8e-05,
"loss": 1.8174,
"step": 659
},
{
"epoch": 0.1603693354391933,
"grad_norm": 0.4189380407333374,
"learning_rate": 8e-05,
"loss": 1.7293,
"step": 660
},
{
"epoch": 0.16061231928076783,
"grad_norm": 0.43468591570854187,
"learning_rate": 8e-05,
"loss": 1.8541,
"step": 661
},
{
"epoch": 0.16085530312234236,
"grad_norm": 0.44042378664016724,
"learning_rate": 8e-05,
"loss": 1.8672,
"step": 662
},
{
"epoch": 0.1610982869639169,
"grad_norm": 0.39210212230682373,
"learning_rate": 8e-05,
"loss": 1.7412,
"step": 663
},
{
"epoch": 0.16134127080549143,
"grad_norm": 0.4077080488204956,
"learning_rate": 8e-05,
"loss": 1.7045,
"step": 664
},
{
"epoch": 0.16158425464706597,
"grad_norm": 0.413170725107193,
"learning_rate": 8e-05,
"loss": 1.7113,
"step": 665
},
{
"epoch": 0.1618272384886405,
"grad_norm": 0.4146006107330322,
"learning_rate": 8e-05,
"loss": 1.736,
"step": 666
},
{
"epoch": 0.16207022233021504,
"grad_norm": 0.41123950481414795,
"learning_rate": 8e-05,
"loss": 1.771,
"step": 667
},
{
"epoch": 0.16231320617178957,
"grad_norm": 0.4749496579170227,
"learning_rate": 8e-05,
"loss": 1.7646,
"step": 668
},
{
"epoch": 0.1625561900133641,
"grad_norm": 0.4240420162677765,
"learning_rate": 8e-05,
"loss": 1.8084,
"step": 669
},
{
"epoch": 0.16279917385493864,
"grad_norm": 0.3952424228191376,
"learning_rate": 8e-05,
"loss": 1.6433,
"step": 670
},
{
"epoch": 0.16304215769651317,
"grad_norm": 0.39755764603614807,
"learning_rate": 8e-05,
"loss": 1.7252,
"step": 671
},
{
"epoch": 0.1632851415380877,
"grad_norm": 0.41174235939979553,
"learning_rate": 8e-05,
"loss": 1.7415,
"step": 672
},
{
"epoch": 0.16352812537966224,
"grad_norm": 0.4327336847782135,
"learning_rate": 8e-05,
"loss": 1.8273,
"step": 673
},
{
"epoch": 0.16377110922123678,
"grad_norm": 0.3779991865158081,
"learning_rate": 8e-05,
"loss": 1.3962,
"step": 674
},
{
"epoch": 0.1640140930628113,
"grad_norm": 0.40893781185150146,
"learning_rate": 8e-05,
"loss": 1.6842,
"step": 675
},
{
"epoch": 0.16425707690438585,
"grad_norm": 0.41329607367515564,
"learning_rate": 8e-05,
"loss": 1.7249,
"step": 676
},
{
"epoch": 0.16450006074596038,
"grad_norm": 0.401868999004364,
"learning_rate": 8e-05,
"loss": 1.8449,
"step": 677
},
{
"epoch": 0.16474304458753494,
"grad_norm": 0.4251278340816498,
"learning_rate": 8e-05,
"loss": 1.8646,
"step": 678
},
{
"epoch": 0.16498602842910948,
"grad_norm": 0.4233865737915039,
"learning_rate": 8e-05,
"loss": 1.8867,
"step": 679
},
{
"epoch": 0.165229012270684,
"grad_norm": 0.4191146194934845,
"learning_rate": 8e-05,
"loss": 1.8036,
"step": 680
},
{
"epoch": 0.16547199611225855,
"grad_norm": 0.41691043972969055,
"learning_rate": 8e-05,
"loss": 1.8327,
"step": 681
},
{
"epoch": 0.16571497995383308,
"grad_norm": 0.41075795888900757,
"learning_rate": 8e-05,
"loss": 1.7189,
"step": 682
},
{
"epoch": 0.16595796379540761,
"grad_norm": 0.41517287492752075,
"learning_rate": 8e-05,
"loss": 1.7031,
"step": 683
},
{
"epoch": 0.16620094763698215,
"grad_norm": 0.4045945703983307,
"learning_rate": 8e-05,
"loss": 1.7285,
"step": 684
},
{
"epoch": 0.16644393147855668,
"grad_norm": 0.40752533078193665,
"learning_rate": 8e-05,
"loss": 1.6358,
"step": 685
},
{
"epoch": 0.16668691532013122,
"grad_norm": 0.5462080240249634,
"learning_rate": 8e-05,
"loss": 1.741,
"step": 686
},
{
"epoch": 0.16692989916170575,
"grad_norm": 0.43143707513809204,
"learning_rate": 8e-05,
"loss": 1.701,
"step": 687
},
{
"epoch": 0.1671728830032803,
"grad_norm": 0.4070568382740021,
"learning_rate": 8e-05,
"loss": 1.6335,
"step": 688
},
{
"epoch": 0.16741586684485482,
"grad_norm": 0.4276026785373688,
"learning_rate": 8e-05,
"loss": 1.8271,
"step": 689
},
{
"epoch": 0.16765885068642936,
"grad_norm": 0.4280378520488739,
"learning_rate": 8e-05,
"loss": 1.8633,
"step": 690
},
{
"epoch": 0.1679018345280039,
"grad_norm": 0.43015697598457336,
"learning_rate": 8e-05,
"loss": 1.7541,
"step": 691
},
{
"epoch": 0.16814481836957842,
"grad_norm": 0.41025182604789734,
"learning_rate": 8e-05,
"loss": 1.6571,
"step": 692
},
{
"epoch": 0.16838780221115296,
"grad_norm": 0.41091296076774597,
"learning_rate": 8e-05,
"loss": 1.7201,
"step": 693
},
{
"epoch": 0.1686307860527275,
"grad_norm": 0.3976031243801117,
"learning_rate": 8e-05,
"loss": 1.6933,
"step": 694
},
{
"epoch": 0.16887376989430203,
"grad_norm": 0.3983782231807709,
"learning_rate": 8e-05,
"loss": 1.7087,
"step": 695
},
{
"epoch": 0.16911675373587656,
"grad_norm": 0.3997262716293335,
"learning_rate": 8e-05,
"loss": 1.6181,
"step": 696
},
{
"epoch": 0.1693597375774511,
"grad_norm": 0.4466289281845093,
"learning_rate": 8e-05,
"loss": 1.9315,
"step": 697
},
{
"epoch": 0.16960272141902563,
"grad_norm": 0.40420153737068176,
"learning_rate": 8e-05,
"loss": 1.6642,
"step": 698
},
{
"epoch": 0.16984570526060017,
"grad_norm": 0.39683201909065247,
"learning_rate": 8e-05,
"loss": 1.7654,
"step": 699
},
{
"epoch": 0.1700886891021747,
"grad_norm": 0.39208847284317017,
"learning_rate": 8e-05,
"loss": 1.6486,
"step": 700
},
{
"epoch": 0.17033167294374923,
"grad_norm": 0.40302395820617676,
"learning_rate": 8e-05,
"loss": 1.6428,
"step": 701
},
{
"epoch": 0.17057465678532377,
"grad_norm": 0.40285834670066833,
"learning_rate": 8e-05,
"loss": 1.7046,
"step": 702
},
{
"epoch": 0.1708176406268983,
"grad_norm": 0.43496444821357727,
"learning_rate": 8e-05,
"loss": 1.7652,
"step": 703
},
{
"epoch": 0.17106062446847284,
"grad_norm": 0.408661812543869,
"learning_rate": 8e-05,
"loss": 1.8078,
"step": 704
},
{
"epoch": 0.17130360831004737,
"grad_norm": 0.4155483841896057,
"learning_rate": 8e-05,
"loss": 1.5835,
"step": 705
},
{
"epoch": 0.1715465921516219,
"grad_norm": 0.4108770787715912,
"learning_rate": 8e-05,
"loss": 1.5742,
"step": 706
},
{
"epoch": 0.17178957599319644,
"grad_norm": 0.43399226665496826,
"learning_rate": 8e-05,
"loss": 1.8858,
"step": 707
},
{
"epoch": 0.17203255983477098,
"grad_norm": 0.43061381578445435,
"learning_rate": 8e-05,
"loss": 1.6588,
"step": 708
},
{
"epoch": 0.1722755436763455,
"grad_norm": 0.4035395681858063,
"learning_rate": 8e-05,
"loss": 1.7147,
"step": 709
},
{
"epoch": 0.17251852751792005,
"grad_norm": 0.4377632439136505,
"learning_rate": 8e-05,
"loss": 1.7836,
"step": 710
},
{
"epoch": 0.1727615113594946,
"grad_norm": 0.4374580383300781,
"learning_rate": 8e-05,
"loss": 1.5854,
"step": 711
},
{
"epoch": 0.17300449520106914,
"grad_norm": 0.43939241766929626,
"learning_rate": 8e-05,
"loss": 1.8797,
"step": 712
},
{
"epoch": 0.17324747904264368,
"grad_norm": 0.4374196231365204,
"learning_rate": 8e-05,
"loss": 1.7044,
"step": 713
},
{
"epoch": 0.1734904628842182,
"grad_norm": 0.4204579293727875,
"learning_rate": 8e-05,
"loss": 1.6881,
"step": 714
},
{
"epoch": 0.17373344672579275,
"grad_norm": 0.4335678815841675,
"learning_rate": 8e-05,
"loss": 1.8633,
"step": 715
},
{
"epoch": 0.17397643056736728,
"grad_norm": 0.39257657527923584,
"learning_rate": 8e-05,
"loss": 1.5843,
"step": 716
},
{
"epoch": 0.17421941440894181,
"grad_norm": 0.4656222462654114,
"learning_rate": 8e-05,
"loss": 1.6843,
"step": 717
},
{
"epoch": 0.17446239825051635,
"grad_norm": 0.42730045318603516,
"learning_rate": 8e-05,
"loss": 1.8616,
"step": 718
},
{
"epoch": 0.17470538209209088,
"grad_norm": 0.38788118958473206,
"learning_rate": 8e-05,
"loss": 1.6818,
"step": 719
},
{
"epoch": 0.17494836593366542,
"grad_norm": 0.4097652733325958,
"learning_rate": 8e-05,
"loss": 1.5835,
"step": 720
},
{
"epoch": 0.17519134977523995,
"grad_norm": 0.4379699230194092,
"learning_rate": 8e-05,
"loss": 1.7638,
"step": 721
},
{
"epoch": 0.1754343336168145,
"grad_norm": 0.44912099838256836,
"learning_rate": 8e-05,
"loss": 1.7851,
"step": 722
},
{
"epoch": 0.17567731745838902,
"grad_norm": 0.4183383882045746,
"learning_rate": 8e-05,
"loss": 1.7434,
"step": 723
},
{
"epoch": 0.17592030129996356,
"grad_norm": 0.4096759259700775,
"learning_rate": 8e-05,
"loss": 1.6585,
"step": 724
},
{
"epoch": 0.1761632851415381,
"grad_norm": 0.4122353196144104,
"learning_rate": 8e-05,
"loss": 1.7693,
"step": 725
},
{
"epoch": 0.17640626898311262,
"grad_norm": 0.39519456028938293,
"learning_rate": 8e-05,
"loss": 1.676,
"step": 726
},
{
"epoch": 0.17664925282468716,
"grad_norm": 0.419357031583786,
"learning_rate": 8e-05,
"loss": 1.7755,
"step": 727
},
{
"epoch": 0.1768922366662617,
"grad_norm": 0.39010873436927795,
"learning_rate": 8e-05,
"loss": 1.6475,
"step": 728
},
{
"epoch": 0.17713522050783623,
"grad_norm": 0.41785943508148193,
"learning_rate": 8e-05,
"loss": 1.7501,
"step": 729
},
{
"epoch": 0.17737820434941076,
"grad_norm": 0.4477573037147522,
"learning_rate": 8e-05,
"loss": 1.822,
"step": 730
},
{
"epoch": 0.1776211881909853,
"grad_norm": 0.40994468331336975,
"learning_rate": 8e-05,
"loss": 1.7401,
"step": 731
},
{
"epoch": 0.17786417203255983,
"grad_norm": 0.4095614552497864,
"learning_rate": 8e-05,
"loss": 1.7347,
"step": 732
},
{
"epoch": 0.17810715587413437,
"grad_norm": 0.42191195487976074,
"learning_rate": 8e-05,
"loss": 1.5776,
"step": 733
},
{
"epoch": 0.1783501397157089,
"grad_norm": 0.4296643137931824,
"learning_rate": 8e-05,
"loss": 1.8352,
"step": 734
},
{
"epoch": 0.17859312355728343,
"grad_norm": 0.41722437739372253,
"learning_rate": 8e-05,
"loss": 1.7393,
"step": 735
},
{
"epoch": 0.17883610739885797,
"grad_norm": 0.41322436928749084,
"learning_rate": 8e-05,
"loss": 1.5554,
"step": 736
},
{
"epoch": 0.1790790912404325,
"grad_norm": 0.4126609265804291,
"learning_rate": 8e-05,
"loss": 1.7489,
"step": 737
},
{
"epoch": 0.17932207508200704,
"grad_norm": 0.41765400767326355,
"learning_rate": 8e-05,
"loss": 1.7764,
"step": 738
},
{
"epoch": 0.17956505892358157,
"grad_norm": 0.41525664925575256,
"learning_rate": 8e-05,
"loss": 1.777,
"step": 739
},
{
"epoch": 0.1798080427651561,
"grad_norm": 0.42361241579055786,
"learning_rate": 8e-05,
"loss": 1.7266,
"step": 740
},
{
"epoch": 0.18005102660673064,
"grad_norm": 0.40086159110069275,
"learning_rate": 8e-05,
"loss": 1.6845,
"step": 741
},
{
"epoch": 0.18029401044830518,
"grad_norm": 0.433889240026474,
"learning_rate": 8e-05,
"loss": 1.7732,
"step": 742
},
{
"epoch": 0.1805369942898797,
"grad_norm": 0.4261779487133026,
"learning_rate": 8e-05,
"loss": 1.6658,
"step": 743
},
{
"epoch": 0.18077997813145427,
"grad_norm": 0.41455700993537903,
"learning_rate": 8e-05,
"loss": 1.8022,
"step": 744
},
{
"epoch": 0.1810229619730288,
"grad_norm": 0.41002756357192993,
"learning_rate": 8e-05,
"loss": 1.8093,
"step": 745
},
{
"epoch": 0.18126594581460334,
"grad_norm": 0.4073259234428406,
"learning_rate": 8e-05,
"loss": 1.6903,
"step": 746
},
{
"epoch": 0.18150892965617788,
"grad_norm": 0.4292190372943878,
"learning_rate": 8e-05,
"loss": 1.7455,
"step": 747
},
{
"epoch": 0.1817519134977524,
"grad_norm": 0.3857293725013733,
"learning_rate": 8e-05,
"loss": 1.7038,
"step": 748
},
{
"epoch": 0.18199489733932694,
"grad_norm": 0.4110948145389557,
"learning_rate": 8e-05,
"loss": 1.7761,
"step": 749
},
{
"epoch": 0.18223788118090148,
"grad_norm": 0.4361400306224823,
"learning_rate": 8e-05,
"loss": 1.5824,
"step": 750
},
{
"epoch": 0.182480865022476,
"grad_norm": 0.4137819707393646,
"learning_rate": 8e-05,
"loss": 1.6897,
"step": 751
},
{
"epoch": 0.18272384886405055,
"grad_norm": 0.4077703058719635,
"learning_rate": 8e-05,
"loss": 1.7019,
"step": 752
},
{
"epoch": 0.18296683270562508,
"grad_norm": 0.44059401750564575,
"learning_rate": 8e-05,
"loss": 1.6948,
"step": 753
},
{
"epoch": 0.18320981654719962,
"grad_norm": 0.4075104296207428,
"learning_rate": 8e-05,
"loss": 1.6569,
"step": 754
},
{
"epoch": 0.18345280038877415,
"grad_norm": 0.4188830554485321,
"learning_rate": 8e-05,
"loss": 1.7414,
"step": 755
},
{
"epoch": 0.18369578423034869,
"grad_norm": 0.41404202580451965,
"learning_rate": 8e-05,
"loss": 1.6821,
"step": 756
},
{
"epoch": 0.18393876807192322,
"grad_norm": 0.41878440976142883,
"learning_rate": 8e-05,
"loss": 1.7434,
"step": 757
},
{
"epoch": 0.18418175191349775,
"grad_norm": 0.43462273478507996,
"learning_rate": 8e-05,
"loss": 1.6551,
"step": 758
},
{
"epoch": 0.1844247357550723,
"grad_norm": 0.39942023158073425,
"learning_rate": 8e-05,
"loss": 1.6298,
"step": 759
},
{
"epoch": 0.18466771959664682,
"grad_norm": 0.4408327341079712,
"learning_rate": 8e-05,
"loss": 1.7053,
"step": 760
},
{
"epoch": 0.18491070343822136,
"grad_norm": 0.45880624651908875,
"learning_rate": 8e-05,
"loss": 1.8,
"step": 761
},
{
"epoch": 0.1851536872797959,
"grad_norm": 0.4250109791755676,
"learning_rate": 8e-05,
"loss": 1.7737,
"step": 762
},
{
"epoch": 0.18539667112137043,
"grad_norm": 0.43293994665145874,
"learning_rate": 8e-05,
"loss": 1.8144,
"step": 763
},
{
"epoch": 0.18563965496294496,
"grad_norm": 0.3994579613208771,
"learning_rate": 8e-05,
"loss": 1.7968,
"step": 764
},
{
"epoch": 0.1858826388045195,
"grad_norm": 0.427898645401001,
"learning_rate": 8e-05,
"loss": 1.6265,
"step": 765
},
{
"epoch": 0.18612562264609403,
"grad_norm": 0.4274621307849884,
"learning_rate": 8e-05,
"loss": 1.8205,
"step": 766
},
{
"epoch": 0.18636860648766856,
"grad_norm": 0.42581209540367126,
"learning_rate": 8e-05,
"loss": 1.7412,
"step": 767
},
{
"epoch": 0.1866115903292431,
"grad_norm": 0.4216351807117462,
"learning_rate": 8e-05,
"loss": 1.6394,
"step": 768
},
{
"epoch": 0.18685457417081763,
"grad_norm": 0.41380995512008667,
"learning_rate": 8e-05,
"loss": 1.7582,
"step": 769
},
{
"epoch": 0.18709755801239217,
"grad_norm": 0.3948034346103668,
"learning_rate": 8e-05,
"loss": 1.545,
"step": 770
},
{
"epoch": 0.1873405418539667,
"grad_norm": 0.40538617968559265,
"learning_rate": 8e-05,
"loss": 1.7171,
"step": 771
},
{
"epoch": 0.18758352569554124,
"grad_norm": 0.40522703528404236,
"learning_rate": 8e-05,
"loss": 1.742,
"step": 772
},
{
"epoch": 0.18782650953711577,
"grad_norm": 0.40100088715553284,
"learning_rate": 8e-05,
"loss": 1.4948,
"step": 773
},
{
"epoch": 0.1880694933786903,
"grad_norm": 0.4220276176929474,
"learning_rate": 8e-05,
"loss": 1.7515,
"step": 774
},
{
"epoch": 0.18831247722026484,
"grad_norm": 0.4018454849720001,
"learning_rate": 8e-05,
"loss": 1.4636,
"step": 775
},
{
"epoch": 0.18855546106183937,
"grad_norm": 0.4422740638256073,
"learning_rate": 8e-05,
"loss": 1.7838,
"step": 776
},
{
"epoch": 0.18879844490341394,
"grad_norm": 0.40587881207466125,
"learning_rate": 8e-05,
"loss": 1.6725,
"step": 777
},
{
"epoch": 0.18904142874498847,
"grad_norm": 0.4551006853580475,
"learning_rate": 8e-05,
"loss": 1.8645,
"step": 778
},
{
"epoch": 0.189284412586563,
"grad_norm": 0.41088271141052246,
"learning_rate": 8e-05,
"loss": 1.7109,
"step": 779
},
{
"epoch": 0.18952739642813754,
"grad_norm": 0.4191667139530182,
"learning_rate": 8e-05,
"loss": 1.738,
"step": 780
},
{
"epoch": 0.18977038026971207,
"grad_norm": 0.4077453911304474,
"learning_rate": 8e-05,
"loss": 1.6447,
"step": 781
},
{
"epoch": 0.1900133641112866,
"grad_norm": 0.3883300721645355,
"learning_rate": 8e-05,
"loss": 1.5535,
"step": 782
},
{
"epoch": 0.19025634795286114,
"grad_norm": 0.4219571053981781,
"learning_rate": 8e-05,
"loss": 1.5934,
"step": 783
},
{
"epoch": 0.19049933179443568,
"grad_norm": 0.4366585910320282,
"learning_rate": 8e-05,
"loss": 1.7601,
"step": 784
},
{
"epoch": 0.1907423156360102,
"grad_norm": 0.4421660900115967,
"learning_rate": 8e-05,
"loss": 1.8607,
"step": 785
},
{
"epoch": 0.19098529947758475,
"grad_norm": 0.44615718722343445,
"learning_rate": 8e-05,
"loss": 1.7898,
"step": 786
},
{
"epoch": 0.19122828331915928,
"grad_norm": 0.39300891757011414,
"learning_rate": 8e-05,
"loss": 1.5949,
"step": 787
},
{
"epoch": 0.19147126716073382,
"grad_norm": 0.42134273052215576,
"learning_rate": 8e-05,
"loss": 1.7447,
"step": 788
},
{
"epoch": 0.19171425100230835,
"grad_norm": 0.4182814955711365,
"learning_rate": 8e-05,
"loss": 1.7481,
"step": 789
},
{
"epoch": 0.19195723484388288,
"grad_norm": 0.41383031010627747,
"learning_rate": 8e-05,
"loss": 1.7465,
"step": 790
},
{
"epoch": 0.19220021868545742,
"grad_norm": 0.41922250390052795,
"learning_rate": 8e-05,
"loss": 1.7863,
"step": 791
},
{
"epoch": 0.19244320252703195,
"grad_norm": 0.4196271300315857,
"learning_rate": 8e-05,
"loss": 1.8236,
"step": 792
},
{
"epoch": 0.1926861863686065,
"grad_norm": 0.41270729899406433,
"learning_rate": 8e-05,
"loss": 1.823,
"step": 793
},
{
"epoch": 0.19292917021018102,
"grad_norm": 0.40714430809020996,
"learning_rate": 8e-05,
"loss": 1.7527,
"step": 794
},
{
"epoch": 0.19317215405175556,
"grad_norm": 0.4369906783103943,
"learning_rate": 8e-05,
"loss": 1.8872,
"step": 795
},
{
"epoch": 0.1934151378933301,
"grad_norm": 0.4148472249507904,
"learning_rate": 8e-05,
"loss": 1.519,
"step": 796
},
{
"epoch": 0.19365812173490463,
"grad_norm": 0.4084674119949341,
"learning_rate": 8e-05,
"loss": 1.7417,
"step": 797
},
{
"epoch": 0.19390110557647916,
"grad_norm": 0.40457743406295776,
"learning_rate": 8e-05,
"loss": 1.7221,
"step": 798
},
{
"epoch": 0.1941440894180537,
"grad_norm": 0.4159713387489319,
"learning_rate": 8e-05,
"loss": 1.6979,
"step": 799
},
{
"epoch": 0.19438707325962823,
"grad_norm": 0.42744749784469604,
"learning_rate": 8e-05,
"loss": 1.6661,
"step": 800
},
{
"epoch": 0.19463005710120276,
"grad_norm": 0.40813565254211426,
"learning_rate": 8e-05,
"loss": 1.5935,
"step": 801
},
{
"epoch": 0.1948730409427773,
"grad_norm": 0.42466405034065247,
"learning_rate": 8e-05,
"loss": 1.7304,
"step": 802
},
{
"epoch": 0.19511602478435183,
"grad_norm": 0.41067975759506226,
"learning_rate": 8e-05,
"loss": 1.7647,
"step": 803
},
{
"epoch": 0.19535900862592637,
"grad_norm": 0.415198415517807,
"learning_rate": 8e-05,
"loss": 1.678,
"step": 804
},
{
"epoch": 0.1956019924675009,
"grad_norm": 0.43364110589027405,
"learning_rate": 8e-05,
"loss": 1.7125,
"step": 805
},
{
"epoch": 0.19584497630907544,
"grad_norm": 0.42752882838249207,
"learning_rate": 8e-05,
"loss": 1.7021,
"step": 806
},
{
"epoch": 0.19608796015064997,
"grad_norm": 0.40812936425209045,
"learning_rate": 8e-05,
"loss": 1.5766,
"step": 807
},
{
"epoch": 0.1963309439922245,
"grad_norm": 0.4319069981575012,
"learning_rate": 8e-05,
"loss": 1.7433,
"step": 808
},
{
"epoch": 0.19657392783379904,
"grad_norm": 0.40930795669555664,
"learning_rate": 8e-05,
"loss": 1.6043,
"step": 809
},
{
"epoch": 0.1968169116753736,
"grad_norm": 0.4289321303367615,
"learning_rate": 8e-05,
"loss": 1.7135,
"step": 810
},
{
"epoch": 0.19705989551694814,
"grad_norm": 0.4316776692867279,
"learning_rate": 8e-05,
"loss": 1.8198,
"step": 811
},
{
"epoch": 0.19730287935852267,
"grad_norm": 0.4388635456562042,
"learning_rate": 8e-05,
"loss": 1.7959,
"step": 812
},
{
"epoch": 0.1975458632000972,
"grad_norm": 0.425304114818573,
"learning_rate": 8e-05,
"loss": 1.7251,
"step": 813
},
{
"epoch": 0.19778884704167174,
"grad_norm": 0.4264742136001587,
"learning_rate": 8e-05,
"loss": 1.7032,
"step": 814
},
{
"epoch": 0.19803183088324627,
"grad_norm": 0.3999445140361786,
"learning_rate": 8e-05,
"loss": 1.4865,
"step": 815
},
{
"epoch": 0.1982748147248208,
"grad_norm": 0.39516952633857727,
"learning_rate": 8e-05,
"loss": 1.69,
"step": 816
},
{
"epoch": 0.19851779856639534,
"grad_norm": 0.41314324736595154,
"learning_rate": 8e-05,
"loss": 1.6587,
"step": 817
},
{
"epoch": 0.19876078240796988,
"grad_norm": 0.4036077558994293,
"learning_rate": 8e-05,
"loss": 1.6872,
"step": 818
},
{
"epoch": 0.1990037662495444,
"grad_norm": 0.41270309686660767,
"learning_rate": 8e-05,
"loss": 1.6919,
"step": 819
},
{
"epoch": 0.19924675009111895,
"grad_norm": 0.42153701186180115,
"learning_rate": 8e-05,
"loss": 1.7061,
"step": 820
},
{
"epoch": 0.19948973393269348,
"grad_norm": 0.40164998173713684,
"learning_rate": 8e-05,
"loss": 1.6747,
"step": 821
},
{
"epoch": 0.19973271777426801,
"grad_norm": 0.42216864228248596,
"learning_rate": 8e-05,
"loss": 1.7704,
"step": 822
},
{
"epoch": 0.19997570161584255,
"grad_norm": 0.4152696132659912,
"learning_rate": 8e-05,
"loss": 1.6546,
"step": 823
},
{
"epoch": 0.20021868545741708,
"grad_norm": 0.44916486740112305,
"learning_rate": 8e-05,
"loss": 1.7995,
"step": 824
},
{
"epoch": 0.20046166929899162,
"grad_norm": 0.40811580419540405,
"learning_rate": 8e-05,
"loss": 1.6106,
"step": 825
},
{
"epoch": 0.20070465314056615,
"grad_norm": 0.4276411831378937,
"learning_rate": 8e-05,
"loss": 1.7476,
"step": 826
},
{
"epoch": 0.2009476369821407,
"grad_norm": 0.43811461329460144,
"learning_rate": 8e-05,
"loss": 1.6982,
"step": 827
},
{
"epoch": 0.20119062082371522,
"grad_norm": 0.43729057908058167,
"learning_rate": 8e-05,
"loss": 1.7084,
"step": 828
},
{
"epoch": 0.20143360466528976,
"grad_norm": 0.4092738628387451,
"learning_rate": 8e-05,
"loss": 1.7276,
"step": 829
},
{
"epoch": 0.2016765885068643,
"grad_norm": 0.42942163348197937,
"learning_rate": 8e-05,
"loss": 1.64,
"step": 830
},
{
"epoch": 0.20191957234843882,
"grad_norm": 0.4280286729335785,
"learning_rate": 8e-05,
"loss": 1.795,
"step": 831
},
{
"epoch": 0.20216255619001336,
"grad_norm": 0.4240659475326538,
"learning_rate": 8e-05,
"loss": 1.811,
"step": 832
},
{
"epoch": 0.2024055400315879,
"grad_norm": 0.40779125690460205,
"learning_rate": 8e-05,
"loss": 1.6911,
"step": 833
},
{
"epoch": 0.20264852387316243,
"grad_norm": 0.4293864965438843,
"learning_rate": 8e-05,
"loss": 1.7607,
"step": 834
},
{
"epoch": 0.20289150771473696,
"grad_norm": 0.4241393506526947,
"learning_rate": 8e-05,
"loss": 1.6829,
"step": 835
},
{
"epoch": 0.2031344915563115,
"grad_norm": 0.3994300067424774,
"learning_rate": 8e-05,
"loss": 1.6725,
"step": 836
},
{
"epoch": 0.20337747539788603,
"grad_norm": 0.4241347908973694,
"learning_rate": 8e-05,
"loss": 1.723,
"step": 837
},
{
"epoch": 0.20362045923946057,
"grad_norm": 0.4182228744029999,
"learning_rate": 8e-05,
"loss": 1.8148,
"step": 838
},
{
"epoch": 0.2038634430810351,
"grad_norm": 0.44692403078079224,
"learning_rate": 8e-05,
"loss": 1.7862,
"step": 839
},
{
"epoch": 0.20410642692260964,
"grad_norm": 0.4088408052921295,
"learning_rate": 8e-05,
"loss": 1.8305,
"step": 840
},
{
"epoch": 0.20434941076418417,
"grad_norm": 0.4297780692577362,
"learning_rate": 8e-05,
"loss": 1.6506,
"step": 841
},
{
"epoch": 0.2045923946057587,
"grad_norm": 0.43446582555770874,
"learning_rate": 8e-05,
"loss": 1.7397,
"step": 842
},
{
"epoch": 0.20483537844733327,
"grad_norm": 0.4295143187046051,
"learning_rate": 8e-05,
"loss": 1.7383,
"step": 843
},
{
"epoch": 0.2050783622889078,
"grad_norm": 0.3991616666316986,
"learning_rate": 8e-05,
"loss": 1.636,
"step": 844
},
{
"epoch": 0.20532134613048234,
"grad_norm": 0.4088374078273773,
"learning_rate": 8e-05,
"loss": 1.828,
"step": 845
},
{
"epoch": 0.20556432997205687,
"grad_norm": 0.4084298014640808,
"learning_rate": 8e-05,
"loss": 1.714,
"step": 846
},
{
"epoch": 0.2058073138136314,
"grad_norm": 0.4130483567714691,
"learning_rate": 8e-05,
"loss": 1.5867,
"step": 847
},
{
"epoch": 0.20605029765520594,
"grad_norm": 0.415761798620224,
"learning_rate": 8e-05,
"loss": 1.5826,
"step": 848
},
{
"epoch": 0.20629328149678047,
"grad_norm": 0.4126366674900055,
"learning_rate": 8e-05,
"loss": 1.6981,
"step": 849
},
{
"epoch": 0.206536265338355,
"grad_norm": 0.41798070073127747,
"learning_rate": 8e-05,
"loss": 1.8068,
"step": 850
},
{
"epoch": 0.20677924917992954,
"grad_norm": 0.41303834319114685,
"learning_rate": 8e-05,
"loss": 1.7239,
"step": 851
},
{
"epoch": 0.20702223302150408,
"grad_norm": 0.40471598505973816,
"learning_rate": 8e-05,
"loss": 1.737,
"step": 852
},
{
"epoch": 0.2072652168630786,
"grad_norm": 0.4138995409011841,
"learning_rate": 8e-05,
"loss": 1.6055,
"step": 853
},
{
"epoch": 0.20750820070465315,
"grad_norm": 0.42502766847610474,
"learning_rate": 8e-05,
"loss": 1.7967,
"step": 854
},
{
"epoch": 0.20775118454622768,
"grad_norm": 0.4025301933288574,
"learning_rate": 8e-05,
"loss": 1.6696,
"step": 855
},
{
"epoch": 0.20799416838780221,
"grad_norm": 0.40375009179115295,
"learning_rate": 8e-05,
"loss": 1.7121,
"step": 856
},
{
"epoch": 0.20823715222937675,
"grad_norm": 0.4137362241744995,
"learning_rate": 8e-05,
"loss": 1.5489,
"step": 857
},
{
"epoch": 0.20848013607095128,
"grad_norm": 0.4193675220012665,
"learning_rate": 8e-05,
"loss": 1.746,
"step": 858
},
{
"epoch": 0.20872311991252582,
"grad_norm": 0.4016789197921753,
"learning_rate": 8e-05,
"loss": 1.6771,
"step": 859
},
{
"epoch": 0.20896610375410035,
"grad_norm": 0.45497262477874756,
"learning_rate": 8e-05,
"loss": 1.6858,
"step": 860
},
{
"epoch": 0.2092090875956749,
"grad_norm": 0.43340960144996643,
"learning_rate": 8e-05,
"loss": 1.6765,
"step": 861
},
{
"epoch": 0.20945207143724942,
"grad_norm": 0.39834776520729065,
"learning_rate": 8e-05,
"loss": 1.6862,
"step": 862
},
{
"epoch": 0.20969505527882396,
"grad_norm": 0.4076816439628601,
"learning_rate": 8e-05,
"loss": 1.7348,
"step": 863
},
{
"epoch": 0.2099380391203985,
"grad_norm": 0.4182765483856201,
"learning_rate": 8e-05,
"loss": 1.7633,
"step": 864
},
{
"epoch": 0.21018102296197302,
"grad_norm": 0.38607677817344666,
"learning_rate": 8e-05,
"loss": 1.5854,
"step": 865
},
{
"epoch": 0.21042400680354756,
"grad_norm": 0.4406508803367615,
"learning_rate": 8e-05,
"loss": 1.6903,
"step": 866
},
{
"epoch": 0.2106669906451221,
"grad_norm": 0.4337162971496582,
"learning_rate": 8e-05,
"loss": 1.6993,
"step": 867
},
{
"epoch": 0.21090997448669663,
"grad_norm": 0.40655967593193054,
"learning_rate": 8e-05,
"loss": 1.6215,
"step": 868
},
{
"epoch": 0.21115295832827116,
"grad_norm": 0.4192434549331665,
"learning_rate": 8e-05,
"loss": 1.8316,
"step": 869
},
{
"epoch": 0.2113959421698457,
"grad_norm": 0.41924941539764404,
"learning_rate": 8e-05,
"loss": 1.9092,
"step": 870
},
{
"epoch": 0.21163892601142023,
"grad_norm": 0.4139121472835541,
"learning_rate": 8e-05,
"loss": 1.7908,
"step": 871
},
{
"epoch": 0.21188190985299477,
"grad_norm": 0.42128536105155945,
"learning_rate": 8e-05,
"loss": 1.6032,
"step": 872
},
{
"epoch": 0.2121248936945693,
"grad_norm": 0.419964075088501,
"learning_rate": 8e-05,
"loss": 1.7105,
"step": 873
},
{
"epoch": 0.21236787753614383,
"grad_norm": 0.44340789318084717,
"learning_rate": 8e-05,
"loss": 1.8523,
"step": 874
},
{
"epoch": 0.21261086137771837,
"grad_norm": 0.3913477957248688,
"learning_rate": 8e-05,
"loss": 1.6358,
"step": 875
},
{
"epoch": 0.21285384521929293,
"grad_norm": 0.41680392622947693,
"learning_rate": 8e-05,
"loss": 1.7872,
"step": 876
},
{
"epoch": 0.21309682906086747,
"grad_norm": 0.40231987833976746,
"learning_rate": 8e-05,
"loss": 1.5709,
"step": 877
},
{
"epoch": 0.213339812902442,
"grad_norm": 0.42542412877082825,
"learning_rate": 8e-05,
"loss": 1.6609,
"step": 878
},
{
"epoch": 0.21358279674401653,
"grad_norm": 0.40235117077827454,
"learning_rate": 8e-05,
"loss": 1.6371,
"step": 879
},
{
"epoch": 0.21382578058559107,
"grad_norm": 0.41061437129974365,
"learning_rate": 8e-05,
"loss": 1.7808,
"step": 880
},
{
"epoch": 0.2140687644271656,
"grad_norm": 0.42108458280563354,
"learning_rate": 8e-05,
"loss": 1.7367,
"step": 881
},
{
"epoch": 0.21431174826874014,
"grad_norm": 0.4302060008049011,
"learning_rate": 8e-05,
"loss": 1.6046,
"step": 882
},
{
"epoch": 0.21455473211031467,
"grad_norm": 0.4124979078769684,
"learning_rate": 8e-05,
"loss": 1.6505,
"step": 883
},
{
"epoch": 0.2147977159518892,
"grad_norm": 0.40241193771362305,
"learning_rate": 8e-05,
"loss": 1.6988,
"step": 884
},
{
"epoch": 0.21504069979346374,
"grad_norm": 0.3977680802345276,
"learning_rate": 8e-05,
"loss": 1.5659,
"step": 885
},
{
"epoch": 0.21528368363503828,
"grad_norm": 0.4362911283969879,
"learning_rate": 8e-05,
"loss": 1.8247,
"step": 886
},
{
"epoch": 0.2155266674766128,
"grad_norm": 0.4307892322540283,
"learning_rate": 8e-05,
"loss": 1.7296,
"step": 887
},
{
"epoch": 0.21576965131818734,
"grad_norm": 0.4166683852672577,
"learning_rate": 8e-05,
"loss": 1.7336,
"step": 888
},
{
"epoch": 0.21601263515976188,
"grad_norm": 0.4215519428253174,
"learning_rate": 8e-05,
"loss": 1.7006,
"step": 889
},
{
"epoch": 0.2162556190013364,
"grad_norm": 0.40709665417671204,
"learning_rate": 8e-05,
"loss": 1.5952,
"step": 890
},
{
"epoch": 0.21649860284291095,
"grad_norm": 0.42042356729507446,
"learning_rate": 8e-05,
"loss": 1.7194,
"step": 891
},
{
"epoch": 0.21674158668448548,
"grad_norm": 0.43915465474128723,
"learning_rate": 8e-05,
"loss": 1.7472,
"step": 892
},
{
"epoch": 0.21698457052606002,
"grad_norm": 0.40834030508995056,
"learning_rate": 8e-05,
"loss": 1.7417,
"step": 893
},
{
"epoch": 0.21722755436763455,
"grad_norm": 0.4310332238674164,
"learning_rate": 8e-05,
"loss": 1.8158,
"step": 894
},
{
"epoch": 0.21747053820920909,
"grad_norm": 0.39638951420783997,
"learning_rate": 8e-05,
"loss": 1.6619,
"step": 895
},
{
"epoch": 0.21771352205078362,
"grad_norm": 0.421747088432312,
"learning_rate": 8e-05,
"loss": 1.7057,
"step": 896
},
{
"epoch": 0.21795650589235815,
"grad_norm": 0.4247625470161438,
"learning_rate": 8e-05,
"loss": 1.8315,
"step": 897
},
{
"epoch": 0.2181994897339327,
"grad_norm": 0.3869495689868927,
"learning_rate": 8e-05,
"loss": 1.6272,
"step": 898
},
{
"epoch": 0.21844247357550722,
"grad_norm": 0.4097820520401001,
"learning_rate": 8e-05,
"loss": 1.6738,
"step": 899
},
{
"epoch": 0.21868545741708176,
"grad_norm": 0.40507251024246216,
"learning_rate": 8e-05,
"loss": 1.6671,
"step": 900
},
{
"epoch": 0.2189284412586563,
"grad_norm": 0.4169073700904846,
"learning_rate": 8e-05,
"loss": 1.7553,
"step": 901
},
{
"epoch": 0.21917142510023083,
"grad_norm": 0.42916202545166016,
"learning_rate": 8e-05,
"loss": 1.6308,
"step": 902
},
{
"epoch": 0.21941440894180536,
"grad_norm": 0.4131448268890381,
"learning_rate": 8e-05,
"loss": 1.7594,
"step": 903
},
{
"epoch": 0.2196573927833799,
"grad_norm": 0.4290483891963959,
"learning_rate": 8e-05,
"loss": 1.7547,
"step": 904
},
{
"epoch": 0.21990037662495443,
"grad_norm": 0.44039106369018555,
"learning_rate": 8e-05,
"loss": 1.708,
"step": 905
},
{
"epoch": 0.22014336046652896,
"grad_norm": 0.42198899388313293,
"learning_rate": 8e-05,
"loss": 1.6685,
"step": 906
},
{
"epoch": 0.2203863443081035,
"grad_norm": 0.4307883679866791,
"learning_rate": 8e-05,
"loss": 1.748,
"step": 907
},
{
"epoch": 0.22062932814967803,
"grad_norm": 0.43735718727111816,
"learning_rate": 8e-05,
"loss": 1.7796,
"step": 908
},
{
"epoch": 0.22087231199125257,
"grad_norm": 0.4238229990005493,
"learning_rate": 8e-05,
"loss": 1.8024,
"step": 909
},
{
"epoch": 0.22111529583282713,
"grad_norm": 0.44213277101516724,
"learning_rate": 8e-05,
"loss": 1.7095,
"step": 910
},
{
"epoch": 0.22135827967440166,
"grad_norm": 0.450988233089447,
"learning_rate": 8e-05,
"loss": 1.9036,
"step": 911
},
{
"epoch": 0.2216012635159762,
"grad_norm": 0.40055665373802185,
"learning_rate": 8e-05,
"loss": 1.5883,
"step": 912
},
{
"epoch": 0.22184424735755073,
"grad_norm": 0.4240516126155853,
"learning_rate": 8e-05,
"loss": 1.6643,
"step": 913
},
{
"epoch": 0.22208723119912527,
"grad_norm": 0.4209631383419037,
"learning_rate": 8e-05,
"loss": 1.7108,
"step": 914
},
{
"epoch": 0.2223302150406998,
"grad_norm": 0.40620243549346924,
"learning_rate": 8e-05,
"loss": 1.6811,
"step": 915
},
{
"epoch": 0.22257319888227434,
"grad_norm": 0.4493052661418915,
"learning_rate": 8e-05,
"loss": 1.904,
"step": 916
},
{
"epoch": 0.22281618272384887,
"grad_norm": 0.4233781099319458,
"learning_rate": 8e-05,
"loss": 1.7257,
"step": 917
},
{
"epoch": 0.2230591665654234,
"grad_norm": 0.4173276722431183,
"learning_rate": 8e-05,
"loss": 1.785,
"step": 918
},
{
"epoch": 0.22330215040699794,
"grad_norm": 0.4131208062171936,
"learning_rate": 8e-05,
"loss": 1.8299,
"step": 919
},
{
"epoch": 0.22354513424857247,
"grad_norm": 0.4253666400909424,
"learning_rate": 8e-05,
"loss": 1.7282,
"step": 920
},
{
"epoch": 0.223788118090147,
"grad_norm": 0.40283799171447754,
"learning_rate": 8e-05,
"loss": 1.759,
"step": 921
},
{
"epoch": 0.22403110193172154,
"grad_norm": 0.40612563490867615,
"learning_rate": 8e-05,
"loss": 1.7855,
"step": 922
},
{
"epoch": 0.22427408577329608,
"grad_norm": 0.4286128282546997,
"learning_rate": 8e-05,
"loss": 1.6473,
"step": 923
},
{
"epoch": 0.2245170696148706,
"grad_norm": 0.44093555212020874,
"learning_rate": 8e-05,
"loss": 1.7744,
"step": 924
},
{
"epoch": 0.22476005345644515,
"grad_norm": 0.431942343711853,
"learning_rate": 8e-05,
"loss": 1.7882,
"step": 925
},
{
"epoch": 0.22500303729801968,
"grad_norm": 0.44907891750335693,
"learning_rate": 8e-05,
"loss": 1.7911,
"step": 926
},
{
"epoch": 0.22524602113959422,
"grad_norm": 0.4033655524253845,
"learning_rate": 8e-05,
"loss": 1.6256,
"step": 927
},
{
"epoch": 0.22548900498116875,
"grad_norm": 0.42380547523498535,
"learning_rate": 8e-05,
"loss": 1.7455,
"step": 928
},
{
"epoch": 0.22573198882274328,
"grad_norm": 0.4354460537433624,
"learning_rate": 8e-05,
"loss": 1.745,
"step": 929
},
{
"epoch": 0.22597497266431782,
"grad_norm": 0.446147084236145,
"learning_rate": 8e-05,
"loss": 1.778,
"step": 930
},
{
"epoch": 0.22621795650589235,
"grad_norm": 0.41288965940475464,
"learning_rate": 8e-05,
"loss": 1.7558,
"step": 931
},
{
"epoch": 0.2264609403474669,
"grad_norm": 0.4251484274864197,
"learning_rate": 8e-05,
"loss": 1.7162,
"step": 932
},
{
"epoch": 0.22670392418904142,
"grad_norm": 0.39323094487190247,
"learning_rate": 8e-05,
"loss": 1.5605,
"step": 933
},
{
"epoch": 0.22694690803061596,
"grad_norm": 0.4114528298377991,
"learning_rate": 8e-05,
"loss": 1.6126,
"step": 934
},
{
"epoch": 0.2271898918721905,
"grad_norm": 0.40497374534606934,
"learning_rate": 8e-05,
"loss": 1.6789,
"step": 935
},
{
"epoch": 0.22743287571376503,
"grad_norm": 0.4167117178440094,
"learning_rate": 8e-05,
"loss": 1.7146,
"step": 936
},
{
"epoch": 0.22767585955533956,
"grad_norm": 0.45462724566459656,
"learning_rate": 8e-05,
"loss": 1.7174,
"step": 937
},
{
"epoch": 0.2279188433969141,
"grad_norm": 0.39964571595191956,
"learning_rate": 8e-05,
"loss": 1.6931,
"step": 938
},
{
"epoch": 0.22816182723848863,
"grad_norm": 0.4638615548610687,
"learning_rate": 8e-05,
"loss": 1.9907,
"step": 939
},
{
"epoch": 0.22840481108006316,
"grad_norm": 0.4084910750389099,
"learning_rate": 8e-05,
"loss": 1.7023,
"step": 940
},
{
"epoch": 0.2286477949216377,
"grad_norm": 0.4054889380931854,
"learning_rate": 8e-05,
"loss": 1.6179,
"step": 941
},
{
"epoch": 0.22889077876321223,
"grad_norm": 0.40866798162460327,
"learning_rate": 8e-05,
"loss": 1.6645,
"step": 942
},
{
"epoch": 0.2291337626047868,
"grad_norm": 0.41853010654449463,
"learning_rate": 8e-05,
"loss": 1.7759,
"step": 943
},
{
"epoch": 0.22937674644636133,
"grad_norm": 0.4199506938457489,
"learning_rate": 8e-05,
"loss": 1.7393,
"step": 944
},
{
"epoch": 0.22961973028793586,
"grad_norm": 0.4339092969894409,
"learning_rate": 8e-05,
"loss": 1.7896,
"step": 945
},
{
"epoch": 0.2298627141295104,
"grad_norm": 0.41555607318878174,
"learning_rate": 8e-05,
"loss": 1.5698,
"step": 946
},
{
"epoch": 0.23010569797108493,
"grad_norm": 0.4364539086818695,
"learning_rate": 8e-05,
"loss": 1.8479,
"step": 947
},
{
"epoch": 0.23034868181265947,
"grad_norm": 0.40731021761894226,
"learning_rate": 8e-05,
"loss": 1.6878,
"step": 948
},
{
"epoch": 0.230591665654234,
"grad_norm": 0.4170721471309662,
"learning_rate": 8e-05,
"loss": 1.7859,
"step": 949
},
{
"epoch": 0.23083464949580854,
"grad_norm": 0.39958837628364563,
"learning_rate": 8e-05,
"loss": 1.7188,
"step": 950
},
{
"epoch": 0.23107763333738307,
"grad_norm": 0.4102901518344879,
"learning_rate": 8e-05,
"loss": 1.7157,
"step": 951
},
{
"epoch": 0.2313206171789576,
"grad_norm": 0.42558231949806213,
"learning_rate": 8e-05,
"loss": 1.8472,
"step": 952
},
{
"epoch": 0.23156360102053214,
"grad_norm": 0.4051419496536255,
"learning_rate": 8e-05,
"loss": 1.7987,
"step": 953
},
{
"epoch": 0.23180658486210667,
"grad_norm": 0.39528441429138184,
"learning_rate": 8e-05,
"loss": 1.6232,
"step": 954
},
{
"epoch": 0.2320495687036812,
"grad_norm": 0.41888582706451416,
"learning_rate": 8e-05,
"loss": 1.7172,
"step": 955
},
{
"epoch": 0.23229255254525574,
"grad_norm": 0.567353367805481,
"learning_rate": 8e-05,
"loss": 1.8401,
"step": 956
},
{
"epoch": 0.23253553638683028,
"grad_norm": 0.43099990487098694,
"learning_rate": 8e-05,
"loss": 1.9614,
"step": 957
},
{
"epoch": 0.2327785202284048,
"grad_norm": 0.40936943888664246,
"learning_rate": 8e-05,
"loss": 1.6891,
"step": 958
},
{
"epoch": 0.23302150406997935,
"grad_norm": 0.42363786697387695,
"learning_rate": 8e-05,
"loss": 1.7326,
"step": 959
},
{
"epoch": 0.23326448791155388,
"grad_norm": 0.39604732394218445,
"learning_rate": 8e-05,
"loss": 1.663,
"step": 960
},
{
"epoch": 0.23350747175312841,
"grad_norm": 0.3995433449745178,
"learning_rate": 8e-05,
"loss": 1.5927,
"step": 961
},
{
"epoch": 0.23375045559470295,
"grad_norm": 0.4398478865623474,
"learning_rate": 8e-05,
"loss": 1.7686,
"step": 962
},
{
"epoch": 0.23399343943627748,
"grad_norm": 0.4354543387889862,
"learning_rate": 8e-05,
"loss": 1.6986,
"step": 963
},
{
"epoch": 0.23423642327785202,
"grad_norm": 0.4096083641052246,
"learning_rate": 8e-05,
"loss": 1.6899,
"step": 964
},
{
"epoch": 0.23447940711942655,
"grad_norm": 0.3929365873336792,
"learning_rate": 8e-05,
"loss": 1.4504,
"step": 965
},
{
"epoch": 0.2347223909610011,
"grad_norm": 0.41313865780830383,
"learning_rate": 8e-05,
"loss": 1.6626,
"step": 966
},
{
"epoch": 0.23496537480257562,
"grad_norm": 0.41436493396759033,
"learning_rate": 8e-05,
"loss": 1.7809,
"step": 967
},
{
"epoch": 0.23520835864415016,
"grad_norm": 0.4562958776950836,
"learning_rate": 8e-05,
"loss": 2.001,
"step": 968
},
{
"epoch": 0.2354513424857247,
"grad_norm": 0.4054759442806244,
"learning_rate": 8e-05,
"loss": 1.6621,
"step": 969
},
{
"epoch": 0.23569432632729923,
"grad_norm": 0.40638554096221924,
"learning_rate": 8e-05,
"loss": 1.7972,
"step": 970
},
{
"epoch": 0.23593731016887376,
"grad_norm": 0.41580459475517273,
"learning_rate": 8e-05,
"loss": 1.7458,
"step": 971
},
{
"epoch": 0.2361802940104483,
"grad_norm": 0.41007718443870544,
"learning_rate": 8e-05,
"loss": 1.6175,
"step": 972
},
{
"epoch": 0.23642327785202283,
"grad_norm": 0.38843604922294617,
"learning_rate": 8e-05,
"loss": 1.6463,
"step": 973
},
{
"epoch": 0.23666626169359736,
"grad_norm": 0.4144339859485626,
"learning_rate": 8e-05,
"loss": 1.8184,
"step": 974
},
{
"epoch": 0.2369092455351719,
"grad_norm": 0.4222639799118042,
"learning_rate": 8e-05,
"loss": 1.6873,
"step": 975
},
{
"epoch": 0.23715222937674646,
"grad_norm": 0.428195983171463,
"learning_rate": 8e-05,
"loss": 1.6401,
"step": 976
},
{
"epoch": 0.237395213218321,
"grad_norm": 0.425357460975647,
"learning_rate": 8e-05,
"loss": 1.7286,
"step": 977
},
{
"epoch": 0.23763819705989553,
"grad_norm": 0.42763423919677734,
"learning_rate": 8e-05,
"loss": 1.8252,
"step": 978
},
{
"epoch": 0.23788118090147006,
"grad_norm": 0.42000171542167664,
"learning_rate": 8e-05,
"loss": 1.7491,
"step": 979
},
{
"epoch": 0.2381241647430446,
"grad_norm": 0.4197124242782593,
"learning_rate": 8e-05,
"loss": 1.7387,
"step": 980
},
{
"epoch": 0.23836714858461913,
"grad_norm": 0.42555204033851624,
"learning_rate": 8e-05,
"loss": 1.6899,
"step": 981
},
{
"epoch": 0.23861013242619367,
"grad_norm": 0.42190057039260864,
"learning_rate": 8e-05,
"loss": 1.6623,
"step": 982
},
{
"epoch": 0.2388531162677682,
"grad_norm": 0.41451454162597656,
"learning_rate": 8e-05,
"loss": 1.6618,
"step": 983
},
{
"epoch": 0.23909610010934274,
"grad_norm": 0.4178244471549988,
"learning_rate": 8e-05,
"loss": 1.7211,
"step": 984
},
{
"epoch": 0.23933908395091727,
"grad_norm": 0.42830926179885864,
"learning_rate": 8e-05,
"loss": 1.8328,
"step": 985
},
{
"epoch": 0.2395820677924918,
"grad_norm": 0.42744654417037964,
"learning_rate": 8e-05,
"loss": 1.7594,
"step": 986
},
{
"epoch": 0.23982505163406634,
"grad_norm": 0.4242519736289978,
"learning_rate": 8e-05,
"loss": 1.6771,
"step": 987
},
{
"epoch": 0.24006803547564087,
"grad_norm": 0.41713517904281616,
"learning_rate": 8e-05,
"loss": 1.7097,
"step": 988
},
{
"epoch": 0.2403110193172154,
"grad_norm": 0.40585222840309143,
"learning_rate": 8e-05,
"loss": 1.7413,
"step": 989
},
{
"epoch": 0.24055400315878994,
"grad_norm": 0.4133054316043854,
"learning_rate": 8e-05,
"loss": 1.772,
"step": 990
},
{
"epoch": 0.24079698700036448,
"grad_norm": 0.39868175983428955,
"learning_rate": 8e-05,
"loss": 1.6509,
"step": 991
},
{
"epoch": 0.241039970841939,
"grad_norm": 0.40420007705688477,
"learning_rate": 8e-05,
"loss": 1.6442,
"step": 992
},
{
"epoch": 0.24128295468351355,
"grad_norm": 0.41098877787590027,
"learning_rate": 8e-05,
"loss": 1.6195,
"step": 993
},
{
"epoch": 0.24152593852508808,
"grad_norm": 0.40686550736427307,
"learning_rate": 8e-05,
"loss": 1.665,
"step": 994
},
{
"epoch": 0.24176892236666261,
"grad_norm": 0.43064266443252563,
"learning_rate": 8e-05,
"loss": 1.6133,
"step": 995
},
{
"epoch": 0.24201190620823715,
"grad_norm": 0.40894436836242676,
"learning_rate": 8e-05,
"loss": 1.7699,
"step": 996
},
{
"epoch": 0.24225489004981168,
"grad_norm": 0.405504047870636,
"learning_rate": 8e-05,
"loss": 1.6634,
"step": 997
},
{
"epoch": 0.24249787389138622,
"grad_norm": 0.39840230345726013,
"learning_rate": 8e-05,
"loss": 1.5641,
"step": 998
},
{
"epoch": 0.24274085773296075,
"grad_norm": 0.4055247902870178,
"learning_rate": 8e-05,
"loss": 1.657,
"step": 999
},
{
"epoch": 0.2429838415745353,
"grad_norm": 0.4019063711166382,
"learning_rate": 8e-05,
"loss": 1.6462,
"step": 1000
},
{
"epoch": 0.24322682541610982,
"grad_norm": 0.4296441376209259,
"learning_rate": 8e-05,
"loss": 1.7337,
"step": 1001
},
{
"epoch": 0.24346980925768436,
"grad_norm": 0.43681925535202026,
"learning_rate": 8e-05,
"loss": 1.7556,
"step": 1002
},
{
"epoch": 0.2437127930992589,
"grad_norm": 0.42198410630226135,
"learning_rate": 8e-05,
"loss": 1.6575,
"step": 1003
},
{
"epoch": 0.24395577694083342,
"grad_norm": 0.4400230050086975,
"learning_rate": 8e-05,
"loss": 1.8853,
"step": 1004
},
{
"epoch": 0.24419876078240796,
"grad_norm": 0.4218568503856659,
"learning_rate": 8e-05,
"loss": 1.7897,
"step": 1005
},
{
"epoch": 0.2444417446239825,
"grad_norm": 0.4169543981552124,
"learning_rate": 8e-05,
"loss": 1.6215,
"step": 1006
},
{
"epoch": 0.24468472846555703,
"grad_norm": 0.3962773084640503,
"learning_rate": 8e-05,
"loss": 1.6126,
"step": 1007
},
{
"epoch": 0.24492771230713156,
"grad_norm": 0.4171653687953949,
"learning_rate": 8e-05,
"loss": 1.7158,
"step": 1008
},
{
"epoch": 0.24517069614870612,
"grad_norm": 0.3938234746456146,
"learning_rate": 8e-05,
"loss": 1.5476,
"step": 1009
},
{
"epoch": 0.24541367999028066,
"grad_norm": 0.426641583442688,
"learning_rate": 8e-05,
"loss": 1.7535,
"step": 1010
},
{
"epoch": 0.2456566638318552,
"grad_norm": 0.44361692667007446,
"learning_rate": 8e-05,
"loss": 1.7383,
"step": 1011
},
{
"epoch": 0.24589964767342973,
"grad_norm": 0.43024498224258423,
"learning_rate": 8e-05,
"loss": 1.8564,
"step": 1012
},
{
"epoch": 0.24614263151500426,
"grad_norm": 0.39949142932891846,
"learning_rate": 8e-05,
"loss": 1.7309,
"step": 1013
},
{
"epoch": 0.2463856153565788,
"grad_norm": 0.39729544520378113,
"learning_rate": 8e-05,
"loss": 1.6329,
"step": 1014
},
{
"epoch": 0.24662859919815333,
"grad_norm": 0.406109094619751,
"learning_rate": 8e-05,
"loss": 1.7481,
"step": 1015
},
{
"epoch": 0.24687158303972787,
"grad_norm": 0.40192610025405884,
"learning_rate": 8e-05,
"loss": 1.6502,
"step": 1016
},
{
"epoch": 0.2471145668813024,
"grad_norm": 0.40629956126213074,
"learning_rate": 8e-05,
"loss": 1.7185,
"step": 1017
},
{
"epoch": 0.24735755072287693,
"grad_norm": 0.4411627948284149,
"learning_rate": 8e-05,
"loss": 1.8105,
"step": 1018
},
{
"epoch": 0.24760053456445147,
"grad_norm": 0.4106011986732483,
"learning_rate": 8e-05,
"loss": 1.6672,
"step": 1019
},
{
"epoch": 0.247843518406026,
"grad_norm": 0.4224635660648346,
"learning_rate": 8e-05,
"loss": 1.8233,
"step": 1020
},
{
"epoch": 0.24808650224760054,
"grad_norm": 0.4095596969127655,
"learning_rate": 8e-05,
"loss": 1.7653,
"step": 1021
},
{
"epoch": 0.24832948608917507,
"grad_norm": 0.41897109150886536,
"learning_rate": 8e-05,
"loss": 1.7584,
"step": 1022
},
{
"epoch": 0.2485724699307496,
"grad_norm": 0.42478346824645996,
"learning_rate": 8e-05,
"loss": 1.7572,
"step": 1023
},
{
"epoch": 0.24881545377232414,
"grad_norm": 0.42339861392974854,
"learning_rate": 8e-05,
"loss": 1.7448,
"step": 1024
},
{
"epoch": 0.24905843761389868,
"grad_norm": 0.3910640478134155,
"learning_rate": 8e-05,
"loss": 1.4181,
"step": 1025
},
{
"epoch": 0.2493014214554732,
"grad_norm": 0.41935157775878906,
"learning_rate": 8e-05,
"loss": 1.7321,
"step": 1026
},
{
"epoch": 0.24954440529704774,
"grad_norm": 0.4243759512901306,
"learning_rate": 8e-05,
"loss": 1.7001,
"step": 1027
},
{
"epoch": 0.24978738913862228,
"grad_norm": 0.40742623805999756,
"learning_rate": 8e-05,
"loss": 1.6915,
"step": 1028
},
{
"epoch": 0.2500303729801968,
"grad_norm": 0.3955259323120117,
"learning_rate": 8e-05,
"loss": 1.6848,
"step": 1029
},
{
"epoch": 0.2502733568217714,
"grad_norm": 0.38137251138687134,
"learning_rate": 8e-05,
"loss": 1.4792,
"step": 1030
},
{
"epoch": 0.2505163406633459,
"grad_norm": 0.41162389516830444,
"learning_rate": 8e-05,
"loss": 1.8394,
"step": 1031
},
{
"epoch": 0.25075932450492044,
"grad_norm": 0.4106503129005432,
"learning_rate": 8e-05,
"loss": 1.6944,
"step": 1032
},
{
"epoch": 0.25100230834649495,
"grad_norm": 0.4271000623703003,
"learning_rate": 8e-05,
"loss": 1.6986,
"step": 1033
},
{
"epoch": 0.2512452921880695,
"grad_norm": 0.4363470673561096,
"learning_rate": 8e-05,
"loss": 1.7393,
"step": 1034
},
{
"epoch": 0.251488276029644,
"grad_norm": 0.40387797355651855,
"learning_rate": 8e-05,
"loss": 1.7888,
"step": 1035
},
{
"epoch": 0.2517312598712186,
"grad_norm": 0.3888157904148102,
"learning_rate": 8e-05,
"loss": 1.5436,
"step": 1036
},
{
"epoch": 0.2519742437127931,
"grad_norm": 0.4226617217063904,
"learning_rate": 8e-05,
"loss": 1.7919,
"step": 1037
},
{
"epoch": 0.25221722755436765,
"grad_norm": 0.44716712832450867,
"learning_rate": 8e-05,
"loss": 1.6322,
"step": 1038
},
{
"epoch": 0.25246021139594216,
"grad_norm": 0.42776966094970703,
"learning_rate": 8e-05,
"loss": 1.7561,
"step": 1039
},
{
"epoch": 0.2527031952375167,
"grad_norm": 0.4059743583202362,
"learning_rate": 8e-05,
"loss": 1.6639,
"step": 1040
},
{
"epoch": 0.2529461790790912,
"grad_norm": 0.4273454248905182,
"learning_rate": 8e-05,
"loss": 1.7498,
"step": 1041
},
{
"epoch": 0.2531891629206658,
"grad_norm": 0.41879257559776306,
"learning_rate": 8e-05,
"loss": 1.786,
"step": 1042
},
{
"epoch": 0.2534321467622403,
"grad_norm": 0.42721250653266907,
"learning_rate": 8e-05,
"loss": 1.663,
"step": 1043
},
{
"epoch": 0.25367513060381486,
"grad_norm": 0.42117446660995483,
"learning_rate": 8e-05,
"loss": 1.6064,
"step": 1044
},
{
"epoch": 0.25391811444538936,
"grad_norm": 0.4064764678478241,
"learning_rate": 8e-05,
"loss": 1.8178,
"step": 1045
},
{
"epoch": 0.2541610982869639,
"grad_norm": 0.3869502544403076,
"learning_rate": 8e-05,
"loss": 1.5692,
"step": 1046
},
{
"epoch": 0.25440408212853843,
"grad_norm": 0.41754618287086487,
"learning_rate": 8e-05,
"loss": 1.8209,
"step": 1047
},
{
"epoch": 0.254647065970113,
"grad_norm": 0.41012975573539734,
"learning_rate": 8e-05,
"loss": 1.7034,
"step": 1048
},
{
"epoch": 0.2548900498116875,
"grad_norm": 0.39602041244506836,
"learning_rate": 8e-05,
"loss": 1.4707,
"step": 1049
},
{
"epoch": 0.25513303365326206,
"grad_norm": 0.4394192099571228,
"learning_rate": 8e-05,
"loss": 1.6524,
"step": 1050
},
{
"epoch": 0.25537601749483657,
"grad_norm": 0.4520413875579834,
"learning_rate": 8e-05,
"loss": 1.8656,
"step": 1051
},
{
"epoch": 0.25561900133641113,
"grad_norm": 0.4225905239582062,
"learning_rate": 8e-05,
"loss": 1.8081,
"step": 1052
},
{
"epoch": 0.25586198517798564,
"grad_norm": 0.4238746464252472,
"learning_rate": 8e-05,
"loss": 1.7315,
"step": 1053
},
{
"epoch": 0.2561049690195602,
"grad_norm": 0.40032291412353516,
"learning_rate": 8e-05,
"loss": 1.6789,
"step": 1054
},
{
"epoch": 0.2563479528611347,
"grad_norm": 0.3927189111709595,
"learning_rate": 8e-05,
"loss": 1.6654,
"step": 1055
},
{
"epoch": 0.25659093670270927,
"grad_norm": 0.4187318682670593,
"learning_rate": 8e-05,
"loss": 1.8698,
"step": 1056
},
{
"epoch": 0.2568339205442838,
"grad_norm": 0.43324345350265503,
"learning_rate": 8e-05,
"loss": 1.608,
"step": 1057
},
{
"epoch": 0.25707690438585834,
"grad_norm": 0.40138164162635803,
"learning_rate": 8e-05,
"loss": 1.6986,
"step": 1058
},
{
"epoch": 0.2573198882274329,
"grad_norm": 0.43308112025260925,
"learning_rate": 8e-05,
"loss": 1.7412,
"step": 1059
},
{
"epoch": 0.2575628720690074,
"grad_norm": 0.43418794870376587,
"learning_rate": 8e-05,
"loss": 1.761,
"step": 1060
},
{
"epoch": 0.25780585591058197,
"grad_norm": 0.4303111732006073,
"learning_rate": 8e-05,
"loss": 1.6774,
"step": 1061
},
{
"epoch": 0.2580488397521565,
"grad_norm": 0.4321209192276001,
"learning_rate": 8e-05,
"loss": 1.7081,
"step": 1062
},
{
"epoch": 0.25829182359373104,
"grad_norm": 0.46187859773635864,
"learning_rate": 8e-05,
"loss": 1.978,
"step": 1063
},
{
"epoch": 0.25853480743530555,
"grad_norm": 0.41593241691589355,
"learning_rate": 8e-05,
"loss": 1.6613,
"step": 1064
},
{
"epoch": 0.2587777912768801,
"grad_norm": 0.4512927830219269,
"learning_rate": 8e-05,
"loss": 1.8112,
"step": 1065
},
{
"epoch": 0.2590207751184546,
"grad_norm": 0.41697239875793457,
"learning_rate": 8e-05,
"loss": 1.5832,
"step": 1066
},
{
"epoch": 0.2592637589600292,
"grad_norm": 0.4299595355987549,
"learning_rate": 8e-05,
"loss": 1.7994,
"step": 1067
},
{
"epoch": 0.2595067428016037,
"grad_norm": 0.43223658204078674,
"learning_rate": 8e-05,
"loss": 1.8077,
"step": 1068
},
{
"epoch": 0.25974972664317825,
"grad_norm": 0.4131694436073303,
"learning_rate": 8e-05,
"loss": 1.8751,
"step": 1069
},
{
"epoch": 0.25999271048475275,
"grad_norm": 0.4038296341896057,
"learning_rate": 8e-05,
"loss": 1.6667,
"step": 1070
},
{
"epoch": 0.2602356943263273,
"grad_norm": 0.41273900866508484,
"learning_rate": 8e-05,
"loss": 1.6093,
"step": 1071
},
{
"epoch": 0.2604786781679018,
"grad_norm": 0.4254191517829895,
"learning_rate": 8e-05,
"loss": 1.7514,
"step": 1072
},
{
"epoch": 0.2607216620094764,
"grad_norm": 0.4102127254009247,
"learning_rate": 8e-05,
"loss": 1.6394,
"step": 1073
},
{
"epoch": 0.2609646458510509,
"grad_norm": 0.42967456579208374,
"learning_rate": 8e-05,
"loss": 1.8004,
"step": 1074
},
{
"epoch": 0.26120762969262545,
"grad_norm": 0.42619723081588745,
"learning_rate": 8e-05,
"loss": 1.7346,
"step": 1075
},
{
"epoch": 0.26145061353419996,
"grad_norm": 0.42867016792297363,
"learning_rate": 8e-05,
"loss": 1.6979,
"step": 1076
},
{
"epoch": 0.2616935973757745,
"grad_norm": 0.435442715883255,
"learning_rate": 8e-05,
"loss": 1.8384,
"step": 1077
},
{
"epoch": 0.26193658121734903,
"grad_norm": 0.41969239711761475,
"learning_rate": 8e-05,
"loss": 1.7434,
"step": 1078
},
{
"epoch": 0.2621795650589236,
"grad_norm": 0.4229139983654022,
"learning_rate": 8e-05,
"loss": 1.7943,
"step": 1079
},
{
"epoch": 0.2624225489004981,
"grad_norm": 0.42010724544525146,
"learning_rate": 8e-05,
"loss": 1.7332,
"step": 1080
},
{
"epoch": 0.26266553274207266,
"grad_norm": 0.4319921135902405,
"learning_rate": 8e-05,
"loss": 1.7998,
"step": 1081
},
{
"epoch": 0.26290851658364717,
"grad_norm": 0.4249647259712219,
"learning_rate": 8e-05,
"loss": 1.771,
"step": 1082
},
{
"epoch": 0.26315150042522173,
"grad_norm": 0.4056098759174347,
"learning_rate": 8e-05,
"loss": 1.4846,
"step": 1083
},
{
"epoch": 0.26339448426679624,
"grad_norm": 0.4061814546585083,
"learning_rate": 8e-05,
"loss": 1.401,
"step": 1084
},
{
"epoch": 0.2636374681083708,
"grad_norm": 0.4380955696105957,
"learning_rate": 8e-05,
"loss": 1.8282,
"step": 1085
},
{
"epoch": 0.2638804519499453,
"grad_norm": 0.4158300757408142,
"learning_rate": 8e-05,
"loss": 1.744,
"step": 1086
},
{
"epoch": 0.26412343579151987,
"grad_norm": 0.39589807391166687,
"learning_rate": 8e-05,
"loss": 1.5389,
"step": 1087
},
{
"epoch": 0.2643664196330944,
"grad_norm": 0.39479830861091614,
"learning_rate": 8e-05,
"loss": 1.6465,
"step": 1088
},
{
"epoch": 0.26460940347466894,
"grad_norm": 0.4172505736351013,
"learning_rate": 8e-05,
"loss": 1.783,
"step": 1089
},
{
"epoch": 0.26485238731624344,
"grad_norm": 0.4129602313041687,
"learning_rate": 8e-05,
"loss": 1.7944,
"step": 1090
},
{
"epoch": 0.265095371157818,
"grad_norm": 0.40379106998443604,
"learning_rate": 8e-05,
"loss": 1.6992,
"step": 1091
},
{
"epoch": 0.26533835499939257,
"grad_norm": 0.4196067154407501,
"learning_rate": 8e-05,
"loss": 1.7052,
"step": 1092
},
{
"epoch": 0.2655813388409671,
"grad_norm": 0.4030078947544098,
"learning_rate": 8e-05,
"loss": 1.6861,
"step": 1093
},
{
"epoch": 0.26582432268254164,
"grad_norm": 0.4095304608345032,
"learning_rate": 8e-05,
"loss": 1.6703,
"step": 1094
},
{
"epoch": 0.26606730652411614,
"grad_norm": 0.4298233091831207,
"learning_rate": 8e-05,
"loss": 1.7448,
"step": 1095
},
{
"epoch": 0.2663102903656907,
"grad_norm": 0.41325706243515015,
"learning_rate": 8e-05,
"loss": 1.7149,
"step": 1096
},
{
"epoch": 0.2665532742072652,
"grad_norm": 0.41717031598091125,
"learning_rate": 8e-05,
"loss": 1.6688,
"step": 1097
},
{
"epoch": 0.2667962580488398,
"grad_norm": 0.4257058799266815,
"learning_rate": 8e-05,
"loss": 1.7694,
"step": 1098
},
{
"epoch": 0.2670392418904143,
"grad_norm": 0.4161388874053955,
"learning_rate": 8e-05,
"loss": 1.6243,
"step": 1099
},
{
"epoch": 0.26728222573198884,
"grad_norm": 0.43736934661865234,
"learning_rate": 8e-05,
"loss": 1.8092,
"step": 1100
},
{
"epoch": 0.26752520957356335,
"grad_norm": 0.4007851183414459,
"learning_rate": 8e-05,
"loss": 1.5047,
"step": 1101
},
{
"epoch": 0.2677681934151379,
"grad_norm": 0.4082740843296051,
"learning_rate": 8e-05,
"loss": 1.7787,
"step": 1102
},
{
"epoch": 0.2680111772567124,
"grad_norm": 0.42005470395088196,
"learning_rate": 8e-05,
"loss": 1.7195,
"step": 1103
},
{
"epoch": 0.268254161098287,
"grad_norm": 0.4103584885597229,
"learning_rate": 8e-05,
"loss": 1.4854,
"step": 1104
},
{
"epoch": 0.2684971449398615,
"grad_norm": 0.4048874080181122,
"learning_rate": 8e-05,
"loss": 1.8195,
"step": 1105
},
{
"epoch": 0.26874012878143605,
"grad_norm": 0.4084217846393585,
"learning_rate": 8e-05,
"loss": 1.6032,
"step": 1106
},
{
"epoch": 0.26898311262301056,
"grad_norm": 0.4224699139595032,
"learning_rate": 8e-05,
"loss": 1.7122,
"step": 1107
},
{
"epoch": 0.2692260964645851,
"grad_norm": 0.43159735202789307,
"learning_rate": 8e-05,
"loss": 1.7818,
"step": 1108
},
{
"epoch": 0.2694690803061596,
"grad_norm": 0.40543919801712036,
"learning_rate": 8e-05,
"loss": 1.5697,
"step": 1109
},
{
"epoch": 0.2697120641477342,
"grad_norm": 0.4308815002441406,
"learning_rate": 8e-05,
"loss": 1.7352,
"step": 1110
},
{
"epoch": 0.2699550479893087,
"grad_norm": 0.44923198223114014,
"learning_rate": 8e-05,
"loss": 1.7749,
"step": 1111
},
{
"epoch": 0.27019803183088326,
"grad_norm": 0.42572107911109924,
"learning_rate": 8e-05,
"loss": 1.8659,
"step": 1112
},
{
"epoch": 0.27044101567245776,
"grad_norm": 0.41339996457099915,
"learning_rate": 8e-05,
"loss": 1.7747,
"step": 1113
},
{
"epoch": 0.2706839995140323,
"grad_norm": 0.4206458032131195,
"learning_rate": 8e-05,
"loss": 1.7271,
"step": 1114
},
{
"epoch": 0.27092698335560683,
"grad_norm": 0.429023414850235,
"learning_rate": 8e-05,
"loss": 1.7286,
"step": 1115
},
{
"epoch": 0.2711699671971814,
"grad_norm": 0.40702301263809204,
"learning_rate": 8e-05,
"loss": 1.79,
"step": 1116
},
{
"epoch": 0.2714129510387559,
"grad_norm": 0.4375455379486084,
"learning_rate": 8e-05,
"loss": 1.8345,
"step": 1117
},
{
"epoch": 0.27165593488033046,
"grad_norm": 0.3962376117706299,
"learning_rate": 8e-05,
"loss": 1.6394,
"step": 1118
},
{
"epoch": 0.27189891872190497,
"grad_norm": 0.40322914719581604,
"learning_rate": 8e-05,
"loss": 1.6283,
"step": 1119
},
{
"epoch": 0.27214190256347953,
"grad_norm": 0.43046557903289795,
"learning_rate": 8e-05,
"loss": 1.8043,
"step": 1120
},
{
"epoch": 0.27238488640505404,
"grad_norm": 0.4123181700706482,
"learning_rate": 8e-05,
"loss": 1.7929,
"step": 1121
},
{
"epoch": 0.2726278702466286,
"grad_norm": 0.41423794627189636,
"learning_rate": 8e-05,
"loss": 1.7222,
"step": 1122
},
{
"epoch": 0.2728708540882031,
"grad_norm": 0.40966901183128357,
"learning_rate": 8e-05,
"loss": 1.6769,
"step": 1123
},
{
"epoch": 0.27311383792977767,
"grad_norm": 0.43045124411582947,
"learning_rate": 8e-05,
"loss": 1.6193,
"step": 1124
},
{
"epoch": 0.27335682177135223,
"grad_norm": 0.41753265261650085,
"learning_rate": 8e-05,
"loss": 1.6965,
"step": 1125
},
{
"epoch": 0.27359980561292674,
"grad_norm": 0.4054257273674011,
"learning_rate": 8e-05,
"loss": 1.6824,
"step": 1126
},
{
"epoch": 0.2738427894545013,
"grad_norm": 0.4242555499076843,
"learning_rate": 8e-05,
"loss": 1.6507,
"step": 1127
},
{
"epoch": 0.2740857732960758,
"grad_norm": 0.4189755618572235,
"learning_rate": 8e-05,
"loss": 1.6478,
"step": 1128
},
{
"epoch": 0.27432875713765037,
"grad_norm": 0.4091269373893738,
"learning_rate": 8e-05,
"loss": 1.6603,
"step": 1129
},
{
"epoch": 0.2745717409792249,
"grad_norm": 0.4247717559337616,
"learning_rate": 8e-05,
"loss": 1.7505,
"step": 1130
},
{
"epoch": 0.27481472482079944,
"grad_norm": 0.4193531572818756,
"learning_rate": 8e-05,
"loss": 1.8572,
"step": 1131
},
{
"epoch": 0.27505770866237395,
"grad_norm": 0.42771103978157043,
"learning_rate": 8e-05,
"loss": 1.8101,
"step": 1132
},
{
"epoch": 0.2753006925039485,
"grad_norm": 0.4443416893482208,
"learning_rate": 8e-05,
"loss": 1.9342,
"step": 1133
},
{
"epoch": 0.275543676345523,
"grad_norm": 0.452569842338562,
"learning_rate": 8e-05,
"loss": 1.9045,
"step": 1134
},
{
"epoch": 0.2757866601870976,
"grad_norm": 0.41667863726615906,
"learning_rate": 8e-05,
"loss": 1.7451,
"step": 1135
},
{
"epoch": 0.2760296440286721,
"grad_norm": 0.4566730856895447,
"learning_rate": 8e-05,
"loss": 1.7832,
"step": 1136
},
{
"epoch": 0.27627262787024665,
"grad_norm": 0.42623674869537354,
"learning_rate": 8e-05,
"loss": 1.7397,
"step": 1137
},
{
"epoch": 0.27651561171182115,
"grad_norm": 0.4352518618106842,
"learning_rate": 8e-05,
"loss": 1.811,
"step": 1138
},
{
"epoch": 0.2767585955533957,
"grad_norm": 0.4077364206314087,
"learning_rate": 8e-05,
"loss": 1.7585,
"step": 1139
},
{
"epoch": 0.2770015793949702,
"grad_norm": 0.427761435508728,
"learning_rate": 8e-05,
"loss": 1.6655,
"step": 1140
},
{
"epoch": 0.2772445632365448,
"grad_norm": 0.41399678587913513,
"learning_rate": 8e-05,
"loss": 1.7356,
"step": 1141
},
{
"epoch": 0.2774875470781193,
"grad_norm": 0.3881222605705261,
"learning_rate": 8e-05,
"loss": 1.592,
"step": 1142
},
{
"epoch": 0.27773053091969385,
"grad_norm": 0.41863733530044556,
"learning_rate": 8e-05,
"loss": 1.7983,
"step": 1143
},
{
"epoch": 0.27797351476126836,
"grad_norm": 0.41757330298423767,
"learning_rate": 8e-05,
"loss": 1.7363,
"step": 1144
},
{
"epoch": 0.2782164986028429,
"grad_norm": 0.4343527555465698,
"learning_rate": 8e-05,
"loss": 1.7453,
"step": 1145
},
{
"epoch": 0.2784594824444174,
"grad_norm": 0.411081463098526,
"learning_rate": 8e-05,
"loss": 1.6617,
"step": 1146
},
{
"epoch": 0.278702466285992,
"grad_norm": 0.42007976770401,
"learning_rate": 8e-05,
"loss": 1.7125,
"step": 1147
},
{
"epoch": 0.2789454501275665,
"grad_norm": 0.41134604811668396,
"learning_rate": 8e-05,
"loss": 1.6123,
"step": 1148
},
{
"epoch": 0.27918843396914106,
"grad_norm": 0.41881364583969116,
"learning_rate": 8e-05,
"loss": 1.6334,
"step": 1149
},
{
"epoch": 0.27943141781071557,
"grad_norm": 0.4174276888370514,
"learning_rate": 8e-05,
"loss": 1.6526,
"step": 1150
},
{
"epoch": 0.2796744016522901,
"grad_norm": 0.4109209477901459,
"learning_rate": 8e-05,
"loss": 1.6622,
"step": 1151
},
{
"epoch": 0.27991738549386463,
"grad_norm": 0.4209733307361603,
"learning_rate": 8e-05,
"loss": 1.6243,
"step": 1152
},
{
"epoch": 0.2801603693354392,
"grad_norm": 0.43282854557037354,
"learning_rate": 8e-05,
"loss": 1.7454,
"step": 1153
},
{
"epoch": 0.2804033531770137,
"grad_norm": 0.4117317199707031,
"learning_rate": 8e-05,
"loss": 1.6901,
"step": 1154
},
{
"epoch": 0.28064633701858827,
"grad_norm": 0.44751831889152527,
"learning_rate": 8e-05,
"loss": 1.7894,
"step": 1155
},
{
"epoch": 0.28088932086016277,
"grad_norm": 0.401326447725296,
"learning_rate": 8e-05,
"loss": 1.5896,
"step": 1156
},
{
"epoch": 0.28113230470173733,
"grad_norm": 0.4252309203147888,
"learning_rate": 8e-05,
"loss": 1.7422,
"step": 1157
},
{
"epoch": 0.2813752885433119,
"grad_norm": 0.42324867844581604,
"learning_rate": 8e-05,
"loss": 1.7284,
"step": 1158
},
{
"epoch": 0.2816182723848864,
"grad_norm": 0.42442786693573,
"learning_rate": 8e-05,
"loss": 1.6773,
"step": 1159
},
{
"epoch": 0.28186125622646097,
"grad_norm": 0.39588966965675354,
"learning_rate": 8e-05,
"loss": 1.7574,
"step": 1160
},
{
"epoch": 0.2821042400680355,
"grad_norm": 0.42665570974349976,
"learning_rate": 8e-05,
"loss": 1.779,
"step": 1161
},
{
"epoch": 0.28234722390961003,
"grad_norm": 0.4189774692058563,
"learning_rate": 8e-05,
"loss": 1.6678,
"step": 1162
},
{
"epoch": 0.28259020775118454,
"grad_norm": 0.41338545083999634,
"learning_rate": 8e-05,
"loss": 1.701,
"step": 1163
},
{
"epoch": 0.2828331915927591,
"grad_norm": 0.4207904040813446,
"learning_rate": 8e-05,
"loss": 1.5652,
"step": 1164
},
{
"epoch": 0.2830761754343336,
"grad_norm": 0.4322403371334076,
"learning_rate": 8e-05,
"loss": 1.7479,
"step": 1165
},
{
"epoch": 0.2833191592759082,
"grad_norm": 0.43739262223243713,
"learning_rate": 8e-05,
"loss": 1.8341,
"step": 1166
},
{
"epoch": 0.2835621431174827,
"grad_norm": 0.4155855178833008,
"learning_rate": 8e-05,
"loss": 1.7949,
"step": 1167
},
{
"epoch": 0.28380512695905724,
"grad_norm": 0.42609381675720215,
"learning_rate": 8e-05,
"loss": 1.6388,
"step": 1168
},
{
"epoch": 0.28404811080063175,
"grad_norm": 0.4329025149345398,
"learning_rate": 8e-05,
"loss": 1.7481,
"step": 1169
},
{
"epoch": 0.2842910946422063,
"grad_norm": 0.4329528510570526,
"learning_rate": 8e-05,
"loss": 1.9396,
"step": 1170
},
{
"epoch": 0.2845340784837808,
"grad_norm": 0.44219064712524414,
"learning_rate": 8e-05,
"loss": 1.6349,
"step": 1171
},
{
"epoch": 0.2847770623253554,
"grad_norm": 0.44023966789245605,
"learning_rate": 8e-05,
"loss": 1.8167,
"step": 1172
},
{
"epoch": 0.2850200461669299,
"grad_norm": 0.4343285858631134,
"learning_rate": 8e-05,
"loss": 1.9336,
"step": 1173
},
{
"epoch": 0.28526303000850445,
"grad_norm": 0.41535404324531555,
"learning_rate": 8e-05,
"loss": 1.6894,
"step": 1174
},
{
"epoch": 0.28550601385007895,
"grad_norm": 0.4356030523777008,
"learning_rate": 8e-05,
"loss": 1.6956,
"step": 1175
},
{
"epoch": 0.2857489976916535,
"grad_norm": 0.4417020082473755,
"learning_rate": 8e-05,
"loss": 1.7016,
"step": 1176
},
{
"epoch": 0.285991981533228,
"grad_norm": 0.39354732632637024,
"learning_rate": 8e-05,
"loss": 1.725,
"step": 1177
},
{
"epoch": 0.2862349653748026,
"grad_norm": 0.4548605680465698,
"learning_rate": 8e-05,
"loss": 1.6613,
"step": 1178
},
{
"epoch": 0.2864779492163771,
"grad_norm": 0.4130997061729431,
"learning_rate": 8e-05,
"loss": 1.6241,
"step": 1179
},
{
"epoch": 0.28672093305795165,
"grad_norm": 0.4564334750175476,
"learning_rate": 8e-05,
"loss": 1.8025,
"step": 1180
},
{
"epoch": 0.28696391689952616,
"grad_norm": 0.4132091701030731,
"learning_rate": 8e-05,
"loss": 1.6924,
"step": 1181
},
{
"epoch": 0.2872069007411007,
"grad_norm": 0.43107539415359497,
"learning_rate": 8e-05,
"loss": 1.8584,
"step": 1182
},
{
"epoch": 0.28744988458267523,
"grad_norm": 0.4062747061252594,
"learning_rate": 8e-05,
"loss": 1.6204,
"step": 1183
},
{
"epoch": 0.2876928684242498,
"grad_norm": 0.4347899556159973,
"learning_rate": 8e-05,
"loss": 1.8802,
"step": 1184
},
{
"epoch": 0.2879358522658243,
"grad_norm": 0.45099252462387085,
"learning_rate": 8e-05,
"loss": 1.6991,
"step": 1185
},
{
"epoch": 0.28817883610739886,
"grad_norm": 0.41353484988212585,
"learning_rate": 8e-05,
"loss": 1.7113,
"step": 1186
},
{
"epoch": 0.28842181994897337,
"grad_norm": 0.4440639317035675,
"learning_rate": 8e-05,
"loss": 1.7266,
"step": 1187
},
{
"epoch": 0.28866480379054793,
"grad_norm": 0.4112803041934967,
"learning_rate": 8e-05,
"loss": 1.7264,
"step": 1188
},
{
"epoch": 0.28890778763212244,
"grad_norm": 0.38698825240135193,
"learning_rate": 8e-05,
"loss": 1.5702,
"step": 1189
},
{
"epoch": 0.289150771473697,
"grad_norm": 0.40388596057891846,
"learning_rate": 8e-05,
"loss": 1.6227,
"step": 1190
},
{
"epoch": 0.28939375531527156,
"grad_norm": 0.39465680718421936,
"learning_rate": 8e-05,
"loss": 1.6133,
"step": 1191
},
{
"epoch": 0.28963673915684607,
"grad_norm": 0.4267415702342987,
"learning_rate": 8e-05,
"loss": 1.7361,
"step": 1192
},
{
"epoch": 0.28987972299842063,
"grad_norm": 0.42168205976486206,
"learning_rate": 8e-05,
"loss": 1.6825,
"step": 1193
},
{
"epoch": 0.29012270683999514,
"grad_norm": 0.41416439414024353,
"learning_rate": 8e-05,
"loss": 1.7368,
"step": 1194
},
{
"epoch": 0.2903656906815697,
"grad_norm": 0.4276784658432007,
"learning_rate": 8e-05,
"loss": 1.7046,
"step": 1195
},
{
"epoch": 0.2906086745231442,
"grad_norm": 0.4125884175300598,
"learning_rate": 8e-05,
"loss": 1.5906,
"step": 1196
},
{
"epoch": 0.29085165836471877,
"grad_norm": 0.4149490296840668,
"learning_rate": 8e-05,
"loss": 1.8031,
"step": 1197
},
{
"epoch": 0.2910946422062933,
"grad_norm": 0.42758074402809143,
"learning_rate": 8e-05,
"loss": 1.7272,
"step": 1198
},
{
"epoch": 0.29133762604786784,
"grad_norm": 0.4167897701263428,
"learning_rate": 8e-05,
"loss": 1.6751,
"step": 1199
},
{
"epoch": 0.29158060988944234,
"grad_norm": 0.4239620864391327,
"learning_rate": 8e-05,
"loss": 1.5379,
"step": 1200
},
{
"epoch": 0.2918235937310169,
"grad_norm": 0.4121783673763275,
"learning_rate": 8e-05,
"loss": 1.6634,
"step": 1201
},
{
"epoch": 0.2920665775725914,
"grad_norm": 0.43166282773017883,
"learning_rate": 8e-05,
"loss": 1.6699,
"step": 1202
},
{
"epoch": 0.292309561414166,
"grad_norm": 0.442334920167923,
"learning_rate": 8e-05,
"loss": 1.8271,
"step": 1203
},
{
"epoch": 0.2925525452557405,
"grad_norm": 0.4301314055919647,
"learning_rate": 8e-05,
"loss": 1.6404,
"step": 1204
},
{
"epoch": 0.29279552909731504,
"grad_norm": 0.45724913477897644,
"learning_rate": 8e-05,
"loss": 1.7264,
"step": 1205
},
{
"epoch": 0.29303851293888955,
"grad_norm": 0.4075496792793274,
"learning_rate": 8e-05,
"loss": 1.6885,
"step": 1206
},
{
"epoch": 0.2932814967804641,
"grad_norm": 0.42279496788978577,
"learning_rate": 8e-05,
"loss": 1.8865,
"step": 1207
},
{
"epoch": 0.2935244806220386,
"grad_norm": 0.4176923930644989,
"learning_rate": 8e-05,
"loss": 1.7014,
"step": 1208
},
{
"epoch": 0.2937674644636132,
"grad_norm": 0.4202452301979065,
"learning_rate": 8e-05,
"loss": 1.7175,
"step": 1209
},
{
"epoch": 0.2940104483051877,
"grad_norm": 0.41166600584983826,
"learning_rate": 8e-05,
"loss": 1.7455,
"step": 1210
},
{
"epoch": 0.29425343214676225,
"grad_norm": 0.4162682592868805,
"learning_rate": 8e-05,
"loss": 1.7159,
"step": 1211
},
{
"epoch": 0.29449641598833676,
"grad_norm": 0.42768990993499756,
"learning_rate": 8e-05,
"loss": 1.7041,
"step": 1212
},
{
"epoch": 0.2947393998299113,
"grad_norm": 0.4126628637313843,
"learning_rate": 8e-05,
"loss": 1.8088,
"step": 1213
},
{
"epoch": 0.2949823836714858,
"grad_norm": 0.41666892170906067,
"learning_rate": 8e-05,
"loss": 1.7258,
"step": 1214
},
{
"epoch": 0.2952253675130604,
"grad_norm": 0.39374640583992004,
"learning_rate": 8e-05,
"loss": 1.5282,
"step": 1215
},
{
"epoch": 0.2954683513546349,
"grad_norm": 0.40578532218933105,
"learning_rate": 8e-05,
"loss": 1.7309,
"step": 1216
},
{
"epoch": 0.29571133519620946,
"grad_norm": 0.4230765104293823,
"learning_rate": 8e-05,
"loss": 1.7824,
"step": 1217
},
{
"epoch": 0.29595431903778396,
"grad_norm": 0.4090322256088257,
"learning_rate": 8e-05,
"loss": 1.4695,
"step": 1218
},
{
"epoch": 0.2961973028793585,
"grad_norm": 0.39290255308151245,
"learning_rate": 8e-05,
"loss": 1.6753,
"step": 1219
},
{
"epoch": 0.29644028672093303,
"grad_norm": 0.4163476228713989,
"learning_rate": 8e-05,
"loss": 1.6955,
"step": 1220
},
{
"epoch": 0.2966832705625076,
"grad_norm": 0.4184444844722748,
"learning_rate": 8e-05,
"loss": 1.6452,
"step": 1221
},
{
"epoch": 0.2969262544040821,
"grad_norm": 0.46152612566947937,
"learning_rate": 8e-05,
"loss": 1.8103,
"step": 1222
},
{
"epoch": 0.29716923824565666,
"grad_norm": 0.4153369069099426,
"learning_rate": 8e-05,
"loss": 1.6935,
"step": 1223
},
{
"epoch": 0.2974122220872312,
"grad_norm": 0.5140358209609985,
"learning_rate": 8e-05,
"loss": 1.6682,
"step": 1224
},
{
"epoch": 0.29765520592880573,
"grad_norm": 0.473117470741272,
"learning_rate": 8e-05,
"loss": 1.7152,
"step": 1225
},
{
"epoch": 0.2978981897703803,
"grad_norm": 0.4486581087112427,
"learning_rate": 8e-05,
"loss": 1.6985,
"step": 1226
},
{
"epoch": 0.2981411736119548,
"grad_norm": 0.4636811912059784,
"learning_rate": 8e-05,
"loss": 1.9135,
"step": 1227
},
{
"epoch": 0.29838415745352936,
"grad_norm": 0.4324636459350586,
"learning_rate": 8e-05,
"loss": 1.5375,
"step": 1228
},
{
"epoch": 0.29862714129510387,
"grad_norm": 0.41699549555778503,
"learning_rate": 8e-05,
"loss": 1.6287,
"step": 1229
},
{
"epoch": 0.29887012513667843,
"grad_norm": 0.42031776905059814,
"learning_rate": 8e-05,
"loss": 1.8032,
"step": 1230
},
{
"epoch": 0.29911310897825294,
"grad_norm": 0.43739062547683716,
"learning_rate": 8e-05,
"loss": 1.8043,
"step": 1231
},
{
"epoch": 0.2993560928198275,
"grad_norm": 0.4243648052215576,
"learning_rate": 8e-05,
"loss": 1.704,
"step": 1232
},
{
"epoch": 0.299599076661402,
"grad_norm": 0.40939441323280334,
"learning_rate": 8e-05,
"loss": 1.7802,
"step": 1233
},
{
"epoch": 0.29984206050297657,
"grad_norm": 0.4723411500453949,
"learning_rate": 8e-05,
"loss": 1.8328,
"step": 1234
},
{
"epoch": 0.3000850443445511,
"grad_norm": 0.4084712564945221,
"learning_rate": 8e-05,
"loss": 1.7189,
"step": 1235
},
{
"epoch": 0.30032802818612564,
"grad_norm": 0.44527482986450195,
"learning_rate": 8e-05,
"loss": 1.7545,
"step": 1236
},
{
"epoch": 0.30057101202770015,
"grad_norm": 0.40758949518203735,
"learning_rate": 8e-05,
"loss": 1.5875,
"step": 1237
},
{
"epoch": 0.3008139958692747,
"grad_norm": 0.4355928599834442,
"learning_rate": 8e-05,
"loss": 1.7816,
"step": 1238
},
{
"epoch": 0.3010569797108492,
"grad_norm": 0.4212299585342407,
"learning_rate": 8e-05,
"loss": 1.7651,
"step": 1239
},
{
"epoch": 0.3012999635524238,
"grad_norm": 0.40415745973587036,
"learning_rate": 8e-05,
"loss": 1.6109,
"step": 1240
},
{
"epoch": 0.3015429473939983,
"grad_norm": 0.43318644165992737,
"learning_rate": 8e-05,
"loss": 1.5923,
"step": 1241
},
{
"epoch": 0.30178593123557285,
"grad_norm": 0.3988751173019409,
"learning_rate": 8e-05,
"loss": 1.6214,
"step": 1242
},
{
"epoch": 0.30202891507714735,
"grad_norm": 0.46518203616142273,
"learning_rate": 8e-05,
"loss": 1.7732,
"step": 1243
},
{
"epoch": 0.3022718989187219,
"grad_norm": 0.4167146384716034,
"learning_rate": 8e-05,
"loss": 1.7137,
"step": 1244
},
{
"epoch": 0.3025148827602964,
"grad_norm": 0.4274185299873352,
"learning_rate": 8e-05,
"loss": 1.5868,
"step": 1245
},
{
"epoch": 0.302757866601871,
"grad_norm": 0.41032370924949646,
"learning_rate": 8e-05,
"loss": 1.7365,
"step": 1246
},
{
"epoch": 0.3030008504434455,
"grad_norm": 0.4476938843727112,
"learning_rate": 8e-05,
"loss": 1.6922,
"step": 1247
},
{
"epoch": 0.30324383428502005,
"grad_norm": 0.3983435034751892,
"learning_rate": 8e-05,
"loss": 1.6584,
"step": 1248
},
{
"epoch": 0.30348681812659456,
"grad_norm": 0.4122038185596466,
"learning_rate": 8e-05,
"loss": 1.7713,
"step": 1249
},
{
"epoch": 0.3037298019681691,
"grad_norm": 0.4572095572948456,
"learning_rate": 8e-05,
"loss": 1.6615,
"step": 1250
},
{
"epoch": 0.30397278580974363,
"grad_norm": 0.4236910343170166,
"learning_rate": 8e-05,
"loss": 1.7273,
"step": 1251
},
{
"epoch": 0.3042157696513182,
"grad_norm": 0.42044517397880554,
"learning_rate": 8e-05,
"loss": 1.665,
"step": 1252
},
{
"epoch": 0.3044587534928927,
"grad_norm": 0.4279285967350006,
"learning_rate": 8e-05,
"loss": 1.7749,
"step": 1253
},
{
"epoch": 0.30470173733446726,
"grad_norm": 0.43583187460899353,
"learning_rate": 8e-05,
"loss": 1.6781,
"step": 1254
},
{
"epoch": 0.30494472117604177,
"grad_norm": 0.4059058427810669,
"learning_rate": 8e-05,
"loss": 1.6672,
"step": 1255
},
{
"epoch": 0.30518770501761633,
"grad_norm": 0.43845224380493164,
"learning_rate": 8e-05,
"loss": 1.7272,
"step": 1256
},
{
"epoch": 0.3054306888591909,
"grad_norm": 0.4243011772632599,
"learning_rate": 8e-05,
"loss": 1.7039,
"step": 1257
},
{
"epoch": 0.3056736727007654,
"grad_norm": 0.43698588013648987,
"learning_rate": 8e-05,
"loss": 1.6247,
"step": 1258
},
{
"epoch": 0.30591665654233996,
"grad_norm": 0.4046631157398224,
"learning_rate": 8e-05,
"loss": 1.7473,
"step": 1259
},
{
"epoch": 0.30615964038391447,
"grad_norm": 0.4501211941242218,
"learning_rate": 8e-05,
"loss": 1.7592,
"step": 1260
},
{
"epoch": 0.30640262422548903,
"grad_norm": 0.40824440121650696,
"learning_rate": 8e-05,
"loss": 1.7088,
"step": 1261
},
{
"epoch": 0.30664560806706354,
"grad_norm": 0.3860519230365753,
"learning_rate": 8e-05,
"loss": 1.5531,
"step": 1262
},
{
"epoch": 0.3068885919086381,
"grad_norm": 0.43458759784698486,
"learning_rate": 8e-05,
"loss": 1.7604,
"step": 1263
},
{
"epoch": 0.3071315757502126,
"grad_norm": 0.4188707768917084,
"learning_rate": 8e-05,
"loss": 1.6595,
"step": 1264
},
{
"epoch": 0.30737455959178717,
"grad_norm": 0.42965972423553467,
"learning_rate": 8e-05,
"loss": 1.6683,
"step": 1265
},
{
"epoch": 0.3076175434333617,
"grad_norm": 0.45659002661705017,
"learning_rate": 8e-05,
"loss": 1.8528,
"step": 1266
},
{
"epoch": 0.30786052727493624,
"grad_norm": 0.4213427007198334,
"learning_rate": 8e-05,
"loss": 1.7526,
"step": 1267
},
{
"epoch": 0.30810351111651074,
"grad_norm": 0.44389426708221436,
"learning_rate": 8e-05,
"loss": 1.7928,
"step": 1268
},
{
"epoch": 0.3083464949580853,
"grad_norm": 0.3961242437362671,
"learning_rate": 8e-05,
"loss": 1.5397,
"step": 1269
},
{
"epoch": 0.3085894787996598,
"grad_norm": 0.389335960149765,
"learning_rate": 8e-05,
"loss": 1.5995,
"step": 1270
},
{
"epoch": 0.3088324626412344,
"grad_norm": 0.42223578691482544,
"learning_rate": 8e-05,
"loss": 1.7528,
"step": 1271
},
{
"epoch": 0.3090754464828089,
"grad_norm": 0.40308067202568054,
"learning_rate": 8e-05,
"loss": 1.5917,
"step": 1272
},
{
"epoch": 0.30931843032438344,
"grad_norm": 0.41871175169944763,
"learning_rate": 8e-05,
"loss": 1.8313,
"step": 1273
},
{
"epoch": 0.30956141416595795,
"grad_norm": 0.404159277677536,
"learning_rate": 8e-05,
"loss": 1.5875,
"step": 1274
},
{
"epoch": 0.3098043980075325,
"grad_norm": 0.4272163212299347,
"learning_rate": 8e-05,
"loss": 1.575,
"step": 1275
},
{
"epoch": 0.310047381849107,
"grad_norm": 0.4222562313079834,
"learning_rate": 8e-05,
"loss": 1.6618,
"step": 1276
},
{
"epoch": 0.3102903656906816,
"grad_norm": 0.42326620221138,
"learning_rate": 8e-05,
"loss": 1.7097,
"step": 1277
},
{
"epoch": 0.3105333495322561,
"grad_norm": 0.4882233142852783,
"learning_rate": 8e-05,
"loss": 1.7169,
"step": 1278
},
{
"epoch": 0.31077633337383065,
"grad_norm": 0.4253446161746979,
"learning_rate": 8e-05,
"loss": 1.7495,
"step": 1279
},
{
"epoch": 0.31101931721540516,
"grad_norm": 0.4078899323940277,
"learning_rate": 8e-05,
"loss": 1.7789,
"step": 1280
},
{
"epoch": 0.3112623010569797,
"grad_norm": 0.4250544011592865,
"learning_rate": 8e-05,
"loss": 1.685,
"step": 1281
},
{
"epoch": 0.3115052848985542,
"grad_norm": 0.42535340785980225,
"learning_rate": 8e-05,
"loss": 1.6879,
"step": 1282
},
{
"epoch": 0.3117482687401288,
"grad_norm": 0.42241019010543823,
"learning_rate": 8e-05,
"loss": 1.8051,
"step": 1283
},
{
"epoch": 0.3119912525817033,
"grad_norm": 0.4194417893886566,
"learning_rate": 8e-05,
"loss": 1.6419,
"step": 1284
},
{
"epoch": 0.31223423642327786,
"grad_norm": 0.42229947447776794,
"learning_rate": 8e-05,
"loss": 1.6275,
"step": 1285
},
{
"epoch": 0.31247722026485236,
"grad_norm": 0.4440034031867981,
"learning_rate": 8e-05,
"loss": 1.7319,
"step": 1286
},
{
"epoch": 0.3127202041064269,
"grad_norm": 0.4309479296207428,
"learning_rate": 8e-05,
"loss": 1.7535,
"step": 1287
},
{
"epoch": 0.31296318794800143,
"grad_norm": 0.4373340606689453,
"learning_rate": 8e-05,
"loss": 1.7831,
"step": 1288
},
{
"epoch": 0.313206171789576,
"grad_norm": 0.40313777327537537,
"learning_rate": 8e-05,
"loss": 1.6644,
"step": 1289
},
{
"epoch": 0.31344915563115056,
"grad_norm": 0.4201229214668274,
"learning_rate": 8e-05,
"loss": 1.7659,
"step": 1290
},
{
"epoch": 0.31369213947272506,
"grad_norm": 0.43222776055336,
"learning_rate": 8e-05,
"loss": 1.6677,
"step": 1291
},
{
"epoch": 0.3139351233142996,
"grad_norm": 0.40938252210617065,
"learning_rate": 8e-05,
"loss": 1.6404,
"step": 1292
},
{
"epoch": 0.31417810715587413,
"grad_norm": 0.43402594327926636,
"learning_rate": 8e-05,
"loss": 1.7937,
"step": 1293
},
{
"epoch": 0.3144210909974487,
"grad_norm": 0.4150988459587097,
"learning_rate": 8e-05,
"loss": 1.6934,
"step": 1294
},
{
"epoch": 0.3146640748390232,
"grad_norm": 0.4323060214519501,
"learning_rate": 8e-05,
"loss": 1.7252,
"step": 1295
},
{
"epoch": 0.31490705868059776,
"grad_norm": 0.4141565263271332,
"learning_rate": 8e-05,
"loss": 1.6576,
"step": 1296
},
{
"epoch": 0.31515004252217227,
"grad_norm": 0.41137444972991943,
"learning_rate": 8e-05,
"loss": 1.7266,
"step": 1297
},
{
"epoch": 0.31539302636374683,
"grad_norm": 0.40606027841567993,
"learning_rate": 8e-05,
"loss": 1.7513,
"step": 1298
},
{
"epoch": 0.31563601020532134,
"grad_norm": 0.41533470153808594,
"learning_rate": 8e-05,
"loss": 1.7189,
"step": 1299
},
{
"epoch": 0.3158789940468959,
"grad_norm": 0.39931249618530273,
"learning_rate": 8e-05,
"loss": 1.4294,
"step": 1300
},
{
"epoch": 0.3161219778884704,
"grad_norm": 0.3959195613861084,
"learning_rate": 8e-05,
"loss": 1.6631,
"step": 1301
},
{
"epoch": 0.31636496173004497,
"grad_norm": 0.42165106534957886,
"learning_rate": 8e-05,
"loss": 1.637,
"step": 1302
},
{
"epoch": 0.3166079455716195,
"grad_norm": 0.40195026993751526,
"learning_rate": 8e-05,
"loss": 1.5368,
"step": 1303
},
{
"epoch": 0.31685092941319404,
"grad_norm": 0.41727834939956665,
"learning_rate": 8e-05,
"loss": 1.5983,
"step": 1304
},
{
"epoch": 0.31709391325476854,
"grad_norm": 0.42682093381881714,
"learning_rate": 8e-05,
"loss": 1.8175,
"step": 1305
},
{
"epoch": 0.3173368970963431,
"grad_norm": 0.4234749376773834,
"learning_rate": 8e-05,
"loss": 1.6421,
"step": 1306
},
{
"epoch": 0.3175798809379176,
"grad_norm": 0.43160662055015564,
"learning_rate": 8e-05,
"loss": 1.7788,
"step": 1307
},
{
"epoch": 0.3178228647794922,
"grad_norm": 0.39189645648002625,
"learning_rate": 8e-05,
"loss": 1.607,
"step": 1308
},
{
"epoch": 0.3180658486210667,
"grad_norm": 0.41887709498405457,
"learning_rate": 8e-05,
"loss": 1.7226,
"step": 1309
},
{
"epoch": 0.31830883246264124,
"grad_norm": 0.42342591285705566,
"learning_rate": 8e-05,
"loss": 1.7036,
"step": 1310
},
{
"epoch": 0.31855181630421575,
"grad_norm": 0.4288804233074188,
"learning_rate": 8e-05,
"loss": 1.7602,
"step": 1311
},
{
"epoch": 0.3187948001457903,
"grad_norm": 0.4197126030921936,
"learning_rate": 8e-05,
"loss": 1.6785,
"step": 1312
},
{
"epoch": 0.3190377839873648,
"grad_norm": 0.43244338035583496,
"learning_rate": 8e-05,
"loss": 1.7697,
"step": 1313
},
{
"epoch": 0.3192807678289394,
"grad_norm": 0.42230987548828125,
"learning_rate": 8e-05,
"loss": 1.676,
"step": 1314
},
{
"epoch": 0.3195237516705139,
"grad_norm": 0.4212227463722229,
"learning_rate": 8e-05,
"loss": 1.7405,
"step": 1315
},
{
"epoch": 0.31976673551208845,
"grad_norm": 0.4391201436519623,
"learning_rate": 8e-05,
"loss": 1.7155,
"step": 1316
},
{
"epoch": 0.32000971935366296,
"grad_norm": 0.42658093571662903,
"learning_rate": 8e-05,
"loss": 1.7903,
"step": 1317
},
{
"epoch": 0.3202527031952375,
"grad_norm": 0.44827648997306824,
"learning_rate": 8e-05,
"loss": 1.8623,
"step": 1318
},
{
"epoch": 0.320495687036812,
"grad_norm": 0.5477005243301392,
"learning_rate": 8e-05,
"loss": 1.7167,
"step": 1319
},
{
"epoch": 0.3207386708783866,
"grad_norm": 0.4057164490222931,
"learning_rate": 8e-05,
"loss": 1.6578,
"step": 1320
},
{
"epoch": 0.3209816547199611,
"grad_norm": 0.438320517539978,
"learning_rate": 8e-05,
"loss": 1.7614,
"step": 1321
},
{
"epoch": 0.32122463856153566,
"grad_norm": 0.40908926725387573,
"learning_rate": 8e-05,
"loss": 1.6288,
"step": 1322
},
{
"epoch": 0.3214676224031102,
"grad_norm": 0.43416181206703186,
"learning_rate": 8e-05,
"loss": 1.7809,
"step": 1323
},
{
"epoch": 0.3217106062446847,
"grad_norm": 0.40206679701805115,
"learning_rate": 8e-05,
"loss": 1.5453,
"step": 1324
},
{
"epoch": 0.3219535900862593,
"grad_norm": 0.39225509762763977,
"learning_rate": 8e-05,
"loss": 1.6881,
"step": 1325
},
{
"epoch": 0.3221965739278338,
"grad_norm": 0.41371697187423706,
"learning_rate": 8e-05,
"loss": 1.7893,
"step": 1326
},
{
"epoch": 0.32243955776940836,
"grad_norm": 0.42055219411849976,
"learning_rate": 8e-05,
"loss": 1.8141,
"step": 1327
},
{
"epoch": 0.32268254161098286,
"grad_norm": 0.41531890630722046,
"learning_rate": 8e-05,
"loss": 1.7458,
"step": 1328
},
{
"epoch": 0.3229255254525574,
"grad_norm": 0.39801040291786194,
"learning_rate": 8e-05,
"loss": 1.595,
"step": 1329
},
{
"epoch": 0.32316850929413193,
"grad_norm": 0.4303327798843384,
"learning_rate": 8e-05,
"loss": 1.7254,
"step": 1330
},
{
"epoch": 0.3234114931357065,
"grad_norm": 0.40039440989494324,
"learning_rate": 8e-05,
"loss": 1.6678,
"step": 1331
},
{
"epoch": 0.323654476977281,
"grad_norm": 0.42339256405830383,
"learning_rate": 8e-05,
"loss": 1.7048,
"step": 1332
},
{
"epoch": 0.32389746081885556,
"grad_norm": 0.4127884805202484,
"learning_rate": 8e-05,
"loss": 1.7663,
"step": 1333
},
{
"epoch": 0.32414044466043007,
"grad_norm": 0.3969059884548187,
"learning_rate": 8e-05,
"loss": 1.6196,
"step": 1334
},
{
"epoch": 0.32438342850200463,
"grad_norm": 0.4179375469684601,
"learning_rate": 8e-05,
"loss": 1.6819,
"step": 1335
},
{
"epoch": 0.32462641234357914,
"grad_norm": 0.42289412021636963,
"learning_rate": 8e-05,
"loss": 1.7619,
"step": 1336
},
{
"epoch": 0.3248693961851537,
"grad_norm": 0.4024028182029724,
"learning_rate": 8e-05,
"loss": 1.6723,
"step": 1337
},
{
"epoch": 0.3251123800267282,
"grad_norm": 0.42601507902145386,
"learning_rate": 8e-05,
"loss": 1.7478,
"step": 1338
},
{
"epoch": 0.32535536386830277,
"grad_norm": 0.4272320866584778,
"learning_rate": 8e-05,
"loss": 1.7722,
"step": 1339
},
{
"epoch": 0.3255983477098773,
"grad_norm": 0.41830718517303467,
"learning_rate": 8e-05,
"loss": 1.7319,
"step": 1340
},
{
"epoch": 0.32584133155145184,
"grad_norm": 0.41066452860832214,
"learning_rate": 8e-05,
"loss": 1.7447,
"step": 1341
},
{
"epoch": 0.32608431539302635,
"grad_norm": 0.4213522970676422,
"learning_rate": 8e-05,
"loss": 1.7642,
"step": 1342
},
{
"epoch": 0.3263272992346009,
"grad_norm": 0.4362061321735382,
"learning_rate": 8e-05,
"loss": 1.9346,
"step": 1343
},
{
"epoch": 0.3265702830761754,
"grad_norm": 0.40942779183387756,
"learning_rate": 8e-05,
"loss": 1.7208,
"step": 1344
},
{
"epoch": 0.32681326691775,
"grad_norm": 0.41534700989723206,
"learning_rate": 8e-05,
"loss": 1.5951,
"step": 1345
},
{
"epoch": 0.3270562507593245,
"grad_norm": 0.43242648243904114,
"learning_rate": 8e-05,
"loss": 1.7103,
"step": 1346
},
{
"epoch": 0.32729923460089905,
"grad_norm": 0.45282021164894104,
"learning_rate": 8e-05,
"loss": 1.7501,
"step": 1347
},
{
"epoch": 0.32754221844247355,
"grad_norm": 0.41814547777175903,
"learning_rate": 8e-05,
"loss": 1.7,
"step": 1348
},
{
"epoch": 0.3277852022840481,
"grad_norm": 0.433636873960495,
"learning_rate": 8e-05,
"loss": 1.8099,
"step": 1349
},
{
"epoch": 0.3280281861256226,
"grad_norm": 0.42978718876838684,
"learning_rate": 8e-05,
"loss": 1.6812,
"step": 1350
},
{
"epoch": 0.3282711699671972,
"grad_norm": 0.4175388813018799,
"learning_rate": 8e-05,
"loss": 1.6847,
"step": 1351
},
{
"epoch": 0.3285141538087717,
"grad_norm": 0.40037235617637634,
"learning_rate": 8e-05,
"loss": 1.7295,
"step": 1352
},
{
"epoch": 0.32875713765034625,
"grad_norm": 0.4433320462703705,
"learning_rate": 8e-05,
"loss": 1.8396,
"step": 1353
},
{
"epoch": 0.32900012149192076,
"grad_norm": 0.4098261594772339,
"learning_rate": 8e-05,
"loss": 1.6605,
"step": 1354
},
{
"epoch": 0.3292431053334953,
"grad_norm": 0.4211520552635193,
"learning_rate": 8e-05,
"loss": 1.6127,
"step": 1355
},
{
"epoch": 0.3294860891750699,
"grad_norm": 0.43561968207359314,
"learning_rate": 8e-05,
"loss": 1.784,
"step": 1356
},
{
"epoch": 0.3297290730166444,
"grad_norm": 0.40554532408714294,
"learning_rate": 8e-05,
"loss": 1.4564,
"step": 1357
},
{
"epoch": 0.32997205685821895,
"grad_norm": 0.434696763753891,
"learning_rate": 8e-05,
"loss": 1.6686,
"step": 1358
},
{
"epoch": 0.33021504069979346,
"grad_norm": 0.4481261670589447,
"learning_rate": 8e-05,
"loss": 1.7727,
"step": 1359
},
{
"epoch": 0.330458024541368,
"grad_norm": 0.4229499399662018,
"learning_rate": 8e-05,
"loss": 1.7349,
"step": 1360
},
{
"epoch": 0.33070100838294253,
"grad_norm": 0.4054132103919983,
"learning_rate": 8e-05,
"loss": 1.4939,
"step": 1361
},
{
"epoch": 0.3309439922245171,
"grad_norm": 0.4409117102622986,
"learning_rate": 8e-05,
"loss": 1.9241,
"step": 1362
},
{
"epoch": 0.3311869760660916,
"grad_norm": 0.4396663010120392,
"learning_rate": 8e-05,
"loss": 1.732,
"step": 1363
},
{
"epoch": 0.33142995990766616,
"grad_norm": 0.42291024327278137,
"learning_rate": 8e-05,
"loss": 1.655,
"step": 1364
},
{
"epoch": 0.33167294374924067,
"grad_norm": 0.416900098323822,
"learning_rate": 8e-05,
"loss": 1.5643,
"step": 1365
},
{
"epoch": 0.33191592759081523,
"grad_norm": 0.41162773966789246,
"learning_rate": 8e-05,
"loss": 1.5947,
"step": 1366
},
{
"epoch": 0.33215891143238974,
"grad_norm": 0.4159378409385681,
"learning_rate": 8e-05,
"loss": 1.5593,
"step": 1367
},
{
"epoch": 0.3324018952739643,
"grad_norm": 0.4554840326309204,
"learning_rate": 8e-05,
"loss": 1.886,
"step": 1368
},
{
"epoch": 0.3326448791155388,
"grad_norm": 0.4456634521484375,
"learning_rate": 8e-05,
"loss": 1.7944,
"step": 1369
},
{
"epoch": 0.33288786295711337,
"grad_norm": 0.4127257466316223,
"learning_rate": 8e-05,
"loss": 1.6923,
"step": 1370
},
{
"epoch": 0.3331308467986879,
"grad_norm": 0.4211070239543915,
"learning_rate": 8e-05,
"loss": 1.5673,
"step": 1371
},
{
"epoch": 0.33337383064026244,
"grad_norm": 0.4010700285434723,
"learning_rate": 8e-05,
"loss": 1.6978,
"step": 1372
},
{
"epoch": 0.33361681448183694,
"grad_norm": 0.3907526135444641,
"learning_rate": 8e-05,
"loss": 1.5714,
"step": 1373
},
{
"epoch": 0.3338597983234115,
"grad_norm": 0.40844234824180603,
"learning_rate": 8e-05,
"loss": 1.7085,
"step": 1374
},
{
"epoch": 0.334102782164986,
"grad_norm": 0.4103162884712219,
"learning_rate": 8e-05,
"loss": 1.6451,
"step": 1375
},
{
"epoch": 0.3343457660065606,
"grad_norm": 0.4243805408477783,
"learning_rate": 8e-05,
"loss": 1.7556,
"step": 1376
},
{
"epoch": 0.3345887498481351,
"grad_norm": 0.4297308921813965,
"learning_rate": 8e-05,
"loss": 1.8386,
"step": 1377
},
{
"epoch": 0.33483173368970964,
"grad_norm": 0.4148638844490051,
"learning_rate": 8e-05,
"loss": 1.7938,
"step": 1378
},
{
"epoch": 0.33507471753128415,
"grad_norm": 0.4163691997528076,
"learning_rate": 8e-05,
"loss": 1.6178,
"step": 1379
},
{
"epoch": 0.3353177013728587,
"grad_norm": 0.41445234417915344,
"learning_rate": 8e-05,
"loss": 1.7608,
"step": 1380
},
{
"epoch": 0.3355606852144332,
"grad_norm": 0.4312629997730255,
"learning_rate": 8e-05,
"loss": 1.7591,
"step": 1381
},
{
"epoch": 0.3358036690560078,
"grad_norm": 0.41140955686569214,
"learning_rate": 8e-05,
"loss": 1.7743,
"step": 1382
},
{
"epoch": 0.3360466528975823,
"grad_norm": 0.4554731249809265,
"learning_rate": 8e-05,
"loss": 1.7677,
"step": 1383
},
{
"epoch": 0.33628963673915685,
"grad_norm": 0.44381895661354065,
"learning_rate": 8e-05,
"loss": 1.5892,
"step": 1384
},
{
"epoch": 0.33653262058073136,
"grad_norm": 0.4227243661880493,
"learning_rate": 8e-05,
"loss": 1.7484,
"step": 1385
},
{
"epoch": 0.3367756044223059,
"grad_norm": 0.4265260100364685,
"learning_rate": 8e-05,
"loss": 1.7558,
"step": 1386
},
{
"epoch": 0.3370185882638804,
"grad_norm": 0.4079815149307251,
"learning_rate": 8e-05,
"loss": 1.6108,
"step": 1387
},
{
"epoch": 0.337261572105455,
"grad_norm": 0.4142341911792755,
"learning_rate": 8e-05,
"loss": 1.766,
"step": 1388
},
{
"epoch": 0.33750455594702955,
"grad_norm": 0.424040824174881,
"learning_rate": 8e-05,
"loss": 1.8513,
"step": 1389
},
{
"epoch": 0.33774753978860406,
"grad_norm": 0.41974717378616333,
"learning_rate": 8e-05,
"loss": 1.6393,
"step": 1390
},
{
"epoch": 0.3379905236301786,
"grad_norm": 0.42985066771507263,
"learning_rate": 8e-05,
"loss": 1.8263,
"step": 1391
},
{
"epoch": 0.3382335074717531,
"grad_norm": 0.4142277240753174,
"learning_rate": 8e-05,
"loss": 1.7291,
"step": 1392
},
{
"epoch": 0.3384764913133277,
"grad_norm": 0.398746132850647,
"learning_rate": 8e-05,
"loss": 1.633,
"step": 1393
},
{
"epoch": 0.3387194751549022,
"grad_norm": 0.40721410512924194,
"learning_rate": 8e-05,
"loss": 1.642,
"step": 1394
},
{
"epoch": 0.33896245899647676,
"grad_norm": 0.421593576669693,
"learning_rate": 8e-05,
"loss": 1.7464,
"step": 1395
},
{
"epoch": 0.33920544283805126,
"grad_norm": 0.4076847732067108,
"learning_rate": 8e-05,
"loss": 1.7031,
"step": 1396
},
{
"epoch": 0.3394484266796258,
"grad_norm": 0.44437170028686523,
"learning_rate": 8e-05,
"loss": 1.7229,
"step": 1397
},
{
"epoch": 0.33969141052120033,
"grad_norm": 0.4115573465824127,
"learning_rate": 8e-05,
"loss": 1.65,
"step": 1398
},
{
"epoch": 0.3399343943627749,
"grad_norm": 0.42203497886657715,
"learning_rate": 8e-05,
"loss": 1.7032,
"step": 1399
},
{
"epoch": 0.3401773782043494,
"grad_norm": 0.42462655901908875,
"learning_rate": 8e-05,
"loss": 1.6971,
"step": 1400
},
{
"epoch": 0.34042036204592396,
"grad_norm": 0.42710965871810913,
"learning_rate": 8e-05,
"loss": 1.7192,
"step": 1401
},
{
"epoch": 0.34066334588749847,
"grad_norm": 0.41822290420532227,
"learning_rate": 8e-05,
"loss": 1.707,
"step": 1402
},
{
"epoch": 0.34090632972907303,
"grad_norm": 0.40744471549987793,
"learning_rate": 8e-05,
"loss": 1.6524,
"step": 1403
},
{
"epoch": 0.34114931357064754,
"grad_norm": 0.4046993553638458,
"learning_rate": 8e-05,
"loss": 1.6915,
"step": 1404
},
{
"epoch": 0.3413922974122221,
"grad_norm": 0.4165453612804413,
"learning_rate": 8e-05,
"loss": 1.6841,
"step": 1405
},
{
"epoch": 0.3416352812537966,
"grad_norm": 0.4177655279636383,
"learning_rate": 8e-05,
"loss": 1.704,
"step": 1406
},
{
"epoch": 0.34187826509537117,
"grad_norm": 0.43630239367485046,
"learning_rate": 8e-05,
"loss": 1.7462,
"step": 1407
},
{
"epoch": 0.3421212489369457,
"grad_norm": 0.43209436535835266,
"learning_rate": 8e-05,
"loss": 1.6524,
"step": 1408
},
{
"epoch": 0.34236423277852024,
"grad_norm": 0.4308101534843445,
"learning_rate": 8e-05,
"loss": 1.7389,
"step": 1409
},
{
"epoch": 0.34260721662009475,
"grad_norm": 0.40700584650039673,
"learning_rate": 8e-05,
"loss": 1.6262,
"step": 1410
},
{
"epoch": 0.3428502004616693,
"grad_norm": 0.4164145290851593,
"learning_rate": 8e-05,
"loss": 1.7506,
"step": 1411
},
{
"epoch": 0.3430931843032438,
"grad_norm": 0.40757638216018677,
"learning_rate": 8e-05,
"loss": 1.7417,
"step": 1412
},
{
"epoch": 0.3433361681448184,
"grad_norm": 0.4093579053878784,
"learning_rate": 8e-05,
"loss": 1.6372,
"step": 1413
},
{
"epoch": 0.3435791519863929,
"grad_norm": 0.41316044330596924,
"learning_rate": 8e-05,
"loss": 1.7199,
"step": 1414
},
{
"epoch": 0.34382213582796745,
"grad_norm": 0.41529348492622375,
"learning_rate": 8e-05,
"loss": 1.5377,
"step": 1415
},
{
"epoch": 0.34406511966954195,
"grad_norm": 0.410444051027298,
"learning_rate": 8e-05,
"loss": 1.6284,
"step": 1416
},
{
"epoch": 0.3443081035111165,
"grad_norm": 0.4090292453765869,
"learning_rate": 8e-05,
"loss": 1.8186,
"step": 1417
},
{
"epoch": 0.344551087352691,
"grad_norm": 0.44185879826545715,
"learning_rate": 8e-05,
"loss": 1.8422,
"step": 1418
},
{
"epoch": 0.3447940711942656,
"grad_norm": 0.4232048988342285,
"learning_rate": 8e-05,
"loss": 1.7903,
"step": 1419
},
{
"epoch": 0.3450370550358401,
"grad_norm": 0.4253289997577667,
"learning_rate": 8e-05,
"loss": 1.789,
"step": 1420
},
{
"epoch": 0.34528003887741465,
"grad_norm": 0.3991374373435974,
"learning_rate": 8e-05,
"loss": 1.5593,
"step": 1421
},
{
"epoch": 0.3455230227189892,
"grad_norm": 0.41997548937797546,
"learning_rate": 8e-05,
"loss": 1.7432,
"step": 1422
},
{
"epoch": 0.3457660065605637,
"grad_norm": 0.4185141921043396,
"learning_rate": 8e-05,
"loss": 1.6209,
"step": 1423
},
{
"epoch": 0.3460089904021383,
"grad_norm": 0.41045713424682617,
"learning_rate": 8e-05,
"loss": 1.7732,
"step": 1424
},
{
"epoch": 0.3462519742437128,
"grad_norm": 0.4068754315376282,
"learning_rate": 8e-05,
"loss": 1.6597,
"step": 1425
},
{
"epoch": 0.34649495808528735,
"grad_norm": 0.40424397587776184,
"learning_rate": 8e-05,
"loss": 1.6652,
"step": 1426
},
{
"epoch": 0.34673794192686186,
"grad_norm": 0.39907094836235046,
"learning_rate": 8e-05,
"loss": 1.645,
"step": 1427
},
{
"epoch": 0.3469809257684364,
"grad_norm": 0.42224764823913574,
"learning_rate": 8e-05,
"loss": 1.8078,
"step": 1428
},
{
"epoch": 0.34722390961001093,
"grad_norm": 0.42266082763671875,
"learning_rate": 8e-05,
"loss": 1.7104,
"step": 1429
},
{
"epoch": 0.3474668934515855,
"grad_norm": 0.4065650701522827,
"learning_rate": 8e-05,
"loss": 1.5969,
"step": 1430
},
{
"epoch": 0.34770987729316,
"grad_norm": 0.42584428191185,
"learning_rate": 8e-05,
"loss": 1.6969,
"step": 1431
},
{
"epoch": 0.34795286113473456,
"grad_norm": 0.43668845295906067,
"learning_rate": 8e-05,
"loss": 1.8497,
"step": 1432
},
{
"epoch": 0.34819584497630907,
"grad_norm": 0.422085702419281,
"learning_rate": 8e-05,
"loss": 1.5448,
"step": 1433
},
{
"epoch": 0.34843882881788363,
"grad_norm": 0.438243567943573,
"learning_rate": 8e-05,
"loss": 1.9311,
"step": 1434
},
{
"epoch": 0.34868181265945813,
"grad_norm": 0.412178099155426,
"learning_rate": 8e-05,
"loss": 1.7131,
"step": 1435
},
{
"epoch": 0.3489247965010327,
"grad_norm": 0.4246040880680084,
"learning_rate": 8e-05,
"loss": 1.7761,
"step": 1436
},
{
"epoch": 0.3491677803426072,
"grad_norm": 0.44907382130622864,
"learning_rate": 8e-05,
"loss": 1.8752,
"step": 1437
},
{
"epoch": 0.34941076418418177,
"grad_norm": 0.4161170423030853,
"learning_rate": 8e-05,
"loss": 1.8043,
"step": 1438
},
{
"epoch": 0.3496537480257563,
"grad_norm": 0.4451916813850403,
"learning_rate": 8e-05,
"loss": 1.7319,
"step": 1439
},
{
"epoch": 0.34989673186733083,
"grad_norm": 0.4271717369556427,
"learning_rate": 8e-05,
"loss": 1.7535,
"step": 1440
},
{
"epoch": 0.35013971570890534,
"grad_norm": 0.43731844425201416,
"learning_rate": 8e-05,
"loss": 1.8988,
"step": 1441
},
{
"epoch": 0.3503826995504799,
"grad_norm": 0.41044342517852783,
"learning_rate": 8e-05,
"loss": 1.6483,
"step": 1442
},
{
"epoch": 0.3506256833920544,
"grad_norm": 0.4042555093765259,
"learning_rate": 8e-05,
"loss": 1.757,
"step": 1443
},
{
"epoch": 0.350868667233629,
"grad_norm": 0.44009044766426086,
"learning_rate": 8e-05,
"loss": 1.6416,
"step": 1444
},
{
"epoch": 0.3511116510752035,
"grad_norm": 0.3960078954696655,
"learning_rate": 8e-05,
"loss": 1.5363,
"step": 1445
},
{
"epoch": 0.35135463491677804,
"grad_norm": 0.4207194745540619,
"learning_rate": 8e-05,
"loss": 1.7292,
"step": 1446
},
{
"epoch": 0.35159761875835255,
"grad_norm": 0.41871359944343567,
"learning_rate": 8e-05,
"loss": 1.6439,
"step": 1447
},
{
"epoch": 0.3518406025999271,
"grad_norm": 0.42891284823417664,
"learning_rate": 8e-05,
"loss": 1.671,
"step": 1448
},
{
"epoch": 0.3520835864415016,
"grad_norm": 0.4187850058078766,
"learning_rate": 8e-05,
"loss": 1.6498,
"step": 1449
},
{
"epoch": 0.3523265702830762,
"grad_norm": 0.4176000952720642,
"learning_rate": 8e-05,
"loss": 1.7174,
"step": 1450
},
{
"epoch": 0.3525695541246507,
"grad_norm": 0.4076887369155884,
"learning_rate": 8e-05,
"loss": 1.6618,
"step": 1451
},
{
"epoch": 0.35281253796622525,
"grad_norm": 0.4008098244667053,
"learning_rate": 8e-05,
"loss": 1.638,
"step": 1452
},
{
"epoch": 0.35305552180779975,
"grad_norm": 0.42970818281173706,
"learning_rate": 8e-05,
"loss": 1.7839,
"step": 1453
},
{
"epoch": 0.3532985056493743,
"grad_norm": 0.43289485573768616,
"learning_rate": 8e-05,
"loss": 1.6696,
"step": 1454
},
{
"epoch": 0.3535414894909489,
"grad_norm": 0.43016672134399414,
"learning_rate": 8e-05,
"loss": 1.6346,
"step": 1455
},
{
"epoch": 0.3537844733325234,
"grad_norm": 0.41671884059906006,
"learning_rate": 8e-05,
"loss": 1.7256,
"step": 1456
},
{
"epoch": 0.35402745717409795,
"grad_norm": 0.4214855432510376,
"learning_rate": 8e-05,
"loss": 1.7032,
"step": 1457
},
{
"epoch": 0.35427044101567245,
"grad_norm": 0.4305601716041565,
"learning_rate": 8e-05,
"loss": 1.804,
"step": 1458
},
{
"epoch": 0.354513424857247,
"grad_norm": 0.42480790615081787,
"learning_rate": 8e-05,
"loss": 1.7741,
"step": 1459
},
{
"epoch": 0.3547564086988215,
"grad_norm": 0.41388338804244995,
"learning_rate": 8e-05,
"loss": 1.714,
"step": 1460
},
{
"epoch": 0.3549993925403961,
"grad_norm": 0.418966680765152,
"learning_rate": 8e-05,
"loss": 1.7532,
"step": 1461
},
{
"epoch": 0.3552423763819706,
"grad_norm": 0.4355233907699585,
"learning_rate": 8e-05,
"loss": 1.6545,
"step": 1462
},
{
"epoch": 0.35548536022354515,
"grad_norm": 0.45201870799064636,
"learning_rate": 8e-05,
"loss": 1.7212,
"step": 1463
},
{
"epoch": 0.35572834406511966,
"grad_norm": 0.43834418058395386,
"learning_rate": 8e-05,
"loss": 1.8601,
"step": 1464
},
{
"epoch": 0.3559713279066942,
"grad_norm": 0.4327118694782257,
"learning_rate": 8e-05,
"loss": 1.6826,
"step": 1465
},
{
"epoch": 0.35621431174826873,
"grad_norm": 0.4124552011489868,
"learning_rate": 8e-05,
"loss": 1.6582,
"step": 1466
},
{
"epoch": 0.3564572955898433,
"grad_norm": 0.4168725907802582,
"learning_rate": 8e-05,
"loss": 1.6836,
"step": 1467
},
{
"epoch": 0.3567002794314178,
"grad_norm": 0.4121659994125366,
"learning_rate": 8e-05,
"loss": 1.672,
"step": 1468
},
{
"epoch": 0.35694326327299236,
"grad_norm": 0.40972062945365906,
"learning_rate": 8e-05,
"loss": 1.7033,
"step": 1469
},
{
"epoch": 0.35718624711456687,
"grad_norm": 0.43188977241516113,
"learning_rate": 8e-05,
"loss": 1.7234,
"step": 1470
},
{
"epoch": 0.35742923095614143,
"grad_norm": 0.4164832532405853,
"learning_rate": 8e-05,
"loss": 1.5865,
"step": 1471
},
{
"epoch": 0.35767221479771594,
"grad_norm": 0.41471636295318604,
"learning_rate": 8e-05,
"loss": 1.7707,
"step": 1472
},
{
"epoch": 0.3579151986392905,
"grad_norm": 0.42676058411598206,
"learning_rate": 8e-05,
"loss": 1.7083,
"step": 1473
},
{
"epoch": 0.358158182480865,
"grad_norm": 0.4020005464553833,
"learning_rate": 8e-05,
"loss": 1.4822,
"step": 1474
},
{
"epoch": 0.35840116632243957,
"grad_norm": 0.43297043442726135,
"learning_rate": 8e-05,
"loss": 1.6205,
"step": 1475
},
{
"epoch": 0.3586441501640141,
"grad_norm": 0.4124680459499359,
"learning_rate": 8e-05,
"loss": 1.6264,
"step": 1476
},
{
"epoch": 0.35888713400558864,
"grad_norm": 0.42065784335136414,
"learning_rate": 8e-05,
"loss": 1.6895,
"step": 1477
},
{
"epoch": 0.35913011784716314,
"grad_norm": 0.439014196395874,
"learning_rate": 8e-05,
"loss": 1.728,
"step": 1478
},
{
"epoch": 0.3593731016887377,
"grad_norm": 0.41923463344573975,
"learning_rate": 8e-05,
"loss": 1.6757,
"step": 1479
},
{
"epoch": 0.3596160855303122,
"grad_norm": 0.40060892701148987,
"learning_rate": 8e-05,
"loss": 1.6887,
"step": 1480
},
{
"epoch": 0.3598590693718868,
"grad_norm": 0.41042670607566833,
"learning_rate": 8e-05,
"loss": 1.7557,
"step": 1481
},
{
"epoch": 0.3601020532134613,
"grad_norm": 0.45834240317344666,
"learning_rate": 8e-05,
"loss": 1.8234,
"step": 1482
},
{
"epoch": 0.36034503705503584,
"grad_norm": 0.44385936856269836,
"learning_rate": 8e-05,
"loss": 1.7123,
"step": 1483
},
{
"epoch": 0.36058802089661035,
"grad_norm": 0.4490148425102234,
"learning_rate": 8e-05,
"loss": 1.7722,
"step": 1484
},
{
"epoch": 0.3608310047381849,
"grad_norm": 0.44380930066108704,
"learning_rate": 8e-05,
"loss": 1.6149,
"step": 1485
},
{
"epoch": 0.3610739885797594,
"grad_norm": 0.45426803827285767,
"learning_rate": 8e-05,
"loss": 1.7943,
"step": 1486
},
{
"epoch": 0.361316972421334,
"grad_norm": 0.42598724365234375,
"learning_rate": 8e-05,
"loss": 1.8526,
"step": 1487
},
{
"epoch": 0.36155995626290854,
"grad_norm": 0.41523346304893494,
"learning_rate": 8e-05,
"loss": 1.671,
"step": 1488
},
{
"epoch": 0.36180294010448305,
"grad_norm": 0.40627723932266235,
"learning_rate": 8e-05,
"loss": 1.5897,
"step": 1489
},
{
"epoch": 0.3620459239460576,
"grad_norm": 0.44283729791641235,
"learning_rate": 8e-05,
"loss": 1.7863,
"step": 1490
},
{
"epoch": 0.3622889077876321,
"grad_norm": 0.42527687549591064,
"learning_rate": 8e-05,
"loss": 1.7138,
"step": 1491
},
{
"epoch": 0.3625318916292067,
"grad_norm": 0.4094756543636322,
"learning_rate": 8e-05,
"loss": 1.7009,
"step": 1492
},
{
"epoch": 0.3627748754707812,
"grad_norm": 0.44145467877388,
"learning_rate": 8e-05,
"loss": 1.7635,
"step": 1493
},
{
"epoch": 0.36301785931235575,
"grad_norm": 0.41469717025756836,
"learning_rate": 8e-05,
"loss": 1.6229,
"step": 1494
},
{
"epoch": 0.36326084315393026,
"grad_norm": 0.4286789298057556,
"learning_rate": 8e-05,
"loss": 1.6875,
"step": 1495
},
{
"epoch": 0.3635038269955048,
"grad_norm": 0.39633095264434814,
"learning_rate": 8e-05,
"loss": 1.6631,
"step": 1496
},
{
"epoch": 0.3637468108370793,
"grad_norm": 0.44227567315101624,
"learning_rate": 8e-05,
"loss": 1.7327,
"step": 1497
},
{
"epoch": 0.3639897946786539,
"grad_norm": 0.42462319135665894,
"learning_rate": 8e-05,
"loss": 1.6027,
"step": 1498
},
{
"epoch": 0.3642327785202284,
"grad_norm": 0.5036365985870361,
"learning_rate": 8e-05,
"loss": 1.7893,
"step": 1499
},
{
"epoch": 0.36447576236180296,
"grad_norm": 0.40516528487205505,
"learning_rate": 8e-05,
"loss": 1.6793,
"step": 1500
},
{
"epoch": 0.36471874620337746,
"grad_norm": 0.4498935341835022,
"learning_rate": 8e-05,
"loss": 1.8523,
"step": 1501
},
{
"epoch": 0.364961730044952,
"grad_norm": 0.44853898882865906,
"learning_rate": 8e-05,
"loss": 1.6119,
"step": 1502
},
{
"epoch": 0.36520471388652653,
"grad_norm": 0.43535977602005005,
"learning_rate": 8e-05,
"loss": 1.7583,
"step": 1503
},
{
"epoch": 0.3654476977281011,
"grad_norm": 0.4103722870349884,
"learning_rate": 8e-05,
"loss": 1.6411,
"step": 1504
},
{
"epoch": 0.3656906815696756,
"grad_norm": 0.4043563902378082,
"learning_rate": 8e-05,
"loss": 1.6256,
"step": 1505
},
{
"epoch": 0.36593366541125016,
"grad_norm": 0.4265104830265045,
"learning_rate": 8e-05,
"loss": 1.5655,
"step": 1506
},
{
"epoch": 0.36617664925282467,
"grad_norm": 0.45437732338905334,
"learning_rate": 8e-05,
"loss": 1.7729,
"step": 1507
},
{
"epoch": 0.36641963309439923,
"grad_norm": 0.4220513105392456,
"learning_rate": 8e-05,
"loss": 1.6852,
"step": 1508
},
{
"epoch": 0.36666261693597374,
"grad_norm": 0.4355618953704834,
"learning_rate": 8e-05,
"loss": 1.6458,
"step": 1509
},
{
"epoch": 0.3669056007775483,
"grad_norm": 0.3921411335468292,
"learning_rate": 8e-05,
"loss": 1.6566,
"step": 1510
},
{
"epoch": 0.3671485846191228,
"grad_norm": 0.4296509921550751,
"learning_rate": 8e-05,
"loss": 1.8064,
"step": 1511
},
{
"epoch": 0.36739156846069737,
"grad_norm": 0.42648905515670776,
"learning_rate": 8e-05,
"loss": 1.7684,
"step": 1512
},
{
"epoch": 0.3676345523022719,
"grad_norm": 0.41103217005729675,
"learning_rate": 8e-05,
"loss": 1.5205,
"step": 1513
},
{
"epoch": 0.36787753614384644,
"grad_norm": 0.4502837657928467,
"learning_rate": 8e-05,
"loss": 1.8414,
"step": 1514
},
{
"epoch": 0.36812051998542095,
"grad_norm": 0.4428488612174988,
"learning_rate": 8e-05,
"loss": 1.676,
"step": 1515
},
{
"epoch": 0.3683635038269955,
"grad_norm": 0.434501588344574,
"learning_rate": 8e-05,
"loss": 1.7028,
"step": 1516
},
{
"epoch": 0.36860648766857,
"grad_norm": 0.429174542427063,
"learning_rate": 8e-05,
"loss": 1.7448,
"step": 1517
},
{
"epoch": 0.3688494715101446,
"grad_norm": 0.4316585063934326,
"learning_rate": 8e-05,
"loss": 1.6992,
"step": 1518
},
{
"epoch": 0.3690924553517191,
"grad_norm": 0.43607017397880554,
"learning_rate": 8e-05,
"loss": 1.681,
"step": 1519
},
{
"epoch": 0.36933543919329365,
"grad_norm": 0.3982023596763611,
"learning_rate": 8e-05,
"loss": 1.558,
"step": 1520
},
{
"epoch": 0.3695784230348682,
"grad_norm": 0.4116176962852478,
"learning_rate": 8e-05,
"loss": 1.6175,
"step": 1521
},
{
"epoch": 0.3698214068764427,
"grad_norm": 0.45402878522872925,
"learning_rate": 8e-05,
"loss": 1.7713,
"step": 1522
},
{
"epoch": 0.3700643907180173,
"grad_norm": 0.42919039726257324,
"learning_rate": 8e-05,
"loss": 1.7244,
"step": 1523
},
{
"epoch": 0.3703073745595918,
"grad_norm": 0.42670485377311707,
"learning_rate": 8e-05,
"loss": 1.7326,
"step": 1524
},
{
"epoch": 0.37055035840116635,
"grad_norm": 0.4170015752315521,
"learning_rate": 8e-05,
"loss": 1.707,
"step": 1525
},
{
"epoch": 0.37079334224274085,
"grad_norm": 0.42701733112335205,
"learning_rate": 8e-05,
"loss": 1.7559,
"step": 1526
},
{
"epoch": 0.3710363260843154,
"grad_norm": 0.4310395419597626,
"learning_rate": 8e-05,
"loss": 1.6319,
"step": 1527
},
{
"epoch": 0.3712793099258899,
"grad_norm": 0.40354016423225403,
"learning_rate": 8e-05,
"loss": 1.6646,
"step": 1528
},
{
"epoch": 0.3715222937674645,
"grad_norm": 0.4501798748970032,
"learning_rate": 8e-05,
"loss": 1.7569,
"step": 1529
},
{
"epoch": 0.371765277609039,
"grad_norm": 0.4462423026561737,
"learning_rate": 8e-05,
"loss": 1.8189,
"step": 1530
},
{
"epoch": 0.37200826145061355,
"grad_norm": 0.4353228211402893,
"learning_rate": 8e-05,
"loss": 1.618,
"step": 1531
},
{
"epoch": 0.37225124529218806,
"grad_norm": 0.4250049591064453,
"learning_rate": 8e-05,
"loss": 1.6773,
"step": 1532
},
{
"epoch": 0.3724942291337626,
"grad_norm": 0.4075191915035248,
"learning_rate": 8e-05,
"loss": 1.7537,
"step": 1533
},
{
"epoch": 0.37273721297533713,
"grad_norm": 0.4113391935825348,
"learning_rate": 8e-05,
"loss": 1.6407,
"step": 1534
},
{
"epoch": 0.3729801968169117,
"grad_norm": 0.41470906138420105,
"learning_rate": 8e-05,
"loss": 1.5854,
"step": 1535
},
{
"epoch": 0.3732231806584862,
"grad_norm": 0.4519250690937042,
"learning_rate": 8e-05,
"loss": 1.755,
"step": 1536
},
{
"epoch": 0.37346616450006076,
"grad_norm": 0.41602808237075806,
"learning_rate": 8e-05,
"loss": 1.6691,
"step": 1537
},
{
"epoch": 0.37370914834163527,
"grad_norm": 0.4330156147480011,
"learning_rate": 8e-05,
"loss": 1.6672,
"step": 1538
},
{
"epoch": 0.37395213218320983,
"grad_norm": 0.428617000579834,
"learning_rate": 8e-05,
"loss": 1.8051,
"step": 1539
},
{
"epoch": 0.37419511602478434,
"grad_norm": 0.42976975440979004,
"learning_rate": 8e-05,
"loss": 1.5662,
"step": 1540
},
{
"epoch": 0.3744380998663589,
"grad_norm": 0.40064191818237305,
"learning_rate": 8e-05,
"loss": 1.6227,
"step": 1541
},
{
"epoch": 0.3746810837079334,
"grad_norm": 0.4101344645023346,
"learning_rate": 8e-05,
"loss": 1.6954,
"step": 1542
},
{
"epoch": 0.37492406754950797,
"grad_norm": 0.4178948998451233,
"learning_rate": 8e-05,
"loss": 1.6523,
"step": 1543
},
{
"epoch": 0.3751670513910825,
"grad_norm": 0.4114849865436554,
"learning_rate": 8e-05,
"loss": 1.6881,
"step": 1544
},
{
"epoch": 0.37541003523265704,
"grad_norm": 0.4005047082901001,
"learning_rate": 8e-05,
"loss": 1.5517,
"step": 1545
},
{
"epoch": 0.37565301907423154,
"grad_norm": 0.4733780026435852,
"learning_rate": 8e-05,
"loss": 1.8076,
"step": 1546
},
{
"epoch": 0.3758960029158061,
"grad_norm": 0.4235011041164398,
"learning_rate": 8e-05,
"loss": 1.625,
"step": 1547
},
{
"epoch": 0.3761389867573806,
"grad_norm": 0.4334684908390045,
"learning_rate": 8e-05,
"loss": 1.747,
"step": 1548
},
{
"epoch": 0.3763819705989552,
"grad_norm": 0.42389872670173645,
"learning_rate": 8e-05,
"loss": 1.7593,
"step": 1549
},
{
"epoch": 0.3766249544405297,
"grad_norm": 0.4101676046848297,
"learning_rate": 8e-05,
"loss": 1.7444,
"step": 1550
},
{
"epoch": 0.37686793828210424,
"grad_norm": 0.41118088364601135,
"learning_rate": 8e-05,
"loss": 1.7075,
"step": 1551
},
{
"epoch": 0.37711092212367875,
"grad_norm": 0.431918203830719,
"learning_rate": 8e-05,
"loss": 1.716,
"step": 1552
},
{
"epoch": 0.3773539059652533,
"grad_norm": 0.41058656573295593,
"learning_rate": 8e-05,
"loss": 1.6153,
"step": 1553
},
{
"epoch": 0.3775968898068279,
"grad_norm": 0.4221169650554657,
"learning_rate": 8e-05,
"loss": 1.7801,
"step": 1554
},
{
"epoch": 0.3778398736484024,
"grad_norm": 0.43766945600509644,
"learning_rate": 8e-05,
"loss": 1.8801,
"step": 1555
},
{
"epoch": 0.37808285748997694,
"grad_norm": 0.4249259829521179,
"learning_rate": 8e-05,
"loss": 1.5557,
"step": 1556
},
{
"epoch": 0.37832584133155145,
"grad_norm": 0.4021548330783844,
"learning_rate": 8e-05,
"loss": 1.5998,
"step": 1557
},
{
"epoch": 0.378568825173126,
"grad_norm": 0.402353972196579,
"learning_rate": 8e-05,
"loss": 1.6979,
"step": 1558
},
{
"epoch": 0.3788118090147005,
"grad_norm": 0.42179930210113525,
"learning_rate": 8e-05,
"loss": 1.728,
"step": 1559
},
{
"epoch": 0.3790547928562751,
"grad_norm": 0.4151405394077301,
"learning_rate": 8e-05,
"loss": 1.7918,
"step": 1560
},
{
"epoch": 0.3792977766978496,
"grad_norm": 0.43462592363357544,
"learning_rate": 8e-05,
"loss": 1.5964,
"step": 1561
},
{
"epoch": 0.37954076053942415,
"grad_norm": 0.424850732088089,
"learning_rate": 8e-05,
"loss": 1.6482,
"step": 1562
},
{
"epoch": 0.37978374438099866,
"grad_norm": 0.41227251291275024,
"learning_rate": 8e-05,
"loss": 1.5886,
"step": 1563
},
{
"epoch": 0.3800267282225732,
"grad_norm": 0.4173636734485626,
"learning_rate": 8e-05,
"loss": 1.7012,
"step": 1564
},
{
"epoch": 0.3802697120641477,
"grad_norm": 0.40978628396987915,
"learning_rate": 8e-05,
"loss": 1.6677,
"step": 1565
},
{
"epoch": 0.3805126959057223,
"grad_norm": 0.40448302030563354,
"learning_rate": 8e-05,
"loss": 1.4954,
"step": 1566
},
{
"epoch": 0.3807556797472968,
"grad_norm": 0.42568641901016235,
"learning_rate": 8e-05,
"loss": 1.7504,
"step": 1567
},
{
"epoch": 0.38099866358887136,
"grad_norm": 0.4181409180164337,
"learning_rate": 8e-05,
"loss": 1.6838,
"step": 1568
},
{
"epoch": 0.38124164743044586,
"grad_norm": 0.4193849265575409,
"learning_rate": 8e-05,
"loss": 1.7605,
"step": 1569
},
{
"epoch": 0.3814846312720204,
"grad_norm": 0.4197647273540497,
"learning_rate": 8e-05,
"loss": 1.6615,
"step": 1570
},
{
"epoch": 0.38172761511359493,
"grad_norm": 0.4191533923149109,
"learning_rate": 8e-05,
"loss": 1.6292,
"step": 1571
},
{
"epoch": 0.3819705989551695,
"grad_norm": 0.4143694341182709,
"learning_rate": 8e-05,
"loss": 1.6677,
"step": 1572
},
{
"epoch": 0.382213582796744,
"grad_norm": 0.41417399048805237,
"learning_rate": 8e-05,
"loss": 1.6817,
"step": 1573
},
{
"epoch": 0.38245656663831856,
"grad_norm": 0.4393376111984253,
"learning_rate": 8e-05,
"loss": 1.7662,
"step": 1574
},
{
"epoch": 0.38269955047989307,
"grad_norm": 0.4121435284614563,
"learning_rate": 8e-05,
"loss": 1.5774,
"step": 1575
},
{
"epoch": 0.38294253432146763,
"grad_norm": 0.41746968030929565,
"learning_rate": 8e-05,
"loss": 1.7227,
"step": 1576
},
{
"epoch": 0.38318551816304214,
"grad_norm": 0.43105581402778625,
"learning_rate": 8e-05,
"loss": 1.7402,
"step": 1577
},
{
"epoch": 0.3834285020046167,
"grad_norm": 0.43507346510887146,
"learning_rate": 8e-05,
"loss": 1.7902,
"step": 1578
},
{
"epoch": 0.3836714858461912,
"grad_norm": 0.3958384692668915,
"learning_rate": 8e-05,
"loss": 1.5684,
"step": 1579
},
{
"epoch": 0.38391446968776577,
"grad_norm": 0.44736096262931824,
"learning_rate": 8e-05,
"loss": 1.8793,
"step": 1580
},
{
"epoch": 0.3841574535293403,
"grad_norm": 0.4385405480861664,
"learning_rate": 8e-05,
"loss": 1.8495,
"step": 1581
},
{
"epoch": 0.38440043737091484,
"grad_norm": 0.438483864068985,
"learning_rate": 8e-05,
"loss": 1.8367,
"step": 1582
},
{
"epoch": 0.38464342121248934,
"grad_norm": 0.4386710822582245,
"learning_rate": 8e-05,
"loss": 1.7457,
"step": 1583
},
{
"epoch": 0.3848864050540639,
"grad_norm": 0.44526001811027527,
"learning_rate": 8e-05,
"loss": 1.7394,
"step": 1584
},
{
"epoch": 0.3851293888956384,
"grad_norm": 0.43033620715141296,
"learning_rate": 8e-05,
"loss": 1.7281,
"step": 1585
},
{
"epoch": 0.385372372737213,
"grad_norm": 0.43658843636512756,
"learning_rate": 8e-05,
"loss": 1.6915,
"step": 1586
},
{
"epoch": 0.38561535657878754,
"grad_norm": 0.4354395866394043,
"learning_rate": 8e-05,
"loss": 1.7699,
"step": 1587
},
{
"epoch": 0.38585834042036204,
"grad_norm": 0.40769627690315247,
"learning_rate": 8e-05,
"loss": 1.6185,
"step": 1588
},
{
"epoch": 0.3861013242619366,
"grad_norm": 0.4172569215297699,
"learning_rate": 8e-05,
"loss": 1.6343,
"step": 1589
},
{
"epoch": 0.3863443081035111,
"grad_norm": 0.4266487956047058,
"learning_rate": 8e-05,
"loss": 1.7551,
"step": 1590
},
{
"epoch": 0.3865872919450857,
"grad_norm": 0.4560794234275818,
"learning_rate": 8e-05,
"loss": 1.7337,
"step": 1591
},
{
"epoch": 0.3868302757866602,
"grad_norm": 0.4079170525074005,
"learning_rate": 8e-05,
"loss": 1.6886,
"step": 1592
},
{
"epoch": 0.38707325962823474,
"grad_norm": 0.43422430753707886,
"learning_rate": 8e-05,
"loss": 1.7242,
"step": 1593
},
{
"epoch": 0.38731624346980925,
"grad_norm": 0.4020366370677948,
"learning_rate": 8e-05,
"loss": 1.6267,
"step": 1594
},
{
"epoch": 0.3875592273113838,
"grad_norm": 0.42723163962364197,
"learning_rate": 8e-05,
"loss": 1.6646,
"step": 1595
},
{
"epoch": 0.3878022111529583,
"grad_norm": 0.44158118963241577,
"learning_rate": 8e-05,
"loss": 1.7811,
"step": 1596
},
{
"epoch": 0.3880451949945329,
"grad_norm": 0.3944547176361084,
"learning_rate": 8e-05,
"loss": 1.6216,
"step": 1597
},
{
"epoch": 0.3882881788361074,
"grad_norm": 0.43721675872802734,
"learning_rate": 8e-05,
"loss": 1.6909,
"step": 1598
},
{
"epoch": 0.38853116267768195,
"grad_norm": 0.44996586441993713,
"learning_rate": 8e-05,
"loss": 1.6533,
"step": 1599
},
{
"epoch": 0.38877414651925646,
"grad_norm": 0.42338284850120544,
"learning_rate": 8e-05,
"loss": 1.7383,
"step": 1600
},
{
"epoch": 0.389017130360831,
"grad_norm": 0.44629910588264465,
"learning_rate": 8e-05,
"loss": 1.7169,
"step": 1601
},
{
"epoch": 0.3892601142024055,
"grad_norm": 0.4520544111728668,
"learning_rate": 8e-05,
"loss": 1.7062,
"step": 1602
},
{
"epoch": 0.3895030980439801,
"grad_norm": 0.396907776594162,
"learning_rate": 8e-05,
"loss": 1.6522,
"step": 1603
},
{
"epoch": 0.3897460818855546,
"grad_norm": 0.4147047996520996,
"learning_rate": 8e-05,
"loss": 1.7224,
"step": 1604
},
{
"epoch": 0.38998906572712916,
"grad_norm": 0.42678162455558777,
"learning_rate": 8e-05,
"loss": 1.7471,
"step": 1605
},
{
"epoch": 0.39023204956870366,
"grad_norm": 0.42349156737327576,
"learning_rate": 8e-05,
"loss": 1.7027,
"step": 1606
},
{
"epoch": 0.3904750334102782,
"grad_norm": 0.40292665362358093,
"learning_rate": 8e-05,
"loss": 1.6614,
"step": 1607
},
{
"epoch": 0.39071801725185273,
"grad_norm": 0.44564610719680786,
"learning_rate": 8e-05,
"loss": 1.8699,
"step": 1608
},
{
"epoch": 0.3909610010934273,
"grad_norm": 0.41767212748527527,
"learning_rate": 8e-05,
"loss": 1.7606,
"step": 1609
},
{
"epoch": 0.3912039849350018,
"grad_norm": 0.4166221022605896,
"learning_rate": 8e-05,
"loss": 1.6802,
"step": 1610
},
{
"epoch": 0.39144696877657637,
"grad_norm": 0.4434347152709961,
"learning_rate": 8e-05,
"loss": 1.8043,
"step": 1611
},
{
"epoch": 0.39168995261815087,
"grad_norm": 0.4146440923213959,
"learning_rate": 8e-05,
"loss": 1.7159,
"step": 1612
},
{
"epoch": 0.39193293645972543,
"grad_norm": 0.3920593559741974,
"learning_rate": 8e-05,
"loss": 1.612,
"step": 1613
},
{
"epoch": 0.39217592030129994,
"grad_norm": 0.4225471317768097,
"learning_rate": 8e-05,
"loss": 1.6693,
"step": 1614
},
{
"epoch": 0.3924189041428745,
"grad_norm": 0.44872719049453735,
"learning_rate": 8e-05,
"loss": 1.6434,
"step": 1615
},
{
"epoch": 0.392661887984449,
"grad_norm": 0.3961496353149414,
"learning_rate": 8e-05,
"loss": 1.6164,
"step": 1616
},
{
"epoch": 0.39290487182602357,
"grad_norm": 0.38982248306274414,
"learning_rate": 8e-05,
"loss": 1.6827,
"step": 1617
},
{
"epoch": 0.3931478556675981,
"grad_norm": 0.4293288588523865,
"learning_rate": 8e-05,
"loss": 1.8845,
"step": 1618
},
{
"epoch": 0.39339083950917264,
"grad_norm": 0.4216848611831665,
"learning_rate": 8e-05,
"loss": 1.6812,
"step": 1619
},
{
"epoch": 0.3936338233507472,
"grad_norm": 0.3969649374485016,
"learning_rate": 8e-05,
"loss": 1.5817,
"step": 1620
},
{
"epoch": 0.3938768071923217,
"grad_norm": 0.4064768850803375,
"learning_rate": 8e-05,
"loss": 1.6356,
"step": 1621
},
{
"epoch": 0.39411979103389627,
"grad_norm": 0.41068586707115173,
"learning_rate": 8e-05,
"loss": 1.7272,
"step": 1622
},
{
"epoch": 0.3943627748754708,
"grad_norm": 0.4052871763706207,
"learning_rate": 8e-05,
"loss": 1.6007,
"step": 1623
},
{
"epoch": 0.39460575871704534,
"grad_norm": 0.435359925031662,
"learning_rate": 8e-05,
"loss": 1.6832,
"step": 1624
},
{
"epoch": 0.39484874255861985,
"grad_norm": 0.4205849766731262,
"learning_rate": 8e-05,
"loss": 1.7329,
"step": 1625
},
{
"epoch": 0.3950917264001944,
"grad_norm": 0.43587934970855713,
"learning_rate": 8e-05,
"loss": 1.7942,
"step": 1626
},
{
"epoch": 0.3953347102417689,
"grad_norm": 0.41418036818504333,
"learning_rate": 8e-05,
"loss": 1.6616,
"step": 1627
},
{
"epoch": 0.3955776940833435,
"grad_norm": 0.3877982795238495,
"learning_rate": 8e-05,
"loss": 1.6862,
"step": 1628
},
{
"epoch": 0.395820677924918,
"grad_norm": 0.4145828187465668,
"learning_rate": 8e-05,
"loss": 1.5951,
"step": 1629
},
{
"epoch": 0.39606366176649255,
"grad_norm": 0.4185433089733124,
"learning_rate": 8e-05,
"loss": 1.7882,
"step": 1630
},
{
"epoch": 0.39630664560806705,
"grad_norm": 0.41606849431991577,
"learning_rate": 8e-05,
"loss": 1.6905,
"step": 1631
},
{
"epoch": 0.3965496294496416,
"grad_norm": 0.4242626130580902,
"learning_rate": 8e-05,
"loss": 1.7756,
"step": 1632
},
{
"epoch": 0.3967926132912161,
"grad_norm": 0.39382022619247437,
"learning_rate": 8e-05,
"loss": 1.5491,
"step": 1633
},
{
"epoch": 0.3970355971327907,
"grad_norm": 0.4146677553653717,
"learning_rate": 8e-05,
"loss": 1.6751,
"step": 1634
},
{
"epoch": 0.3972785809743652,
"grad_norm": 0.4308668076992035,
"learning_rate": 8e-05,
"loss": 1.8234,
"step": 1635
},
{
"epoch": 0.39752156481593975,
"grad_norm": 0.39495551586151123,
"learning_rate": 8e-05,
"loss": 1.6106,
"step": 1636
},
{
"epoch": 0.39776454865751426,
"grad_norm": 0.4327457845211029,
"learning_rate": 8e-05,
"loss": 1.8254,
"step": 1637
},
{
"epoch": 0.3980075324990888,
"grad_norm": 0.4194384217262268,
"learning_rate": 8e-05,
"loss": 1.7943,
"step": 1638
},
{
"epoch": 0.39825051634066333,
"grad_norm": 0.4201095998287201,
"learning_rate": 8e-05,
"loss": 1.6589,
"step": 1639
},
{
"epoch": 0.3984935001822379,
"grad_norm": 0.42406922578811646,
"learning_rate": 8e-05,
"loss": 1.8066,
"step": 1640
},
{
"epoch": 0.3987364840238124,
"grad_norm": 0.4152491092681885,
"learning_rate": 8e-05,
"loss": 1.7096,
"step": 1641
},
{
"epoch": 0.39897946786538696,
"grad_norm": 0.4010281562805176,
"learning_rate": 8e-05,
"loss": 1.57,
"step": 1642
},
{
"epoch": 0.39922245170696147,
"grad_norm": 0.4447481036186218,
"learning_rate": 8e-05,
"loss": 1.6956,
"step": 1643
},
{
"epoch": 0.39946543554853603,
"grad_norm": 0.4013897478580475,
"learning_rate": 8e-05,
"loss": 1.6554,
"step": 1644
},
{
"epoch": 0.39970841939011054,
"grad_norm": 0.400485098361969,
"learning_rate": 8e-05,
"loss": 1.6602,
"step": 1645
},
{
"epoch": 0.3999514032316851,
"grad_norm": 0.42620760202407837,
"learning_rate": 8e-05,
"loss": 1.7478,
"step": 1646
},
{
"epoch": 0.4001943870732596,
"grad_norm": 0.45568305253982544,
"learning_rate": 8e-05,
"loss": 1.6761,
"step": 1647
},
{
"epoch": 0.40043737091483417,
"grad_norm": 0.42114564776420593,
"learning_rate": 8e-05,
"loss": 1.6134,
"step": 1648
},
{
"epoch": 0.4006803547564087,
"grad_norm": 0.4360169470310211,
"learning_rate": 8e-05,
"loss": 1.801,
"step": 1649
},
{
"epoch": 0.40092333859798324,
"grad_norm": 0.4264334738254547,
"learning_rate": 8e-05,
"loss": 1.8276,
"step": 1650
},
{
"epoch": 0.40116632243955774,
"grad_norm": 0.4771878123283386,
"learning_rate": 8e-05,
"loss": 1.9623,
"step": 1651
},
{
"epoch": 0.4014093062811323,
"grad_norm": 0.40234559774398804,
"learning_rate": 8e-05,
"loss": 1.63,
"step": 1652
},
{
"epoch": 0.40165229012270687,
"grad_norm": 0.4067186415195465,
"learning_rate": 8e-05,
"loss": 1.6945,
"step": 1653
},
{
"epoch": 0.4018952739642814,
"grad_norm": 0.4087909758090973,
"learning_rate": 8e-05,
"loss": 1.6356,
"step": 1654
},
{
"epoch": 0.40213825780585594,
"grad_norm": 0.42809879779815674,
"learning_rate": 8e-05,
"loss": 1.6883,
"step": 1655
},
{
"epoch": 0.40238124164743044,
"grad_norm": 0.4224081337451935,
"learning_rate": 8e-05,
"loss": 1.7737,
"step": 1656
},
{
"epoch": 0.402624225489005,
"grad_norm": 0.41031697392463684,
"learning_rate": 8e-05,
"loss": 1.6614,
"step": 1657
},
{
"epoch": 0.4028672093305795,
"grad_norm": 0.43192222714424133,
"learning_rate": 8e-05,
"loss": 1.7033,
"step": 1658
},
{
"epoch": 0.4031101931721541,
"grad_norm": 0.4231877624988556,
"learning_rate": 8e-05,
"loss": 1.7694,
"step": 1659
},
{
"epoch": 0.4033531770137286,
"grad_norm": 0.4189487397670746,
"learning_rate": 8e-05,
"loss": 1.723,
"step": 1660
},
{
"epoch": 0.40359616085530314,
"grad_norm": 0.43119171261787415,
"learning_rate": 8e-05,
"loss": 1.6891,
"step": 1661
},
{
"epoch": 0.40383914469687765,
"grad_norm": 0.39486396312713623,
"learning_rate": 8e-05,
"loss": 1.5963,
"step": 1662
},
{
"epoch": 0.4040821285384522,
"grad_norm": 0.42586183547973633,
"learning_rate": 8e-05,
"loss": 1.87,
"step": 1663
},
{
"epoch": 0.4043251123800267,
"grad_norm": 0.42093515396118164,
"learning_rate": 8e-05,
"loss": 1.6897,
"step": 1664
},
{
"epoch": 0.4045680962216013,
"grad_norm": 0.418782114982605,
"learning_rate": 8e-05,
"loss": 1.7326,
"step": 1665
},
{
"epoch": 0.4048110800631758,
"grad_norm": 0.43015027046203613,
"learning_rate": 8e-05,
"loss": 1.6397,
"step": 1666
},
{
"epoch": 0.40505406390475035,
"grad_norm": 0.44864341616630554,
"learning_rate": 8e-05,
"loss": 1.7804,
"step": 1667
},
{
"epoch": 0.40529704774632486,
"grad_norm": 0.41778072714805603,
"learning_rate": 8e-05,
"loss": 1.5896,
"step": 1668
},
{
"epoch": 0.4055400315878994,
"grad_norm": 0.3932303786277771,
"learning_rate": 8e-05,
"loss": 1.5931,
"step": 1669
},
{
"epoch": 0.4057830154294739,
"grad_norm": 0.41271570324897766,
"learning_rate": 8e-05,
"loss": 1.6061,
"step": 1670
},
{
"epoch": 0.4060259992710485,
"grad_norm": 0.44117751717567444,
"learning_rate": 8e-05,
"loss": 1.5682,
"step": 1671
},
{
"epoch": 0.406268983112623,
"grad_norm": 0.4247724711894989,
"learning_rate": 8e-05,
"loss": 1.7314,
"step": 1672
},
{
"epoch": 0.40651196695419756,
"grad_norm": 0.41147905588150024,
"learning_rate": 8e-05,
"loss": 1.6803,
"step": 1673
},
{
"epoch": 0.40675495079577206,
"grad_norm": 0.4455786943435669,
"learning_rate": 8e-05,
"loss": 1.7796,
"step": 1674
},
{
"epoch": 0.4069979346373466,
"grad_norm": 0.4278455674648285,
"learning_rate": 8e-05,
"loss": 1.6492,
"step": 1675
},
{
"epoch": 0.40724091847892113,
"grad_norm": 0.42609670758247375,
"learning_rate": 8e-05,
"loss": 1.6499,
"step": 1676
},
{
"epoch": 0.4074839023204957,
"grad_norm": 0.41262224316596985,
"learning_rate": 8e-05,
"loss": 1.7344,
"step": 1677
},
{
"epoch": 0.4077268861620702,
"grad_norm": 0.4027808904647827,
"learning_rate": 8e-05,
"loss": 1.5917,
"step": 1678
},
{
"epoch": 0.40796987000364476,
"grad_norm": 0.42386099696159363,
"learning_rate": 8e-05,
"loss": 1.8049,
"step": 1679
},
{
"epoch": 0.40821285384521927,
"grad_norm": 0.43816202878952026,
"learning_rate": 8e-05,
"loss": 1.6891,
"step": 1680
},
{
"epoch": 0.40845583768679383,
"grad_norm": 0.4120200276374817,
"learning_rate": 8e-05,
"loss": 1.6902,
"step": 1681
},
{
"epoch": 0.40869882152836834,
"grad_norm": 0.42139312624931335,
"learning_rate": 8e-05,
"loss": 1.6431,
"step": 1682
},
{
"epoch": 0.4089418053699429,
"grad_norm": 0.44489359855651855,
"learning_rate": 8e-05,
"loss": 1.8039,
"step": 1683
},
{
"epoch": 0.4091847892115174,
"grad_norm": 0.400413453578949,
"learning_rate": 8e-05,
"loss": 1.4418,
"step": 1684
},
{
"epoch": 0.40942777305309197,
"grad_norm": 0.45639127492904663,
"learning_rate": 8e-05,
"loss": 1.7726,
"step": 1685
},
{
"epoch": 0.40967075689466653,
"grad_norm": 0.415913462638855,
"learning_rate": 8e-05,
"loss": 1.7282,
"step": 1686
},
{
"epoch": 0.40991374073624104,
"grad_norm": 0.4201204180717468,
"learning_rate": 8e-05,
"loss": 1.7433,
"step": 1687
},
{
"epoch": 0.4101567245778156,
"grad_norm": 0.44380196928977966,
"learning_rate": 8e-05,
"loss": 1.6076,
"step": 1688
},
{
"epoch": 0.4103997084193901,
"grad_norm": 0.4186372458934784,
"learning_rate": 8e-05,
"loss": 1.6594,
"step": 1689
},
{
"epoch": 0.41064269226096467,
"grad_norm": 0.4141177535057068,
"learning_rate": 8e-05,
"loss": 1.6433,
"step": 1690
},
{
"epoch": 0.4108856761025392,
"grad_norm": 0.4486232101917267,
"learning_rate": 8e-05,
"loss": 1.8352,
"step": 1691
},
{
"epoch": 0.41112865994411374,
"grad_norm": 0.41571044921875,
"learning_rate": 8e-05,
"loss": 1.744,
"step": 1692
},
{
"epoch": 0.41137164378568825,
"grad_norm": 0.4206002950668335,
"learning_rate": 8e-05,
"loss": 1.6404,
"step": 1693
},
{
"epoch": 0.4116146276272628,
"grad_norm": 0.41686561703681946,
"learning_rate": 8e-05,
"loss": 1.6114,
"step": 1694
},
{
"epoch": 0.4118576114688373,
"grad_norm": 0.41292232275009155,
"learning_rate": 8e-05,
"loss": 1.6059,
"step": 1695
},
{
"epoch": 0.4121005953104119,
"grad_norm": 0.4010278880596161,
"learning_rate": 8e-05,
"loss": 1.6745,
"step": 1696
},
{
"epoch": 0.4123435791519864,
"grad_norm": 0.40597307682037354,
"learning_rate": 8e-05,
"loss": 1.6319,
"step": 1697
},
{
"epoch": 0.41258656299356095,
"grad_norm": 0.41981416940689087,
"learning_rate": 8e-05,
"loss": 1.7802,
"step": 1698
},
{
"epoch": 0.41282954683513545,
"grad_norm": 0.4284166991710663,
"learning_rate": 8e-05,
"loss": 1.7544,
"step": 1699
},
{
"epoch": 0.41307253067671,
"grad_norm": 0.4121367037296295,
"learning_rate": 8e-05,
"loss": 1.6648,
"step": 1700
},
{
"epoch": 0.4133155145182845,
"grad_norm": 0.40503549575805664,
"learning_rate": 8e-05,
"loss": 1.7035,
"step": 1701
},
{
"epoch": 0.4135584983598591,
"grad_norm": 0.4065505564212799,
"learning_rate": 8e-05,
"loss": 1.7314,
"step": 1702
},
{
"epoch": 0.4138014822014336,
"grad_norm": 0.397203654050827,
"learning_rate": 8e-05,
"loss": 1.6609,
"step": 1703
},
{
"epoch": 0.41404446604300815,
"grad_norm": 0.38829803466796875,
"learning_rate": 8e-05,
"loss": 1.6124,
"step": 1704
},
{
"epoch": 0.41428744988458266,
"grad_norm": 0.42386242747306824,
"learning_rate": 8e-05,
"loss": 1.7027,
"step": 1705
},
{
"epoch": 0.4145304337261572,
"grad_norm": 0.44385597109794617,
"learning_rate": 8e-05,
"loss": 1.7924,
"step": 1706
},
{
"epoch": 0.41477341756773173,
"grad_norm": 0.39557069540023804,
"learning_rate": 8e-05,
"loss": 1.6454,
"step": 1707
},
{
"epoch": 0.4150164014093063,
"grad_norm": 0.4212467074394226,
"learning_rate": 8e-05,
"loss": 1.7743,
"step": 1708
},
{
"epoch": 0.4152593852508808,
"grad_norm": 0.4524325430393219,
"learning_rate": 8e-05,
"loss": 1.7753,
"step": 1709
},
{
"epoch": 0.41550236909245536,
"grad_norm": 0.39658069610595703,
"learning_rate": 8e-05,
"loss": 1.6624,
"step": 1710
},
{
"epoch": 0.41574535293402987,
"grad_norm": 0.43072929978370667,
"learning_rate": 8e-05,
"loss": 1.607,
"step": 1711
},
{
"epoch": 0.41598833677560443,
"grad_norm": 0.4198828935623169,
"learning_rate": 8e-05,
"loss": 1.7076,
"step": 1712
},
{
"epoch": 0.41623132061717893,
"grad_norm": 0.4408133029937744,
"learning_rate": 8e-05,
"loss": 1.6348,
"step": 1713
},
{
"epoch": 0.4164743044587535,
"grad_norm": 0.4172877073287964,
"learning_rate": 8e-05,
"loss": 1.7512,
"step": 1714
},
{
"epoch": 0.416717288300328,
"grad_norm": 0.4034704566001892,
"learning_rate": 8e-05,
"loss": 1.6529,
"step": 1715
},
{
"epoch": 0.41696027214190257,
"grad_norm": 0.4334101676940918,
"learning_rate": 8e-05,
"loss": 1.6583,
"step": 1716
},
{
"epoch": 0.4172032559834771,
"grad_norm": 0.4380352795124054,
"learning_rate": 8e-05,
"loss": 1.6863,
"step": 1717
},
{
"epoch": 0.41744623982505163,
"grad_norm": 0.4561653137207031,
"learning_rate": 8e-05,
"loss": 1.6515,
"step": 1718
},
{
"epoch": 0.4176892236666262,
"grad_norm": 0.41770467162132263,
"learning_rate": 8e-05,
"loss": 1.829,
"step": 1719
},
{
"epoch": 0.4179322075082007,
"grad_norm": 0.4551490843296051,
"learning_rate": 8e-05,
"loss": 1.9915,
"step": 1720
},
{
"epoch": 0.41817519134977527,
"grad_norm": 0.4066857397556305,
"learning_rate": 8e-05,
"loss": 1.7234,
"step": 1721
},
{
"epoch": 0.4184181751913498,
"grad_norm": 0.4038218557834625,
"learning_rate": 8e-05,
"loss": 1.5606,
"step": 1722
},
{
"epoch": 0.41866115903292433,
"grad_norm": 0.4305240213871002,
"learning_rate": 8e-05,
"loss": 1.7119,
"step": 1723
},
{
"epoch": 0.41890414287449884,
"grad_norm": 0.4003041386604309,
"learning_rate": 8e-05,
"loss": 1.5079,
"step": 1724
},
{
"epoch": 0.4191471267160734,
"grad_norm": 0.39947882294654846,
"learning_rate": 8e-05,
"loss": 1.6268,
"step": 1725
},
{
"epoch": 0.4193901105576479,
"grad_norm": 0.43016764521598816,
"learning_rate": 8e-05,
"loss": 1.5147,
"step": 1726
},
{
"epoch": 0.4196330943992225,
"grad_norm": 0.4064975082874298,
"learning_rate": 8e-05,
"loss": 1.6743,
"step": 1727
},
{
"epoch": 0.419876078240797,
"grad_norm": 0.40748926997184753,
"learning_rate": 8e-05,
"loss": 1.6469,
"step": 1728
},
{
"epoch": 0.42011906208237154,
"grad_norm": 0.40836912393569946,
"learning_rate": 8e-05,
"loss": 1.5056,
"step": 1729
},
{
"epoch": 0.42036204592394605,
"grad_norm": 0.4225461483001709,
"learning_rate": 8e-05,
"loss": 1.774,
"step": 1730
},
{
"epoch": 0.4206050297655206,
"grad_norm": 0.4032735824584961,
"learning_rate": 8e-05,
"loss": 1.6572,
"step": 1731
},
{
"epoch": 0.4208480136070951,
"grad_norm": 0.44586724042892456,
"learning_rate": 8e-05,
"loss": 1.7298,
"step": 1732
},
{
"epoch": 0.4210909974486697,
"grad_norm": 0.4447050094604492,
"learning_rate": 8e-05,
"loss": 1.7359,
"step": 1733
},
{
"epoch": 0.4213339812902442,
"grad_norm": 0.4337802231311798,
"learning_rate": 8e-05,
"loss": 1.7008,
"step": 1734
},
{
"epoch": 0.42157696513181875,
"grad_norm": 0.4286479353904724,
"learning_rate": 8e-05,
"loss": 1.5806,
"step": 1735
},
{
"epoch": 0.42181994897339326,
"grad_norm": 0.41877228021621704,
"learning_rate": 8e-05,
"loss": 1.54,
"step": 1736
},
{
"epoch": 0.4220629328149678,
"grad_norm": 0.4385734498500824,
"learning_rate": 8e-05,
"loss": 1.8678,
"step": 1737
},
{
"epoch": 0.4223059166565423,
"grad_norm": 0.4281429052352905,
"learning_rate": 8e-05,
"loss": 1.581,
"step": 1738
},
{
"epoch": 0.4225489004981169,
"grad_norm": 0.4258042573928833,
"learning_rate": 8e-05,
"loss": 1.8062,
"step": 1739
},
{
"epoch": 0.4227918843396914,
"grad_norm": 0.4337453246116638,
"learning_rate": 8e-05,
"loss": 1.637,
"step": 1740
},
{
"epoch": 0.42303486818126596,
"grad_norm": 0.3949694037437439,
"learning_rate": 8e-05,
"loss": 1.6143,
"step": 1741
},
{
"epoch": 0.42327785202284046,
"grad_norm": 0.45012426376342773,
"learning_rate": 8e-05,
"loss": 1.9184,
"step": 1742
},
{
"epoch": 0.423520835864415,
"grad_norm": 0.44694384932518005,
"learning_rate": 8e-05,
"loss": 1.7543,
"step": 1743
},
{
"epoch": 0.42376381970598953,
"grad_norm": 0.4394371211528778,
"learning_rate": 8e-05,
"loss": 1.7707,
"step": 1744
},
{
"epoch": 0.4240068035475641,
"grad_norm": 0.41531896591186523,
"learning_rate": 8e-05,
"loss": 1.7072,
"step": 1745
},
{
"epoch": 0.4242497873891386,
"grad_norm": 0.3919975757598877,
"learning_rate": 8e-05,
"loss": 1.5487,
"step": 1746
},
{
"epoch": 0.42449277123071316,
"grad_norm": 0.4876977503299713,
"learning_rate": 8e-05,
"loss": 1.819,
"step": 1747
},
{
"epoch": 0.42473575507228767,
"grad_norm": 0.4127189517021179,
"learning_rate": 8e-05,
"loss": 1.7448,
"step": 1748
},
{
"epoch": 0.42497873891386223,
"grad_norm": 0.4112125635147095,
"learning_rate": 8e-05,
"loss": 1.5441,
"step": 1749
},
{
"epoch": 0.42522172275543674,
"grad_norm": 0.39865005016326904,
"learning_rate": 8e-05,
"loss": 1.6114,
"step": 1750
},
{
"epoch": 0.4254647065970113,
"grad_norm": 0.41315123438835144,
"learning_rate": 8e-05,
"loss": 1.5477,
"step": 1751
},
{
"epoch": 0.42570769043858586,
"grad_norm": 0.4037797451019287,
"learning_rate": 8e-05,
"loss": 1.6564,
"step": 1752
},
{
"epoch": 0.42595067428016037,
"grad_norm": 0.4321102201938629,
"learning_rate": 8e-05,
"loss": 1.5817,
"step": 1753
},
{
"epoch": 0.42619365812173493,
"grad_norm": 0.434779554605484,
"learning_rate": 8e-05,
"loss": 1.8621,
"step": 1754
},
{
"epoch": 0.42643664196330944,
"grad_norm": 0.398060142993927,
"learning_rate": 8e-05,
"loss": 1.7119,
"step": 1755
},
{
"epoch": 0.426679625804884,
"grad_norm": 0.4613003432750702,
"learning_rate": 8e-05,
"loss": 1.7898,
"step": 1756
},
{
"epoch": 0.4269226096464585,
"grad_norm": 0.4197688698768616,
"learning_rate": 8e-05,
"loss": 1.5502,
"step": 1757
},
{
"epoch": 0.42716559348803307,
"grad_norm": 0.38842418789863586,
"learning_rate": 8e-05,
"loss": 1.598,
"step": 1758
},
{
"epoch": 0.4274085773296076,
"grad_norm": 0.42749500274658203,
"learning_rate": 8e-05,
"loss": 1.751,
"step": 1759
},
{
"epoch": 0.42765156117118214,
"grad_norm": 0.4103221595287323,
"learning_rate": 8e-05,
"loss": 1.5711,
"step": 1760
},
{
"epoch": 0.42789454501275664,
"grad_norm": 0.4110293388366699,
"learning_rate": 8e-05,
"loss": 1.6508,
"step": 1761
},
{
"epoch": 0.4281375288543312,
"grad_norm": 0.4347650408744812,
"learning_rate": 8e-05,
"loss": 1.707,
"step": 1762
},
{
"epoch": 0.4283805126959057,
"grad_norm": 0.42236021161079407,
"learning_rate": 8e-05,
"loss": 1.6019,
"step": 1763
},
{
"epoch": 0.4286234965374803,
"grad_norm": 0.4228074252605438,
"learning_rate": 8e-05,
"loss": 1.7376,
"step": 1764
},
{
"epoch": 0.4288664803790548,
"grad_norm": 0.43777650594711304,
"learning_rate": 8e-05,
"loss": 1.5644,
"step": 1765
},
{
"epoch": 0.42910946422062934,
"grad_norm": 0.43366578221321106,
"learning_rate": 8e-05,
"loss": 1.7939,
"step": 1766
},
{
"epoch": 0.42935244806220385,
"grad_norm": 0.4141087830066681,
"learning_rate": 8e-05,
"loss": 1.688,
"step": 1767
},
{
"epoch": 0.4295954319037784,
"grad_norm": 0.40956902503967285,
"learning_rate": 8e-05,
"loss": 1.6846,
"step": 1768
},
{
"epoch": 0.4298384157453529,
"grad_norm": 0.4060921370983124,
"learning_rate": 8e-05,
"loss": 1.7002,
"step": 1769
},
{
"epoch": 0.4300813995869275,
"grad_norm": 0.4277002215385437,
"learning_rate": 8e-05,
"loss": 1.7707,
"step": 1770
},
{
"epoch": 0.430324383428502,
"grad_norm": 0.4249592423439026,
"learning_rate": 8e-05,
"loss": 1.6289,
"step": 1771
},
{
"epoch": 0.43056736727007655,
"grad_norm": 0.41829076409339905,
"learning_rate": 8e-05,
"loss": 1.6168,
"step": 1772
},
{
"epoch": 0.43081035111165106,
"grad_norm": 0.4147033393383026,
"learning_rate": 8e-05,
"loss": 1.5973,
"step": 1773
},
{
"epoch": 0.4310533349532256,
"grad_norm": 0.43540245294570923,
"learning_rate": 8e-05,
"loss": 1.8268,
"step": 1774
},
{
"epoch": 0.4312963187948001,
"grad_norm": 0.40885448455810547,
"learning_rate": 8e-05,
"loss": 1.6481,
"step": 1775
},
{
"epoch": 0.4315393026363747,
"grad_norm": 0.41620758175849915,
"learning_rate": 8e-05,
"loss": 1.5479,
"step": 1776
},
{
"epoch": 0.4317822864779492,
"grad_norm": 0.4079805314540863,
"learning_rate": 8e-05,
"loss": 1.5982,
"step": 1777
},
{
"epoch": 0.43202527031952376,
"grad_norm": 0.446587473154068,
"learning_rate": 8e-05,
"loss": 1.7033,
"step": 1778
},
{
"epoch": 0.43226825416109826,
"grad_norm": 0.43002545833587646,
"learning_rate": 8e-05,
"loss": 1.6737,
"step": 1779
},
{
"epoch": 0.4325112380026728,
"grad_norm": 0.41290929913520813,
"learning_rate": 8e-05,
"loss": 1.755,
"step": 1780
},
{
"epoch": 0.43275422184424733,
"grad_norm": 0.4275924265384674,
"learning_rate": 8e-05,
"loss": 1.6903,
"step": 1781
},
{
"epoch": 0.4329972056858219,
"grad_norm": 0.4202406108379364,
"learning_rate": 8e-05,
"loss": 1.7452,
"step": 1782
},
{
"epoch": 0.4332401895273964,
"grad_norm": 0.4105161726474762,
"learning_rate": 8e-05,
"loss": 1.634,
"step": 1783
},
{
"epoch": 0.43348317336897096,
"grad_norm": 0.4252176284790039,
"learning_rate": 8e-05,
"loss": 1.678,
"step": 1784
},
{
"epoch": 0.43372615721054547,
"grad_norm": 0.41096925735473633,
"learning_rate": 8e-05,
"loss": 1.7689,
"step": 1785
},
{
"epoch": 0.43396914105212003,
"grad_norm": 0.4047700762748718,
"learning_rate": 8e-05,
"loss": 1.5749,
"step": 1786
},
{
"epoch": 0.4342121248936946,
"grad_norm": 0.44662734866142273,
"learning_rate": 8e-05,
"loss": 1.8453,
"step": 1787
},
{
"epoch": 0.4344551087352691,
"grad_norm": 0.41828441619873047,
"learning_rate": 8e-05,
"loss": 1.6347,
"step": 1788
},
{
"epoch": 0.43469809257684366,
"grad_norm": 0.4287610650062561,
"learning_rate": 8e-05,
"loss": 1.7657,
"step": 1789
},
{
"epoch": 0.43494107641841817,
"grad_norm": 0.4535541534423828,
"learning_rate": 8e-05,
"loss": 1.8044,
"step": 1790
},
{
"epoch": 0.43518406025999273,
"grad_norm": 0.4219343662261963,
"learning_rate": 8e-05,
"loss": 1.7335,
"step": 1791
},
{
"epoch": 0.43542704410156724,
"grad_norm": 0.42163732647895813,
"learning_rate": 8e-05,
"loss": 1.6463,
"step": 1792
},
{
"epoch": 0.4356700279431418,
"grad_norm": 0.43030476570129395,
"learning_rate": 8e-05,
"loss": 1.6985,
"step": 1793
},
{
"epoch": 0.4359130117847163,
"grad_norm": 0.4406364858150482,
"learning_rate": 8e-05,
"loss": 1.8768,
"step": 1794
},
{
"epoch": 0.43615599562629087,
"grad_norm": 0.4096386730670929,
"learning_rate": 8e-05,
"loss": 1.6918,
"step": 1795
},
{
"epoch": 0.4363989794678654,
"grad_norm": 0.418416291475296,
"learning_rate": 8e-05,
"loss": 1.6321,
"step": 1796
},
{
"epoch": 0.43664196330943994,
"grad_norm": 0.44120216369628906,
"learning_rate": 8e-05,
"loss": 1.5469,
"step": 1797
},
{
"epoch": 0.43688494715101445,
"grad_norm": 0.41503068804740906,
"learning_rate": 8e-05,
"loss": 1.5663,
"step": 1798
},
{
"epoch": 0.437127930992589,
"grad_norm": 0.3758297860622406,
"learning_rate": 8e-05,
"loss": 1.5509,
"step": 1799
},
{
"epoch": 0.4373709148341635,
"grad_norm": 0.41770219802856445,
"learning_rate": 8e-05,
"loss": 1.772,
"step": 1800
},
{
"epoch": 0.4376138986757381,
"grad_norm": 0.44644829630851746,
"learning_rate": 8e-05,
"loss": 1.8186,
"step": 1801
},
{
"epoch": 0.4378568825173126,
"grad_norm": 0.4562210738658905,
"learning_rate": 8e-05,
"loss": 1.8294,
"step": 1802
},
{
"epoch": 0.43809986635888715,
"grad_norm": 0.43727368116378784,
"learning_rate": 8e-05,
"loss": 1.7658,
"step": 1803
},
{
"epoch": 0.43834285020046165,
"grad_norm": 0.4097563326358795,
"learning_rate": 8e-05,
"loss": 1.6273,
"step": 1804
},
{
"epoch": 0.4385858340420362,
"grad_norm": 0.446963906288147,
"learning_rate": 8e-05,
"loss": 1.7977,
"step": 1805
},
{
"epoch": 0.4388288178836107,
"grad_norm": 0.4568128287792206,
"learning_rate": 8e-05,
"loss": 1.7073,
"step": 1806
},
{
"epoch": 0.4390718017251853,
"grad_norm": 0.4310816824436188,
"learning_rate": 8e-05,
"loss": 1.7771,
"step": 1807
},
{
"epoch": 0.4393147855667598,
"grad_norm": 0.42967939376831055,
"learning_rate": 8e-05,
"loss": 1.7093,
"step": 1808
},
{
"epoch": 0.43955776940833435,
"grad_norm": 0.46501290798187256,
"learning_rate": 8e-05,
"loss": 1.6964,
"step": 1809
},
{
"epoch": 0.43980075324990886,
"grad_norm": 0.41305580735206604,
"learning_rate": 8e-05,
"loss": 1.6825,
"step": 1810
},
{
"epoch": 0.4400437370914834,
"grad_norm": 0.43250951170921326,
"learning_rate": 8e-05,
"loss": 1.7467,
"step": 1811
},
{
"epoch": 0.44028672093305793,
"grad_norm": 0.41424018144607544,
"learning_rate": 8e-05,
"loss": 1.652,
"step": 1812
},
{
"epoch": 0.4405297047746325,
"grad_norm": 0.40682876110076904,
"learning_rate": 8e-05,
"loss": 1.6417,
"step": 1813
},
{
"epoch": 0.440772688616207,
"grad_norm": 0.4290339946746826,
"learning_rate": 8e-05,
"loss": 1.7227,
"step": 1814
},
{
"epoch": 0.44101567245778156,
"grad_norm": 0.4426180124282837,
"learning_rate": 8e-05,
"loss": 1.6162,
"step": 1815
},
{
"epoch": 0.44125865629935607,
"grad_norm": 0.4229072332382202,
"learning_rate": 8e-05,
"loss": 1.6379,
"step": 1816
},
{
"epoch": 0.44150164014093063,
"grad_norm": 0.41639086604118347,
"learning_rate": 8e-05,
"loss": 1.6388,
"step": 1817
},
{
"epoch": 0.44174462398250514,
"grad_norm": 0.41044601798057556,
"learning_rate": 8e-05,
"loss": 1.6975,
"step": 1818
},
{
"epoch": 0.4419876078240797,
"grad_norm": 0.41082099080085754,
"learning_rate": 8e-05,
"loss": 1.6688,
"step": 1819
},
{
"epoch": 0.44223059166565426,
"grad_norm": 0.4211667478084564,
"learning_rate": 8e-05,
"loss": 1.7065,
"step": 1820
},
{
"epoch": 0.44247357550722877,
"grad_norm": 0.41173863410949707,
"learning_rate": 8e-05,
"loss": 1.6185,
"step": 1821
},
{
"epoch": 0.44271655934880333,
"grad_norm": 0.40657758712768555,
"learning_rate": 8e-05,
"loss": 1.6782,
"step": 1822
},
{
"epoch": 0.44295954319037784,
"grad_norm": 0.4356505870819092,
"learning_rate": 8e-05,
"loss": 1.7657,
"step": 1823
},
{
"epoch": 0.4432025270319524,
"grad_norm": 0.42441052198410034,
"learning_rate": 8e-05,
"loss": 1.7506,
"step": 1824
},
{
"epoch": 0.4434455108735269,
"grad_norm": 0.42454221844673157,
"learning_rate": 8e-05,
"loss": 1.7831,
"step": 1825
},
{
"epoch": 0.44368849471510147,
"grad_norm": 0.41915127635002136,
"learning_rate": 8e-05,
"loss": 1.7567,
"step": 1826
},
{
"epoch": 0.443931478556676,
"grad_norm": 0.39879941940307617,
"learning_rate": 8e-05,
"loss": 1.6248,
"step": 1827
},
{
"epoch": 0.44417446239825054,
"grad_norm": 0.43807122111320496,
"learning_rate": 8e-05,
"loss": 1.7819,
"step": 1828
},
{
"epoch": 0.44441744623982504,
"grad_norm": 0.4479237496852875,
"learning_rate": 8e-05,
"loss": 1.7599,
"step": 1829
},
{
"epoch": 0.4446604300813996,
"grad_norm": 0.4309643805027008,
"learning_rate": 8e-05,
"loss": 1.7599,
"step": 1830
},
{
"epoch": 0.4449034139229741,
"grad_norm": 0.426151841878891,
"learning_rate": 8e-05,
"loss": 1.7414,
"step": 1831
},
{
"epoch": 0.4451463977645487,
"grad_norm": 0.45159122347831726,
"learning_rate": 8e-05,
"loss": 1.8247,
"step": 1832
},
{
"epoch": 0.4453893816061232,
"grad_norm": 0.4200173020362854,
"learning_rate": 8e-05,
"loss": 1.6472,
"step": 1833
},
{
"epoch": 0.44563236544769774,
"grad_norm": 0.45498377084732056,
"learning_rate": 8e-05,
"loss": 1.9266,
"step": 1834
},
{
"epoch": 0.44587534928927225,
"grad_norm": 0.4178752899169922,
"learning_rate": 8e-05,
"loss": 1.7454,
"step": 1835
},
{
"epoch": 0.4461183331308468,
"grad_norm": 0.4127987325191498,
"learning_rate": 8e-05,
"loss": 1.582,
"step": 1836
},
{
"epoch": 0.4463613169724213,
"grad_norm": 0.3950411379337311,
"learning_rate": 8e-05,
"loss": 1.6114,
"step": 1837
},
{
"epoch": 0.4466043008139959,
"grad_norm": 0.4156026840209961,
"learning_rate": 8e-05,
"loss": 1.6781,
"step": 1838
},
{
"epoch": 0.4468472846555704,
"grad_norm": 0.4114047586917877,
"learning_rate": 8e-05,
"loss": 1.7528,
"step": 1839
},
{
"epoch": 0.44709026849714495,
"grad_norm": 0.40903884172439575,
"learning_rate": 8e-05,
"loss": 1.6078,
"step": 1840
},
{
"epoch": 0.44733325233871946,
"grad_norm": 0.4279289245605469,
"learning_rate": 8e-05,
"loss": 1.7351,
"step": 1841
},
{
"epoch": 0.447576236180294,
"grad_norm": 0.4179602861404419,
"learning_rate": 8e-05,
"loss": 1.647,
"step": 1842
},
{
"epoch": 0.4478192200218685,
"grad_norm": 0.4204224646091461,
"learning_rate": 8e-05,
"loss": 1.7088,
"step": 1843
},
{
"epoch": 0.4480622038634431,
"grad_norm": 0.4532315135002136,
"learning_rate": 8e-05,
"loss": 1.6527,
"step": 1844
},
{
"epoch": 0.4483051877050176,
"grad_norm": 0.42938992381095886,
"learning_rate": 8e-05,
"loss": 1.6554,
"step": 1845
},
{
"epoch": 0.44854817154659216,
"grad_norm": 0.5924028158187866,
"learning_rate": 8e-05,
"loss": 1.7198,
"step": 1846
},
{
"epoch": 0.44879115538816666,
"grad_norm": 0.4050762355327606,
"learning_rate": 8e-05,
"loss": 1.5864,
"step": 1847
},
{
"epoch": 0.4490341392297412,
"grad_norm": 0.4267370402812958,
"learning_rate": 8e-05,
"loss": 1.6891,
"step": 1848
},
{
"epoch": 0.44927712307131573,
"grad_norm": 0.4041881263256073,
"learning_rate": 8e-05,
"loss": 1.5839,
"step": 1849
},
{
"epoch": 0.4495201069128903,
"grad_norm": 0.4267805814743042,
"learning_rate": 8e-05,
"loss": 1.7747,
"step": 1850
},
{
"epoch": 0.4497630907544648,
"grad_norm": 0.414093941450119,
"learning_rate": 8e-05,
"loss": 1.5164,
"step": 1851
},
{
"epoch": 0.45000607459603936,
"grad_norm": 0.43252500891685486,
"learning_rate": 8e-05,
"loss": 1.6689,
"step": 1852
},
{
"epoch": 0.4502490584376139,
"grad_norm": 0.42252010107040405,
"learning_rate": 8e-05,
"loss": 1.7299,
"step": 1853
},
{
"epoch": 0.45049204227918843,
"grad_norm": 0.4307325482368469,
"learning_rate": 8e-05,
"loss": 1.7401,
"step": 1854
},
{
"epoch": 0.450735026120763,
"grad_norm": 0.41866233944892883,
"learning_rate": 8e-05,
"loss": 1.5479,
"step": 1855
},
{
"epoch": 0.4509780099623375,
"grad_norm": 0.4151945412158966,
"learning_rate": 8e-05,
"loss": 1.6247,
"step": 1856
},
{
"epoch": 0.45122099380391206,
"grad_norm": 0.4459803104400635,
"learning_rate": 8e-05,
"loss": 1.6037,
"step": 1857
},
{
"epoch": 0.45146397764548657,
"grad_norm": 0.4404464662075043,
"learning_rate": 8e-05,
"loss": 1.8168,
"step": 1858
},
{
"epoch": 0.45170696148706113,
"grad_norm": 0.43660715222358704,
"learning_rate": 8e-05,
"loss": 1.691,
"step": 1859
},
{
"epoch": 0.45194994532863564,
"grad_norm": 0.4186517000198364,
"learning_rate": 8e-05,
"loss": 1.6794,
"step": 1860
},
{
"epoch": 0.4521929291702102,
"grad_norm": 0.46955999732017517,
"learning_rate": 8e-05,
"loss": 1.6914,
"step": 1861
},
{
"epoch": 0.4524359130117847,
"grad_norm": 0.3999137878417969,
"learning_rate": 8e-05,
"loss": 1.5466,
"step": 1862
},
{
"epoch": 0.45267889685335927,
"grad_norm": 0.39286574721336365,
"learning_rate": 8e-05,
"loss": 1.6017,
"step": 1863
},
{
"epoch": 0.4529218806949338,
"grad_norm": 0.4328494071960449,
"learning_rate": 8e-05,
"loss": 1.7226,
"step": 1864
},
{
"epoch": 0.45316486453650834,
"grad_norm": 0.4277777075767517,
"learning_rate": 8e-05,
"loss": 1.6639,
"step": 1865
},
{
"epoch": 0.45340784837808285,
"grad_norm": 0.4166783392429352,
"learning_rate": 8e-05,
"loss": 1.7208,
"step": 1866
},
{
"epoch": 0.4536508322196574,
"grad_norm": 0.439242959022522,
"learning_rate": 8e-05,
"loss": 1.6635,
"step": 1867
},
{
"epoch": 0.4538938160612319,
"grad_norm": 0.40764227509498596,
"learning_rate": 8e-05,
"loss": 1.6346,
"step": 1868
},
{
"epoch": 0.4541367999028065,
"grad_norm": 0.4133087992668152,
"learning_rate": 8e-05,
"loss": 1.7251,
"step": 1869
},
{
"epoch": 0.454379783744381,
"grad_norm": 0.4220365583896637,
"learning_rate": 8e-05,
"loss": 1.6892,
"step": 1870
},
{
"epoch": 0.45462276758595555,
"grad_norm": 0.4321078360080719,
"learning_rate": 8e-05,
"loss": 1.8009,
"step": 1871
},
{
"epoch": 0.45486575142753005,
"grad_norm": 0.43721655011177063,
"learning_rate": 8e-05,
"loss": 1.7187,
"step": 1872
},
{
"epoch": 0.4551087352691046,
"grad_norm": 0.43518969416618347,
"learning_rate": 8e-05,
"loss": 1.8886,
"step": 1873
},
{
"epoch": 0.4553517191106791,
"grad_norm": 0.4107803702354431,
"learning_rate": 8e-05,
"loss": 1.7483,
"step": 1874
},
{
"epoch": 0.4555947029522537,
"grad_norm": 0.4013316333293915,
"learning_rate": 8e-05,
"loss": 1.6816,
"step": 1875
},
{
"epoch": 0.4558376867938282,
"grad_norm": 0.41884687542915344,
"learning_rate": 8e-05,
"loss": 1.7584,
"step": 1876
},
{
"epoch": 0.45608067063540275,
"grad_norm": 0.4207857549190521,
"learning_rate": 8e-05,
"loss": 1.8294,
"step": 1877
},
{
"epoch": 0.45632365447697726,
"grad_norm": 0.4228103458881378,
"learning_rate": 8e-05,
"loss": 1.6824,
"step": 1878
},
{
"epoch": 0.4565666383185518,
"grad_norm": 0.4231003522872925,
"learning_rate": 8e-05,
"loss": 1.7698,
"step": 1879
},
{
"epoch": 0.4568096221601263,
"grad_norm": 0.45048895478248596,
"learning_rate": 8e-05,
"loss": 1.6999,
"step": 1880
},
{
"epoch": 0.4570526060017009,
"grad_norm": 0.4198697507381439,
"learning_rate": 8e-05,
"loss": 1.7461,
"step": 1881
},
{
"epoch": 0.4572955898432754,
"grad_norm": 0.4145090878009796,
"learning_rate": 8e-05,
"loss": 1.5858,
"step": 1882
},
{
"epoch": 0.45753857368484996,
"grad_norm": 0.442281574010849,
"learning_rate": 8e-05,
"loss": 1.6529,
"step": 1883
},
{
"epoch": 0.45778155752642447,
"grad_norm": 0.41116663813591003,
"learning_rate": 8e-05,
"loss": 1.6314,
"step": 1884
},
{
"epoch": 0.458024541367999,
"grad_norm": 0.42355984449386597,
"learning_rate": 8e-05,
"loss": 1.6578,
"step": 1885
},
{
"epoch": 0.4582675252095736,
"grad_norm": 0.44513821601867676,
"learning_rate": 8e-05,
"loss": 1.5976,
"step": 1886
},
{
"epoch": 0.4585105090511481,
"grad_norm": 0.47635918855667114,
"learning_rate": 8e-05,
"loss": 1.6657,
"step": 1887
},
{
"epoch": 0.45875349289272266,
"grad_norm": 0.4152633845806122,
"learning_rate": 8e-05,
"loss": 1.6731,
"step": 1888
},
{
"epoch": 0.45899647673429717,
"grad_norm": 0.5451321601867676,
"learning_rate": 8e-05,
"loss": 1.7647,
"step": 1889
},
{
"epoch": 0.4592394605758717,
"grad_norm": 0.4499322772026062,
"learning_rate": 8e-05,
"loss": 1.6998,
"step": 1890
},
{
"epoch": 0.45948244441744623,
"grad_norm": 0.42485854029655457,
"learning_rate": 8e-05,
"loss": 1.7115,
"step": 1891
},
{
"epoch": 0.4597254282590208,
"grad_norm": 0.4471605122089386,
"learning_rate": 8e-05,
"loss": 1.699,
"step": 1892
},
{
"epoch": 0.4599684121005953,
"grad_norm": 0.43966439366340637,
"learning_rate": 8e-05,
"loss": 1.6775,
"step": 1893
},
{
"epoch": 0.46021139594216987,
"grad_norm": 0.4498927593231201,
"learning_rate": 8e-05,
"loss": 1.7707,
"step": 1894
},
{
"epoch": 0.46045437978374437,
"grad_norm": 0.4585667550563812,
"learning_rate": 8e-05,
"loss": 1.6981,
"step": 1895
},
{
"epoch": 0.46069736362531893,
"grad_norm": 0.4273296296596527,
"learning_rate": 8e-05,
"loss": 1.7448,
"step": 1896
},
{
"epoch": 0.46094034746689344,
"grad_norm": 0.42229199409484863,
"learning_rate": 8e-05,
"loss": 1.6637,
"step": 1897
},
{
"epoch": 0.461183331308468,
"grad_norm": 0.4054836928844452,
"learning_rate": 8e-05,
"loss": 1.641,
"step": 1898
},
{
"epoch": 0.4614263151500425,
"grad_norm": 0.39605993032455444,
"learning_rate": 8e-05,
"loss": 1.5741,
"step": 1899
},
{
"epoch": 0.46166929899161707,
"grad_norm": 0.41373077034950256,
"learning_rate": 8e-05,
"loss": 1.5628,
"step": 1900
},
{
"epoch": 0.4619122828331916,
"grad_norm": 0.4387223720550537,
"learning_rate": 8e-05,
"loss": 1.7039,
"step": 1901
},
{
"epoch": 0.46215526667476614,
"grad_norm": 0.40724092721939087,
"learning_rate": 8e-05,
"loss": 1.5232,
"step": 1902
},
{
"epoch": 0.46239825051634065,
"grad_norm": 0.4248490631580353,
"learning_rate": 8e-05,
"loss": 1.7604,
"step": 1903
},
{
"epoch": 0.4626412343579152,
"grad_norm": 0.4261873960494995,
"learning_rate": 8e-05,
"loss": 1.6812,
"step": 1904
},
{
"epoch": 0.4628842181994897,
"grad_norm": 0.405947208404541,
"learning_rate": 8e-05,
"loss": 1.6778,
"step": 1905
},
{
"epoch": 0.4631272020410643,
"grad_norm": 0.43242183327674866,
"learning_rate": 8e-05,
"loss": 1.7289,
"step": 1906
},
{
"epoch": 0.4633701858826388,
"grad_norm": 0.42454537749290466,
"learning_rate": 8e-05,
"loss": 1.7662,
"step": 1907
},
{
"epoch": 0.46361316972421335,
"grad_norm": 0.43956276774406433,
"learning_rate": 8e-05,
"loss": 1.7926,
"step": 1908
},
{
"epoch": 0.46385615356578785,
"grad_norm": 0.4338242709636688,
"learning_rate": 8e-05,
"loss": 1.8531,
"step": 1909
},
{
"epoch": 0.4640991374073624,
"grad_norm": 0.40287795662879944,
"learning_rate": 8e-05,
"loss": 1.7194,
"step": 1910
},
{
"epoch": 0.4643421212489369,
"grad_norm": 0.40767234563827515,
"learning_rate": 8e-05,
"loss": 1.6324,
"step": 1911
},
{
"epoch": 0.4645851050905115,
"grad_norm": 0.4035324156284332,
"learning_rate": 8e-05,
"loss": 1.6616,
"step": 1912
},
{
"epoch": 0.464828088932086,
"grad_norm": 0.43699395656585693,
"learning_rate": 8e-05,
"loss": 1.7027,
"step": 1913
},
{
"epoch": 0.46507107277366055,
"grad_norm": 0.41658124327659607,
"learning_rate": 8e-05,
"loss": 1.6999,
"step": 1914
},
{
"epoch": 0.46531405661523506,
"grad_norm": 0.41529208421707153,
"learning_rate": 8e-05,
"loss": 1.7192,
"step": 1915
},
{
"epoch": 0.4655570404568096,
"grad_norm": 0.4511304795742035,
"learning_rate": 8e-05,
"loss": 1.8472,
"step": 1916
},
{
"epoch": 0.46580002429838413,
"grad_norm": 0.470687597990036,
"learning_rate": 8e-05,
"loss": 1.9462,
"step": 1917
},
{
"epoch": 0.4660430081399587,
"grad_norm": 0.41925495862960815,
"learning_rate": 8e-05,
"loss": 1.6032,
"step": 1918
},
{
"epoch": 0.46628599198153325,
"grad_norm": 0.41691452264785767,
"learning_rate": 8e-05,
"loss": 1.5893,
"step": 1919
},
{
"epoch": 0.46652897582310776,
"grad_norm": 0.44769784808158875,
"learning_rate": 8e-05,
"loss": 1.6974,
"step": 1920
},
{
"epoch": 0.4667719596646823,
"grad_norm": 0.45075252652168274,
"learning_rate": 8e-05,
"loss": 1.9332,
"step": 1921
},
{
"epoch": 0.46701494350625683,
"grad_norm": 0.42499783635139465,
"learning_rate": 8e-05,
"loss": 1.6581,
"step": 1922
},
{
"epoch": 0.4672579273478314,
"grad_norm": 0.4107694923877716,
"learning_rate": 8e-05,
"loss": 1.7142,
"step": 1923
},
{
"epoch": 0.4675009111894059,
"grad_norm": 0.386578232049942,
"learning_rate": 8e-05,
"loss": 1.5295,
"step": 1924
},
{
"epoch": 0.46774389503098046,
"grad_norm": 0.39867669343948364,
"learning_rate": 8e-05,
"loss": 1.5795,
"step": 1925
},
{
"epoch": 0.46798687887255497,
"grad_norm": 0.43957293033599854,
"learning_rate": 8e-05,
"loss": 1.7702,
"step": 1926
},
{
"epoch": 0.46822986271412953,
"grad_norm": 0.4263380169868469,
"learning_rate": 8e-05,
"loss": 1.5876,
"step": 1927
},
{
"epoch": 0.46847284655570404,
"grad_norm": 0.4032500982284546,
"learning_rate": 8e-05,
"loss": 1.5244,
"step": 1928
},
{
"epoch": 0.4687158303972786,
"grad_norm": 0.42151403427124023,
"learning_rate": 8e-05,
"loss": 1.7173,
"step": 1929
},
{
"epoch": 0.4689588142388531,
"grad_norm": 0.4025190770626068,
"learning_rate": 8e-05,
"loss": 1.6149,
"step": 1930
},
{
"epoch": 0.46920179808042767,
"grad_norm": 0.4104991853237152,
"learning_rate": 8e-05,
"loss": 1.6278,
"step": 1931
},
{
"epoch": 0.4694447819220022,
"grad_norm": 0.41498640179634094,
"learning_rate": 8e-05,
"loss": 1.6384,
"step": 1932
},
{
"epoch": 0.46968776576357674,
"grad_norm": 0.426425576210022,
"learning_rate": 8e-05,
"loss": 1.7485,
"step": 1933
},
{
"epoch": 0.46993074960515124,
"grad_norm": 0.4150788187980652,
"learning_rate": 8e-05,
"loss": 1.6396,
"step": 1934
},
{
"epoch": 0.4701737334467258,
"grad_norm": 0.3989652693271637,
"learning_rate": 8e-05,
"loss": 1.5937,
"step": 1935
},
{
"epoch": 0.4704167172883003,
"grad_norm": 0.42093151807785034,
"learning_rate": 8e-05,
"loss": 1.6827,
"step": 1936
},
{
"epoch": 0.4706597011298749,
"grad_norm": 0.4280698299407959,
"learning_rate": 8e-05,
"loss": 1.6352,
"step": 1937
},
{
"epoch": 0.4709026849714494,
"grad_norm": 0.4426701068878174,
"learning_rate": 8e-05,
"loss": 1.8398,
"step": 1938
},
{
"epoch": 0.47114566881302394,
"grad_norm": 0.4073024392127991,
"learning_rate": 8e-05,
"loss": 1.5365,
"step": 1939
},
{
"epoch": 0.47138865265459845,
"grad_norm": 0.4276946783065796,
"learning_rate": 8e-05,
"loss": 1.7999,
"step": 1940
},
{
"epoch": 0.471631636496173,
"grad_norm": 0.3996224105358124,
"learning_rate": 8e-05,
"loss": 1.4843,
"step": 1941
},
{
"epoch": 0.4718746203377475,
"grad_norm": 0.4300505220890045,
"learning_rate": 8e-05,
"loss": 1.8833,
"step": 1942
},
{
"epoch": 0.4721176041793221,
"grad_norm": 0.3979191482067108,
"learning_rate": 8e-05,
"loss": 1.5753,
"step": 1943
},
{
"epoch": 0.4723605880208966,
"grad_norm": 0.44216442108154297,
"learning_rate": 8e-05,
"loss": 1.6425,
"step": 1944
},
{
"epoch": 0.47260357186247115,
"grad_norm": 0.4506628215312958,
"learning_rate": 8e-05,
"loss": 1.8527,
"step": 1945
},
{
"epoch": 0.47284655570404566,
"grad_norm": 0.42636045813560486,
"learning_rate": 8e-05,
"loss": 1.6937,
"step": 1946
},
{
"epoch": 0.4730895395456202,
"grad_norm": 0.44184669852256775,
"learning_rate": 8e-05,
"loss": 1.8348,
"step": 1947
},
{
"epoch": 0.4733325233871947,
"grad_norm": 0.4105511009693146,
"learning_rate": 8e-05,
"loss": 1.5969,
"step": 1948
},
{
"epoch": 0.4735755072287693,
"grad_norm": 0.449083149433136,
"learning_rate": 8e-05,
"loss": 1.7385,
"step": 1949
},
{
"epoch": 0.4738184910703438,
"grad_norm": 0.43708378076553345,
"learning_rate": 8e-05,
"loss": 1.7683,
"step": 1950
},
{
"epoch": 0.47406147491191836,
"grad_norm": 0.5191806554794312,
"learning_rate": 8e-05,
"loss": 1.6373,
"step": 1951
},
{
"epoch": 0.4743044587534929,
"grad_norm": 0.42893365025520325,
"learning_rate": 8e-05,
"loss": 1.605,
"step": 1952
},
{
"epoch": 0.4745474425950674,
"grad_norm": 0.5133644342422485,
"learning_rate": 8e-05,
"loss": 1.6852,
"step": 1953
},
{
"epoch": 0.474790426436642,
"grad_norm": 0.42633509635925293,
"learning_rate": 8e-05,
"loss": 1.6697,
"step": 1954
},
{
"epoch": 0.4750334102782165,
"grad_norm": 0.4043295383453369,
"learning_rate": 8e-05,
"loss": 1.5707,
"step": 1955
},
{
"epoch": 0.47527639411979106,
"grad_norm": 0.4424762725830078,
"learning_rate": 8e-05,
"loss": 1.7666,
"step": 1956
},
{
"epoch": 0.47551937796136556,
"grad_norm": 0.4177377223968506,
"learning_rate": 8e-05,
"loss": 1.6077,
"step": 1957
},
{
"epoch": 0.4757623618029401,
"grad_norm": 0.4313500225543976,
"learning_rate": 8e-05,
"loss": 1.7924,
"step": 1958
},
{
"epoch": 0.47600534564451463,
"grad_norm": 0.4116400182247162,
"learning_rate": 8e-05,
"loss": 1.6701,
"step": 1959
},
{
"epoch": 0.4762483294860892,
"grad_norm": 0.43881407380104065,
"learning_rate": 8e-05,
"loss": 1.8674,
"step": 1960
},
{
"epoch": 0.4764913133276637,
"grad_norm": 0.4389816224575043,
"learning_rate": 8e-05,
"loss": 1.7691,
"step": 1961
},
{
"epoch": 0.47673429716923826,
"grad_norm": 0.41141510009765625,
"learning_rate": 8e-05,
"loss": 1.7051,
"step": 1962
},
{
"epoch": 0.47697728101081277,
"grad_norm": 0.45347926020622253,
"learning_rate": 8e-05,
"loss": 1.7938,
"step": 1963
},
{
"epoch": 0.47722026485238733,
"grad_norm": 0.44667941331863403,
"learning_rate": 8e-05,
"loss": 1.6149,
"step": 1964
},
{
"epoch": 0.47746324869396184,
"grad_norm": 0.427518367767334,
"learning_rate": 8e-05,
"loss": 1.6841,
"step": 1965
},
{
"epoch": 0.4777062325355364,
"grad_norm": 0.4317607283592224,
"learning_rate": 8e-05,
"loss": 1.7672,
"step": 1966
},
{
"epoch": 0.4779492163771109,
"grad_norm": 0.46663787961006165,
"learning_rate": 8e-05,
"loss": 1.7708,
"step": 1967
},
{
"epoch": 0.47819220021868547,
"grad_norm": 0.44258251786231995,
"learning_rate": 8e-05,
"loss": 1.7246,
"step": 1968
},
{
"epoch": 0.47843518406026,
"grad_norm": 0.4117269814014435,
"learning_rate": 8e-05,
"loss": 1.5849,
"step": 1969
},
{
"epoch": 0.47867816790183454,
"grad_norm": 0.4820593595504761,
"learning_rate": 8e-05,
"loss": 1.7651,
"step": 1970
},
{
"epoch": 0.47892115174340905,
"grad_norm": 0.4440200626850128,
"learning_rate": 8e-05,
"loss": 1.7023,
"step": 1971
},
{
"epoch": 0.4791641355849836,
"grad_norm": 0.4371243715286255,
"learning_rate": 8e-05,
"loss": 1.6899,
"step": 1972
},
{
"epoch": 0.4794071194265581,
"grad_norm": 0.4171777069568634,
"learning_rate": 8e-05,
"loss": 1.5713,
"step": 1973
},
{
"epoch": 0.4796501032681327,
"grad_norm": 0.4147702753543854,
"learning_rate": 8e-05,
"loss": 1.6902,
"step": 1974
},
{
"epoch": 0.4798930871097072,
"grad_norm": 0.4282766878604889,
"learning_rate": 8e-05,
"loss": 1.6509,
"step": 1975
},
{
"epoch": 0.48013607095128175,
"grad_norm": 0.4245712459087372,
"learning_rate": 8e-05,
"loss": 1.6241,
"step": 1976
},
{
"epoch": 0.48037905479285625,
"grad_norm": 0.427929550409317,
"learning_rate": 8e-05,
"loss": 1.7021,
"step": 1977
},
{
"epoch": 0.4806220386344308,
"grad_norm": 0.4307423532009125,
"learning_rate": 8e-05,
"loss": 1.6829,
"step": 1978
},
{
"epoch": 0.4808650224760053,
"grad_norm": 0.41224634647369385,
"learning_rate": 8e-05,
"loss": 1.7136,
"step": 1979
},
{
"epoch": 0.4811080063175799,
"grad_norm": 0.4373134970664978,
"learning_rate": 8e-05,
"loss": 1.8249,
"step": 1980
},
{
"epoch": 0.4813509901591544,
"grad_norm": 0.41638660430908203,
"learning_rate": 8e-05,
"loss": 1.697,
"step": 1981
},
{
"epoch": 0.48159397400072895,
"grad_norm": 0.40082883834838867,
"learning_rate": 8e-05,
"loss": 1.5816,
"step": 1982
},
{
"epoch": 0.48183695784230346,
"grad_norm": 0.4369618594646454,
"learning_rate": 8e-05,
"loss": 1.763,
"step": 1983
},
{
"epoch": 0.482079941683878,
"grad_norm": 0.4087012708187103,
"learning_rate": 8e-05,
"loss": 1.5982,
"step": 1984
},
{
"epoch": 0.4823229255254526,
"grad_norm": 0.408522367477417,
"learning_rate": 8e-05,
"loss": 1.6309,
"step": 1985
},
{
"epoch": 0.4825659093670271,
"grad_norm": 0.44090622663497925,
"learning_rate": 8e-05,
"loss": 1.761,
"step": 1986
},
{
"epoch": 0.48280889320860165,
"grad_norm": 0.4151834547519684,
"learning_rate": 8e-05,
"loss": 1.7185,
"step": 1987
},
{
"epoch": 0.48305187705017616,
"grad_norm": 0.44949987530708313,
"learning_rate": 8e-05,
"loss": 1.8235,
"step": 1988
},
{
"epoch": 0.4832948608917507,
"grad_norm": 0.42424458265304565,
"learning_rate": 8e-05,
"loss": 1.615,
"step": 1989
},
{
"epoch": 0.48353784473332523,
"grad_norm": 0.45849746465682983,
"learning_rate": 8e-05,
"loss": 1.8579,
"step": 1990
},
{
"epoch": 0.4837808285748998,
"grad_norm": 0.4297718405723572,
"learning_rate": 8e-05,
"loss": 1.6901,
"step": 1991
},
{
"epoch": 0.4840238124164743,
"grad_norm": 0.4386619031429291,
"learning_rate": 8e-05,
"loss": 1.659,
"step": 1992
},
{
"epoch": 0.48426679625804886,
"grad_norm": 0.4270291030406952,
"learning_rate": 8e-05,
"loss": 1.7944,
"step": 1993
},
{
"epoch": 0.48450978009962337,
"grad_norm": 0.4325200319290161,
"learning_rate": 8e-05,
"loss": 1.5759,
"step": 1994
},
{
"epoch": 0.48475276394119793,
"grad_norm": 0.43285664916038513,
"learning_rate": 8e-05,
"loss": 1.6821,
"step": 1995
},
{
"epoch": 0.48499574778277244,
"grad_norm": 0.4182582199573517,
"learning_rate": 8e-05,
"loss": 1.6951,
"step": 1996
},
{
"epoch": 0.485238731624347,
"grad_norm": 0.4355359673500061,
"learning_rate": 8e-05,
"loss": 1.7369,
"step": 1997
},
{
"epoch": 0.4854817154659215,
"grad_norm": 0.4323694705963135,
"learning_rate": 8e-05,
"loss": 1.7227,
"step": 1998
},
{
"epoch": 0.48572469930749607,
"grad_norm": 0.40371614694595337,
"learning_rate": 8e-05,
"loss": 1.7561,
"step": 1999
},
{
"epoch": 0.4859676831490706,
"grad_norm": 0.4079720675945282,
"learning_rate": 8e-05,
"loss": 1.6815,
"step": 2000
}
],
"logging_steps": 1,
"max_steps": 4115,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.81251563290624e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}