{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 13.166556945358789,
"eval_steps": 500,
"global_step": 20000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0065832784726793945,
"grad_norm": 6.109508037567139,
"learning_rate": 1.8e-07,
"loss": 0.8093,
"step": 10
},
{
"epoch": 0.013166556945358789,
"grad_norm": 5.816650390625,
"learning_rate": 3.8e-07,
"loss": 0.814,
"step": 20
},
{
"epoch": 0.019749835418038184,
"grad_norm": 5.560344219207764,
"learning_rate": 5.8e-07,
"loss": 0.8663,
"step": 30
},
{
"epoch": 0.026333113890717578,
"grad_norm": 5.82420539855957,
"learning_rate": 7.8e-07,
"loss": 0.8012,
"step": 40
},
{
"epoch": 0.032916392363396975,
"grad_norm": 5.164933681488037,
"learning_rate": 9.8e-07,
"loss": 0.7202,
"step": 50
},
{
"epoch": 0.03949967083607637,
"grad_norm": 3.8185598850250244,
"learning_rate": 1.18e-06,
"loss": 0.644,
"step": 60
},
{
"epoch": 0.04608294930875576,
"grad_norm": 1.9919044971466064,
"learning_rate": 1.3800000000000001e-06,
"loss": 0.444,
"step": 70
},
{
"epoch": 0.052666227781435156,
"grad_norm": 2.2428810596466064,
"learning_rate": 1.5800000000000003e-06,
"loss": 0.4627,
"step": 80
},
{
"epoch": 0.05924950625411455,
"grad_norm": 1.7347915172576904,
"learning_rate": 1.7800000000000001e-06,
"loss": 0.3394,
"step": 90
},
{
"epoch": 0.06583278472679395,
"grad_norm": 1.4494456052780151,
"learning_rate": 1.98e-06,
"loss": 0.2937,
"step": 100
},
{
"epoch": 0.07241606319947334,
"grad_norm": 0.8749158382415771,
"learning_rate": 2.1800000000000003e-06,
"loss": 0.2514,
"step": 110
},
{
"epoch": 0.07899934167215274,
"grad_norm": 1.3043384552001953,
"learning_rate": 2.38e-06,
"loss": 0.2434,
"step": 120
},
{
"epoch": 0.08558262014483213,
"grad_norm": 0.9992497563362122,
"learning_rate": 2.5800000000000003e-06,
"loss": 0.2177,
"step": 130
},
{
"epoch": 0.09216589861751152,
"grad_norm": 1.2492196559906006,
"learning_rate": 2.78e-06,
"loss": 0.1994,
"step": 140
},
{
"epoch": 0.09874917709019092,
"grad_norm": 1.3398922681808472,
"learning_rate": 2.9800000000000003e-06,
"loss": 0.2074,
"step": 150
},
{
"epoch": 0.10533245556287031,
"grad_norm": 1.106767177581787,
"learning_rate": 3.1800000000000005e-06,
"loss": 0.1774,
"step": 160
},
{
"epoch": 0.1119157340355497,
"grad_norm": 1.0735636949539185,
"learning_rate": 3.38e-06,
"loss": 0.1705,
"step": 170
},
{
"epoch": 0.1184990125082291,
"grad_norm": 1.557204008102417,
"learning_rate": 3.58e-06,
"loss": 0.1699,
"step": 180
},
{
"epoch": 0.1250822909809085,
"grad_norm": 1.0118534564971924,
"learning_rate": 3.7800000000000002e-06,
"loss": 0.1689,
"step": 190
},
{
"epoch": 0.1316655694535879,
"grad_norm": 1.1536340713500977,
"learning_rate": 3.98e-06,
"loss": 0.1582,
"step": 200
},
{
"epoch": 0.1382488479262673,
"grad_norm": 1.0887510776519775,
"learning_rate": 4.18e-06,
"loss": 0.1487,
"step": 210
},
{
"epoch": 0.1448321263989467,
"grad_norm": 0.8139439225196838,
"learning_rate": 4.38e-06,
"loss": 0.149,
"step": 220
},
{
"epoch": 0.15141540487162608,
"grad_norm": 1.3026080131530762,
"learning_rate": 4.58e-06,
"loss": 0.1345,
"step": 230
},
{
"epoch": 0.15799868334430547,
"grad_norm": 1.1991429328918457,
"learning_rate": 4.780000000000001e-06,
"loss": 0.1376,
"step": 240
},
{
"epoch": 0.16458196181698487,
"grad_norm": 1.412582278251648,
"learning_rate": 4.98e-06,
"loss": 0.1305,
"step": 250
},
{
"epoch": 0.17116524028966426,
"grad_norm": 1.7994115352630615,
"learning_rate": 5.18e-06,
"loss": 0.1232,
"step": 260
},
{
"epoch": 0.17774851876234365,
"grad_norm": 1.389769196510315,
"learning_rate": 5.38e-06,
"loss": 0.1157,
"step": 270
},
{
"epoch": 0.18433179723502305,
"grad_norm": 0.8111817836761475,
"learning_rate": 5.580000000000001e-06,
"loss": 0.1206,
"step": 280
},
{
"epoch": 0.19091507570770244,
"grad_norm": 1.2347253561019897,
"learning_rate": 5.78e-06,
"loss": 0.0954,
"step": 290
},
{
"epoch": 0.19749835418038184,
"grad_norm": 0.7645024061203003,
"learning_rate": 5.98e-06,
"loss": 0.1003,
"step": 300
},
{
"epoch": 0.20408163265306123,
"grad_norm": 1.3699753284454346,
"learning_rate": 6.18e-06,
"loss": 0.1052,
"step": 310
},
{
"epoch": 0.21066491112574062,
"grad_norm": 1.1985740661621094,
"learning_rate": 6.38e-06,
"loss": 0.0862,
"step": 320
},
{
"epoch": 0.21724818959842002,
"grad_norm": 0.9838375449180603,
"learning_rate": 6.58e-06,
"loss": 0.0932,
"step": 330
},
{
"epoch": 0.2238314680710994,
"grad_norm": 0.9791699647903442,
"learning_rate": 6.78e-06,
"loss": 0.0816,
"step": 340
},
{
"epoch": 0.2304147465437788,
"grad_norm": 0.82407546043396,
"learning_rate": 6.98e-06,
"loss": 0.0762,
"step": 350
},
{
"epoch": 0.2369980250164582,
"grad_norm": 0.9801005721092224,
"learning_rate": 7.180000000000001e-06,
"loss": 0.0801,
"step": 360
},
{
"epoch": 0.2435813034891376,
"grad_norm": 0.6593508124351501,
"learning_rate": 7.3800000000000005e-06,
"loss": 0.0662,
"step": 370
},
{
"epoch": 0.250164581961817,
"grad_norm": 0.5676178932189941,
"learning_rate": 7.580000000000001e-06,
"loss": 0.0687,
"step": 380
},
{
"epoch": 0.2567478604344964,
"grad_norm": 0.8020779490470886,
"learning_rate": 7.78e-06,
"loss": 0.0737,
"step": 390
},
{
"epoch": 0.2633311389071758,
"grad_norm": 0.8782005310058594,
"learning_rate": 7.98e-06,
"loss": 0.061,
"step": 400
},
{
"epoch": 0.26991441737985516,
"grad_norm": 0.7987250089645386,
"learning_rate": 8.18e-06,
"loss": 0.0643,
"step": 410
},
{
"epoch": 0.2764976958525346,
"grad_norm": 0.8046519160270691,
"learning_rate": 8.380000000000001e-06,
"loss": 0.0758,
"step": 420
},
{
"epoch": 0.28308097432521395,
"grad_norm": 0.7970651984214783,
"learning_rate": 8.580000000000001e-06,
"loss": 0.066,
"step": 430
},
{
"epoch": 0.2896642527978934,
"grad_norm": 0.6445233225822449,
"learning_rate": 8.78e-06,
"loss": 0.0565,
"step": 440
},
{
"epoch": 0.29624753127057274,
"grad_norm": 0.9595804214477539,
"learning_rate": 8.98e-06,
"loss": 0.0611,
"step": 450
},
{
"epoch": 0.30283080974325216,
"grad_norm": 0.8208001852035522,
"learning_rate": 9.180000000000002e-06,
"loss": 0.0542,
"step": 460
},
{
"epoch": 0.3094140882159315,
"grad_norm": 0.819534957408905,
"learning_rate": 9.38e-06,
"loss": 0.0599,
"step": 470
},
{
"epoch": 0.31599736668861095,
"grad_norm": 0.6280789971351624,
"learning_rate": 9.58e-06,
"loss": 0.049,
"step": 480
},
{
"epoch": 0.3225806451612903,
"grad_norm": 0.9189261794090271,
"learning_rate": 9.78e-06,
"loss": 0.0547,
"step": 490
},
{
"epoch": 0.32916392363396973,
"grad_norm": 0.7780017256736755,
"learning_rate": 9.980000000000001e-06,
"loss": 0.0441,
"step": 500
},
{
"epoch": 0.3357472021066491,
"grad_norm": 0.9516146779060364,
"learning_rate": 1.018e-05,
"loss": 0.0518,
"step": 510
},
{
"epoch": 0.3423304805793285,
"grad_norm": 0.6099593639373779,
"learning_rate": 1.038e-05,
"loss": 0.0411,
"step": 520
},
{
"epoch": 0.3489137590520079,
"grad_norm": 0.8724141716957092,
"learning_rate": 1.058e-05,
"loss": 0.0495,
"step": 530
},
{
"epoch": 0.3554970375246873,
"grad_norm": 0.9118814468383789,
"learning_rate": 1.0780000000000002e-05,
"loss": 0.0652,
"step": 540
},
{
"epoch": 0.3620803159973667,
"grad_norm": 0.7452401518821716,
"learning_rate": 1.098e-05,
"loss": 0.0454,
"step": 550
},
{
"epoch": 0.3686635944700461,
"grad_norm": 0.5637426376342773,
"learning_rate": 1.118e-05,
"loss": 0.0495,
"step": 560
},
{
"epoch": 0.37524687294272546,
"grad_norm": 0.8102228045463562,
"learning_rate": 1.1380000000000001e-05,
"loss": 0.0578,
"step": 570
},
{
"epoch": 0.3818301514154049,
"grad_norm": 0.6218069791793823,
"learning_rate": 1.1580000000000001e-05,
"loss": 0.0488,
"step": 580
},
{
"epoch": 0.38841342988808425,
"grad_norm": 0.8140692114830017,
"learning_rate": 1.178e-05,
"loss": 0.0421,
"step": 590
},
{
"epoch": 0.39499670836076367,
"grad_norm": 0.6461628079414368,
"learning_rate": 1.198e-05,
"loss": 0.049,
"step": 600
},
{
"epoch": 0.40157998683344304,
"grad_norm": 0.7383702397346497,
"learning_rate": 1.2180000000000002e-05,
"loss": 0.0506,
"step": 610
},
{
"epoch": 0.40816326530612246,
"grad_norm": 0.7723643779754639,
"learning_rate": 1.238e-05,
"loss": 0.0469,
"step": 620
},
{
"epoch": 0.4147465437788018,
"grad_norm": 0.7685538530349731,
"learning_rate": 1.258e-05,
"loss": 0.0473,
"step": 630
},
{
"epoch": 0.42132982225148125,
"grad_norm": 0.8282983303070068,
"learning_rate": 1.278e-05,
"loss": 0.0376,
"step": 640
},
{
"epoch": 0.4279131007241606,
"grad_norm": 0.6809481382369995,
"learning_rate": 1.2980000000000001e-05,
"loss": 0.0442,
"step": 650
},
{
"epoch": 0.43449637919684003,
"grad_norm": 0.6351516842842102,
"learning_rate": 1.3180000000000001e-05,
"loss": 0.0429,
"step": 660
},
{
"epoch": 0.4410796576695194,
"grad_norm": 0.6493868827819824,
"learning_rate": 1.338e-05,
"loss": 0.0339,
"step": 670
},
{
"epoch": 0.4476629361421988,
"grad_norm": 0.67973792552948,
"learning_rate": 1.358e-05,
"loss": 0.0481,
"step": 680
},
{
"epoch": 0.4542462146148782,
"grad_norm": 0.6788127422332764,
"learning_rate": 1.3780000000000002e-05,
"loss": 0.0327,
"step": 690
},
{
"epoch": 0.4608294930875576,
"grad_norm": 0.6731234192848206,
"learning_rate": 1.3980000000000002e-05,
"loss": 0.0363,
"step": 700
},
{
"epoch": 0.467412771560237,
"grad_norm": 0.884258508682251,
"learning_rate": 1.4180000000000001e-05,
"loss": 0.0383,
"step": 710
},
{
"epoch": 0.4739960500329164,
"grad_norm": 0.8611034154891968,
"learning_rate": 1.4380000000000001e-05,
"loss": 0.0347,
"step": 720
},
{
"epoch": 0.48057932850559576,
"grad_norm": 0.7222265005111694,
"learning_rate": 1.4580000000000003e-05,
"loss": 0.041,
"step": 730
},
{
"epoch": 0.4871626069782752,
"grad_norm": 0.6114519238471985,
"learning_rate": 1.4779999999999999e-05,
"loss": 0.0374,
"step": 740
},
{
"epoch": 0.4937458854509546,
"grad_norm": 0.6579416990280151,
"learning_rate": 1.4979999999999999e-05,
"loss": 0.035,
"step": 750
},
{
"epoch": 0.500329163923634,
"grad_norm": 0.8596507906913757,
"learning_rate": 1.518e-05,
"loss": 0.0372,
"step": 760
},
{
"epoch": 0.5069124423963134,
"grad_norm": 0.7170683145523071,
"learning_rate": 1.538e-05,
"loss": 0.0375,
"step": 770
},
{
"epoch": 0.5134957208689928,
"grad_norm": 0.639025092124939,
"learning_rate": 1.558e-05,
"loss": 0.0381,
"step": 780
},
{
"epoch": 0.5200789993416721,
"grad_norm": 1.0290495157241821,
"learning_rate": 1.578e-05,
"loss": 0.0299,
"step": 790
},
{
"epoch": 0.5266622778143516,
"grad_norm": 0.6768186092376709,
"learning_rate": 1.598e-05,
"loss": 0.0388,
"step": 800
},
{
"epoch": 0.533245556287031,
"grad_norm": 0.6404261589050293,
"learning_rate": 1.618e-05,
"loss": 0.0313,
"step": 810
},
{
"epoch": 0.5398288347597103,
"grad_norm": 0.8173214793205261,
"learning_rate": 1.6380000000000002e-05,
"loss": 0.0537,
"step": 820
},
{
"epoch": 0.5464121132323897,
"grad_norm": 0.6500388383865356,
"learning_rate": 1.658e-05,
"loss": 0.0391,
"step": 830
},
{
"epoch": 0.5529953917050692,
"grad_norm": 0.7786275148391724,
"learning_rate": 1.6780000000000002e-05,
"loss": 0.0303,
"step": 840
},
{
"epoch": 0.5595786701777485,
"grad_norm": 0.5436859726905823,
"learning_rate": 1.698e-05,
"loss": 0.0322,
"step": 850
},
{
"epoch": 0.5661619486504279,
"grad_norm": 0.6951898336410522,
"learning_rate": 1.718e-05,
"loss": 0.0398,
"step": 860
},
{
"epoch": 0.5727452271231073,
"grad_norm": 0.7615037560462952,
"learning_rate": 1.7380000000000003e-05,
"loss": 0.0327,
"step": 870
},
{
"epoch": 0.5793285055957867,
"grad_norm": 0.7827300429344177,
"learning_rate": 1.758e-05,
"loss": 0.0367,
"step": 880
},
{
"epoch": 0.5859117840684661,
"grad_norm": 1.0183961391448975,
"learning_rate": 1.7780000000000003e-05,
"loss": 0.0379,
"step": 890
},
{
"epoch": 0.5924950625411455,
"grad_norm": 0.8651902675628662,
"learning_rate": 1.798e-05,
"loss": 0.0338,
"step": 900
},
{
"epoch": 0.5990783410138248,
"grad_norm": 0.7739461660385132,
"learning_rate": 1.818e-05,
"loss": 0.0344,
"step": 910
},
{
"epoch": 0.6056616194865043,
"grad_norm": 0.6005399227142334,
"learning_rate": 1.838e-05,
"loss": 0.0407,
"step": 920
},
{
"epoch": 0.6122448979591837,
"grad_norm": 0.732636570930481,
"learning_rate": 1.858e-05,
"loss": 0.0471,
"step": 930
},
{
"epoch": 0.618828176431863,
"grad_norm": 0.9913577437400818,
"learning_rate": 1.878e-05,
"loss": 0.0308,
"step": 940
},
{
"epoch": 0.6254114549045424,
"grad_norm": 0.6545664072036743,
"learning_rate": 1.898e-05,
"loss": 0.0327,
"step": 950
},
{
"epoch": 0.6319947333772219,
"grad_norm": 0.6586413979530334,
"learning_rate": 1.918e-05,
"loss": 0.0397,
"step": 960
},
{
"epoch": 0.6385780118499013,
"grad_norm": 1.1130317449569702,
"learning_rate": 1.938e-05,
"loss": 0.0321,
"step": 970
},
{
"epoch": 0.6451612903225806,
"grad_norm": 0.636530876159668,
"learning_rate": 1.9580000000000002e-05,
"loss": 0.035,
"step": 980
},
{
"epoch": 0.65174456879526,
"grad_norm": 0.5563338398933411,
"learning_rate": 1.978e-05,
"loss": 0.0292,
"step": 990
},
{
"epoch": 0.6583278472679395,
"grad_norm": 0.924923300743103,
"learning_rate": 1.9980000000000002e-05,
"loss": 0.0304,
"step": 1000
},
{
"epoch": 0.6649111257406188,
"grad_norm": 0.5255535840988159,
"learning_rate": 2.0180000000000003e-05,
"loss": 0.0383,
"step": 1010
},
{
"epoch": 0.6714944042132982,
"grad_norm": 0.5128383636474609,
"learning_rate": 2.038e-05,
"loss": 0.0314,
"step": 1020
},
{
"epoch": 0.6780776826859776,
"grad_norm": 0.7940648794174194,
"learning_rate": 2.0580000000000003e-05,
"loss": 0.0323,
"step": 1030
},
{
"epoch": 0.684660961158657,
"grad_norm": 0.6323310732841492,
"learning_rate": 2.078e-05,
"loss": 0.0275,
"step": 1040
},
{
"epoch": 0.6912442396313364,
"grad_norm": 0.6572607755661011,
"learning_rate": 2.098e-05,
"loss": 0.0299,
"step": 1050
},
{
"epoch": 0.6978275181040158,
"grad_norm": 0.919588029384613,
"learning_rate": 2.118e-05,
"loss": 0.0385,
"step": 1060
},
{
"epoch": 0.7044107965766951,
"grad_norm": 0.4392774999141693,
"learning_rate": 2.138e-05,
"loss": 0.0267,
"step": 1070
},
{
"epoch": 0.7109940750493746,
"grad_norm": 0.6294967532157898,
"learning_rate": 2.158e-05,
"loss": 0.0331,
"step": 1080
},
{
"epoch": 0.717577353522054,
"grad_norm": 0.7768688797950745,
"learning_rate": 2.178e-05,
"loss": 0.0302,
"step": 1090
},
{
"epoch": 0.7241606319947334,
"grad_norm": 0.43419012427330017,
"learning_rate": 2.198e-05,
"loss": 0.0257,
"step": 1100
},
{
"epoch": 0.7307439104674127,
"grad_norm": 0.7664608359336853,
"learning_rate": 2.218e-05,
"loss": 0.0267,
"step": 1110
},
{
"epoch": 0.7373271889400922,
"grad_norm": 0.46237802505493164,
"learning_rate": 2.2380000000000003e-05,
"loss": 0.0265,
"step": 1120
},
{
"epoch": 0.7439104674127716,
"grad_norm": 0.5145699381828308,
"learning_rate": 2.258e-05,
"loss": 0.0309,
"step": 1130
},
{
"epoch": 0.7504937458854509,
"grad_norm": 0.7017679810523987,
"learning_rate": 2.2780000000000002e-05,
"loss": 0.026,
"step": 1140
},
{
"epoch": 0.7570770243581304,
"grad_norm": 0.6170619130134583,
"learning_rate": 2.298e-05,
"loss": 0.0317,
"step": 1150
},
{
"epoch": 0.7636603028308098,
"grad_norm": 0.3856109380722046,
"learning_rate": 2.318e-05,
"loss": 0.0225,
"step": 1160
},
{
"epoch": 0.7702435813034891,
"grad_norm": 0.5162245631217957,
"learning_rate": 2.3380000000000003e-05,
"loss": 0.03,
"step": 1170
},
{
"epoch": 0.7768268597761685,
"grad_norm": 0.6680182218551636,
"learning_rate": 2.358e-05,
"loss": 0.0285,
"step": 1180
},
{
"epoch": 0.783410138248848,
"grad_norm": 0.5039960145950317,
"learning_rate": 2.3780000000000003e-05,
"loss": 0.028,
"step": 1190
},
{
"epoch": 0.7899934167215273,
"grad_norm": 0.5530655980110168,
"learning_rate": 2.398e-05,
"loss": 0.0281,
"step": 1200
},
{
"epoch": 0.7965766951942067,
"grad_norm": 0.5797922015190125,
"learning_rate": 2.418e-05,
"loss": 0.0371,
"step": 1210
},
{
"epoch": 0.8031599736668861,
"grad_norm": 0.8076927065849304,
"learning_rate": 2.438e-05,
"loss": 0.0342,
"step": 1220
},
{
"epoch": 0.8097432521395656,
"grad_norm": 0.5163640975952148,
"learning_rate": 2.4580000000000002e-05,
"loss": 0.027,
"step": 1230
},
{
"epoch": 0.8163265306122449,
"grad_norm": 0.7976269125938416,
"learning_rate": 2.478e-05,
"loss": 0.0342,
"step": 1240
},
{
"epoch": 0.8229098090849243,
"grad_norm": 0.6813076138496399,
"learning_rate": 2.498e-05,
"loss": 0.025,
"step": 1250
},
{
"epoch": 0.8294930875576036,
"grad_norm": 0.49693650007247925,
"learning_rate": 2.5180000000000003e-05,
"loss": 0.0248,
"step": 1260
},
{
"epoch": 0.8360763660302831,
"grad_norm": 0.76878422498703,
"learning_rate": 2.5380000000000004e-05,
"loss": 0.0272,
"step": 1270
},
{
"epoch": 0.8426596445029625,
"grad_norm": 0.6460802555084229,
"learning_rate": 2.5580000000000002e-05,
"loss": 0.0305,
"step": 1280
},
{
"epoch": 0.8492429229756419,
"grad_norm": 0.5432111620903015,
"learning_rate": 2.5779999999999997e-05,
"loss": 0.0283,
"step": 1290
},
{
"epoch": 0.8558262014483212,
"grad_norm": 0.7741048336029053,
"learning_rate": 2.598e-05,
"loss": 0.0343,
"step": 1300
},
{
"epoch": 0.8624094799210007,
"grad_norm": 0.4764859080314636,
"learning_rate": 2.618e-05,
"loss": 0.0263,
"step": 1310
},
{
"epoch": 0.8689927583936801,
"grad_norm": 0.6883785724639893,
"learning_rate": 2.6379999999999998e-05,
"loss": 0.0341,
"step": 1320
},
{
"epoch": 0.8755760368663594,
"grad_norm": 0.7809045910835266,
"learning_rate": 2.658e-05,
"loss": 0.0326,
"step": 1330
},
{
"epoch": 0.8821593153390388,
"grad_norm": 0.6559585332870483,
"learning_rate": 2.678e-05,
"loss": 0.0392,
"step": 1340
},
{
"epoch": 0.8887425938117183,
"grad_norm": 0.5097976326942444,
"learning_rate": 2.698e-05,
"loss": 0.0297,
"step": 1350
},
{
"epoch": 0.8953258722843976,
"grad_norm": 0.6472064256668091,
"learning_rate": 2.718e-05,
"loss": 0.0346,
"step": 1360
},
{
"epoch": 0.901909150757077,
"grad_norm": 0.6113309264183044,
"learning_rate": 2.738e-05,
"loss": 0.0269,
"step": 1370
},
{
"epoch": 0.9084924292297564,
"grad_norm": 0.6883881092071533,
"learning_rate": 2.758e-05,
"loss": 0.0299,
"step": 1380
},
{
"epoch": 0.9150757077024358,
"grad_norm": 0.6283571124076843,
"learning_rate": 2.778e-05,
"loss": 0.0342,
"step": 1390
},
{
"epoch": 0.9216589861751152,
"grad_norm": 0.7837399840354919,
"learning_rate": 2.798e-05,
"loss": 0.0283,
"step": 1400
},
{
"epoch": 0.9282422646477946,
"grad_norm": 0.6661757230758667,
"learning_rate": 2.818e-05,
"loss": 0.039,
"step": 1410
},
{
"epoch": 0.934825543120474,
"grad_norm": 0.6134967803955078,
"learning_rate": 2.8380000000000003e-05,
"loss": 0.03,
"step": 1420
},
{
"epoch": 0.9414088215931534,
"grad_norm": 0.6038786172866821,
"learning_rate": 2.858e-05,
"loss": 0.0308,
"step": 1430
},
{
"epoch": 0.9479921000658328,
"grad_norm": 0.80397629737854,
"learning_rate": 2.8780000000000002e-05,
"loss": 0.0265,
"step": 1440
},
{
"epoch": 0.9545753785385122,
"grad_norm": 0.6314727067947388,
"learning_rate": 2.898e-05,
"loss": 0.0331,
"step": 1450
},
{
"epoch": 0.9611586570111915,
"grad_norm": 0.49580225348472595,
"learning_rate": 2.9180000000000002e-05,
"loss": 0.0264,
"step": 1460
},
{
"epoch": 0.967741935483871,
"grad_norm": 0.6174570322036743,
"learning_rate": 2.9380000000000003e-05,
"loss": 0.0213,
"step": 1470
},
{
"epoch": 0.9743252139565504,
"grad_norm": 0.47390204668045044,
"learning_rate": 2.958e-05,
"loss": 0.0265,
"step": 1480
},
{
"epoch": 0.9809084924292297,
"grad_norm": 0.640872061252594,
"learning_rate": 2.9780000000000003e-05,
"loss": 0.0254,
"step": 1490
},
{
"epoch": 0.9874917709019092,
"grad_norm": 0.5013565421104431,
"learning_rate": 2.998e-05,
"loss": 0.0302,
"step": 1500
},
{
"epoch": 0.9940750493745886,
"grad_norm": 0.3904229998588562,
"learning_rate": 3.0180000000000002e-05,
"loss": 0.0214,
"step": 1510
},
{
"epoch": 1.000658327847268,
"grad_norm": 0.8522512912750244,
"learning_rate": 3.0380000000000004e-05,
"loss": 0.0265,
"step": 1520
},
{
"epoch": 1.0072416063199474,
"grad_norm": 0.4965933859348297,
"learning_rate": 3.058e-05,
"loss": 0.0245,
"step": 1530
},
{
"epoch": 1.0138248847926268,
"grad_norm": 0.5680098533630371,
"learning_rate": 3.078e-05,
"loss": 0.0246,
"step": 1540
},
{
"epoch": 1.0204081632653061,
"grad_norm": 0.4275563657283783,
"learning_rate": 3.0980000000000005e-05,
"loss": 0.0278,
"step": 1550
},
{
"epoch": 1.0269914417379855,
"grad_norm": 0.30552053451538086,
"learning_rate": 3.118e-05,
"loss": 0.0193,
"step": 1560
},
{
"epoch": 1.0335747202106649,
"grad_norm": 0.5475063323974609,
"learning_rate": 3.138e-05,
"loss": 0.0204,
"step": 1570
},
{
"epoch": 1.0401579986833442,
"grad_norm": 0.614340603351593,
"learning_rate": 3.1580000000000006e-05,
"loss": 0.0229,
"step": 1580
},
{
"epoch": 1.0467412771560236,
"grad_norm": 0.612955629825592,
"learning_rate": 3.1780000000000004e-05,
"loss": 0.0318,
"step": 1590
},
{
"epoch": 1.0533245556287032,
"grad_norm": 0.579261064529419,
"learning_rate": 3.198e-05,
"loss": 0.0252,
"step": 1600
},
{
"epoch": 1.0599078341013826,
"grad_norm": 0.6995881199836731,
"learning_rate": 3.218e-05,
"loss": 0.0309,
"step": 1610
},
{
"epoch": 1.066491112574062,
"grad_norm": 0.5042811632156372,
"learning_rate": 3.238e-05,
"loss": 0.0256,
"step": 1620
},
{
"epoch": 1.0730743910467413,
"grad_norm": 0.8101025223731995,
"learning_rate": 3.2579999999999996e-05,
"loss": 0.0286,
"step": 1630
},
{
"epoch": 1.0796576695194207,
"grad_norm": 0.47654229402542114,
"learning_rate": 3.278e-05,
"loss": 0.0221,
"step": 1640
},
{
"epoch": 1.0862409479921,
"grad_norm": 0.6447846293449402,
"learning_rate": 3.298e-05,
"loss": 0.0208,
"step": 1650
},
{
"epoch": 1.0928242264647794,
"grad_norm": 0.4716355800628662,
"learning_rate": 3.318e-05,
"loss": 0.033,
"step": 1660
},
{
"epoch": 1.0994075049374588,
"grad_norm": 0.7551379799842834,
"learning_rate": 3.338e-05,
"loss": 0.023,
"step": 1670
},
{
"epoch": 1.1059907834101383,
"grad_norm": 0.5727908611297607,
"learning_rate": 3.358e-05,
"loss": 0.0237,
"step": 1680
},
{
"epoch": 1.1125740618828177,
"grad_norm": 0.6794710755348206,
"learning_rate": 3.378e-05,
"loss": 0.0377,
"step": 1690
},
{
"epoch": 1.119157340355497,
"grad_norm": 0.8649814128875732,
"learning_rate": 3.398e-05,
"loss": 0.0265,
"step": 1700
},
{
"epoch": 1.1257406188281764,
"grad_norm": 0.7347370386123657,
"learning_rate": 3.418e-05,
"loss": 0.0246,
"step": 1710
},
{
"epoch": 1.1323238973008558,
"grad_norm": 0.48861828446388245,
"learning_rate": 3.438e-05,
"loss": 0.0288,
"step": 1720
},
{
"epoch": 1.1389071757735352,
"grad_norm": 0.5244237780570984,
"learning_rate": 3.4580000000000004e-05,
"loss": 0.0237,
"step": 1730
},
{
"epoch": 1.1454904542462145,
"grad_norm": 0.6242116689682007,
"learning_rate": 3.478e-05,
"loss": 0.0333,
"step": 1740
},
{
"epoch": 1.1520737327188941,
"grad_norm": 0.5760864615440369,
"learning_rate": 3.498e-05,
"loss": 0.023,
"step": 1750
},
{
"epoch": 1.1586570111915735,
"grad_norm": 0.622750461101532,
"learning_rate": 3.518e-05,
"loss": 0.0231,
"step": 1760
},
{
"epoch": 1.1652402896642529,
"grad_norm": 0.5885168313980103,
"learning_rate": 3.5380000000000003e-05,
"loss": 0.0239,
"step": 1770
},
{
"epoch": 1.1718235681369322,
"grad_norm": 0.5404341816902161,
"learning_rate": 3.558e-05,
"loss": 0.0211,
"step": 1780
},
{
"epoch": 1.1784068466096116,
"grad_norm": 0.654180645942688,
"learning_rate": 3.578e-05,
"loss": 0.0307,
"step": 1790
},
{
"epoch": 1.184990125082291,
"grad_norm": 0.5858113765716553,
"learning_rate": 3.5980000000000004e-05,
"loss": 0.0313,
"step": 1800
},
{
"epoch": 1.1915734035549703,
"grad_norm": 0.4892871677875519,
"learning_rate": 3.618e-05,
"loss": 0.0247,
"step": 1810
},
{
"epoch": 1.1981566820276497,
"grad_norm": 0.4681117534637451,
"learning_rate": 3.638e-05,
"loss": 0.0207,
"step": 1820
},
{
"epoch": 1.204739960500329,
"grad_norm": 0.6474509835243225,
"learning_rate": 3.6580000000000006e-05,
"loss": 0.0251,
"step": 1830
},
{
"epoch": 1.2113232389730086,
"grad_norm": 0.6035652756690979,
"learning_rate": 3.6780000000000004e-05,
"loss": 0.0268,
"step": 1840
},
{
"epoch": 1.217906517445688,
"grad_norm": 0.5153449773788452,
"learning_rate": 3.698e-05,
"loss": 0.0237,
"step": 1850
},
{
"epoch": 1.2244897959183674,
"grad_norm": 0.6607559323310852,
"learning_rate": 3.7180000000000007e-05,
"loss": 0.027,
"step": 1860
},
{
"epoch": 1.2310730743910467,
"grad_norm": 0.6436736583709717,
"learning_rate": 3.7380000000000005e-05,
"loss": 0.0217,
"step": 1870
},
{
"epoch": 1.237656352863726,
"grad_norm": 0.43171143531799316,
"learning_rate": 3.758e-05,
"loss": 0.0259,
"step": 1880
},
{
"epoch": 1.2442396313364055,
"grad_norm": 0.5309972167015076,
"learning_rate": 3.778000000000001e-05,
"loss": 0.0292,
"step": 1890
},
{
"epoch": 1.2508229098090848,
"grad_norm": 0.6137271523475647,
"learning_rate": 3.7980000000000006e-05,
"loss": 0.0312,
"step": 1900
},
{
"epoch": 1.2574061882817644,
"grad_norm": 0.8185164332389832,
"learning_rate": 3.818e-05,
"loss": 0.0268,
"step": 1910
},
{
"epoch": 1.2639894667544438,
"grad_norm": 0.4287111163139343,
"learning_rate": 3.838e-05,
"loss": 0.0219,
"step": 1920
},
{
"epoch": 1.2705727452271232,
"grad_norm": 0.6617496013641357,
"learning_rate": 3.858e-05,
"loss": 0.0282,
"step": 1930
},
{
"epoch": 1.2771560236998025,
"grad_norm": 0.4894842803478241,
"learning_rate": 3.878e-05,
"loss": 0.0202,
"step": 1940
},
{
"epoch": 1.2837393021724819,
"grad_norm": 0.5442696213722229,
"learning_rate": 3.898e-05,
"loss": 0.0283,
"step": 1950
},
{
"epoch": 1.2903225806451613,
"grad_norm": 0.597413182258606,
"learning_rate": 3.918e-05,
"loss": 0.0277,
"step": 1960
},
{
"epoch": 1.2969058591178406,
"grad_norm": 0.49053552746772766,
"learning_rate": 3.938e-05,
"loss": 0.0249,
"step": 1970
},
{
"epoch": 1.3034891375905202,
"grad_norm": 0.475212037563324,
"learning_rate": 3.958e-05,
"loss": 0.0291,
"step": 1980
},
{
"epoch": 1.3100724160631994,
"grad_norm": 0.6037053465843201,
"learning_rate": 3.978e-05,
"loss": 0.0235,
"step": 1990
},
{
"epoch": 1.316655694535879,
"grad_norm": 0.5971664190292358,
"learning_rate": 3.998e-05,
"loss": 0.0278,
"step": 2000
},
{
"epoch": 1.3232389730085583,
"grad_norm": 0.5502041578292847,
"learning_rate": 4.018e-05,
"loss": 0.0257,
"step": 2010
},
{
"epoch": 1.3298222514812377,
"grad_norm": 0.5469876527786255,
"learning_rate": 4.038e-05,
"loss": 0.0231,
"step": 2020
},
{
"epoch": 1.336405529953917,
"grad_norm": 0.5384500622749329,
"learning_rate": 4.058e-05,
"loss": 0.0252,
"step": 2030
},
{
"epoch": 1.3429888084265964,
"grad_norm": 0.5213717222213745,
"learning_rate": 4.078e-05,
"loss": 0.0236,
"step": 2040
},
{
"epoch": 1.3495720868992758,
"grad_norm": 0.5849093198776245,
"learning_rate": 4.0980000000000004e-05,
"loss": 0.0239,
"step": 2050
},
{
"epoch": 1.3561553653719551,
"grad_norm": 0.4954252541065216,
"learning_rate": 4.118e-05,
"loss": 0.0276,
"step": 2060
},
{
"epoch": 1.3627386438446347,
"grad_norm": 0.5582159757614136,
"learning_rate": 4.138e-05,
"loss": 0.0262,
"step": 2070
},
{
"epoch": 1.369321922317314,
"grad_norm": 0.5717566013336182,
"learning_rate": 4.1580000000000005e-05,
"loss": 0.0274,
"step": 2080
},
{
"epoch": 1.3759052007899935,
"grad_norm": 0.6252191066741943,
"learning_rate": 4.178e-05,
"loss": 0.0235,
"step": 2090
},
{
"epoch": 1.3824884792626728,
"grad_norm": 0.546804666519165,
"learning_rate": 4.198e-05,
"loss": 0.023,
"step": 2100
},
{
"epoch": 1.3890717577353522,
"grad_norm": 0.4263172149658203,
"learning_rate": 4.2180000000000006e-05,
"loss": 0.0284,
"step": 2110
},
{
"epoch": 1.3956550362080316,
"grad_norm": 0.5470091104507446,
"learning_rate": 4.2380000000000004e-05,
"loss": 0.0357,
"step": 2120
},
{
"epoch": 1.402238314680711,
"grad_norm": 0.6108530163764954,
"learning_rate": 4.258e-05,
"loss": 0.0284,
"step": 2130
},
{
"epoch": 1.4088215931533905,
"grad_norm": 0.4740239977836609,
"learning_rate": 4.278e-05,
"loss": 0.0229,
"step": 2140
},
{
"epoch": 1.4154048716260696,
"grad_norm": 0.5250336527824402,
"learning_rate": 4.2980000000000005e-05,
"loss": 0.0216,
"step": 2150
},
{
"epoch": 1.4219881500987492,
"grad_norm": 0.4905194938182831,
"learning_rate": 4.318e-05,
"loss": 0.0317,
"step": 2160
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.46446430683135986,
"learning_rate": 4.338e-05,
"loss": 0.0203,
"step": 2170
},
{
"epoch": 1.435154707044108,
"grad_norm": 0.3479264974594116,
"learning_rate": 4.3580000000000006e-05,
"loss": 0.0165,
"step": 2180
},
{
"epoch": 1.4417379855167873,
"grad_norm": 0.5235578417778015,
"learning_rate": 4.3780000000000004e-05,
"loss": 0.0222,
"step": 2190
},
{
"epoch": 1.4483212639894667,
"grad_norm": 0.35637351870536804,
"learning_rate": 4.398e-05,
"loss": 0.0229,
"step": 2200
},
{
"epoch": 1.454904542462146,
"grad_norm": 0.4556591510772705,
"learning_rate": 4.418000000000001e-05,
"loss": 0.0269,
"step": 2210
},
{
"epoch": 1.4614878209348254,
"grad_norm": 0.6395583748817444,
"learning_rate": 4.438e-05,
"loss": 0.0263,
"step": 2220
},
{
"epoch": 1.468071099407505,
"grad_norm": 0.6540268063545227,
"learning_rate": 4.458e-05,
"loss": 0.0301,
"step": 2230
},
{
"epoch": 1.4746543778801844,
"grad_norm": 0.3856101334095001,
"learning_rate": 4.478e-05,
"loss": 0.0254,
"step": 2240
},
{
"epoch": 1.4812376563528638,
"grad_norm": 0.3780037760734558,
"learning_rate": 4.498e-05,
"loss": 0.0251,
"step": 2250
},
{
"epoch": 1.4878209348255431,
"grad_norm": 0.5442138314247131,
"learning_rate": 4.518e-05,
"loss": 0.0334,
"step": 2260
},
{
"epoch": 1.4944042132982225,
"grad_norm": 0.44116953015327454,
"learning_rate": 4.538e-05,
"loss": 0.0309,
"step": 2270
},
{
"epoch": 1.500987491770902,
"grad_norm": 0.6030621528625488,
"learning_rate": 4.558e-05,
"loss": 0.0279,
"step": 2280
},
{
"epoch": 1.5075707702435812,
"grad_norm": 0.500163197517395,
"learning_rate": 4.578e-05,
"loss": 0.0311,
"step": 2290
},
{
"epoch": 1.5141540487162608,
"grad_norm": 0.5039265155792236,
"learning_rate": 4.5980000000000004e-05,
"loss": 0.0262,
"step": 2300
},
{
"epoch": 1.52073732718894,
"grad_norm": 0.6298384666442871,
"learning_rate": 4.618e-05,
"loss": 0.0241,
"step": 2310
},
{
"epoch": 1.5273206056616195,
"grad_norm": 0.6595116853713989,
"learning_rate": 4.638e-05,
"loss": 0.0298,
"step": 2320
},
{
"epoch": 1.533903884134299,
"grad_norm": 0.41362178325653076,
"learning_rate": 4.6580000000000005e-05,
"loss": 0.0256,
"step": 2330
},
{
"epoch": 1.5404871626069783,
"grad_norm": 0.3955932557582855,
"learning_rate": 4.678e-05,
"loss": 0.0268,
"step": 2340
},
{
"epoch": 1.5470704410796576,
"grad_norm": 0.6363015174865723,
"learning_rate": 4.698e-05,
"loss": 0.0276,
"step": 2350
},
{
"epoch": 1.553653719552337,
"grad_norm": 0.5182825326919556,
"learning_rate": 4.718e-05,
"loss": 0.0254,
"step": 2360
},
{
"epoch": 1.5602369980250166,
"grad_norm": 0.46310341358184814,
"learning_rate": 4.7380000000000004e-05,
"loss": 0.0282,
"step": 2370
},
{
"epoch": 1.5668202764976957,
"grad_norm": 0.4318999946117401,
"learning_rate": 4.758e-05,
"loss": 0.0232,
"step": 2380
},
{
"epoch": 1.5734035549703753,
"grad_norm": 0.42124977707862854,
"learning_rate": 4.778e-05,
"loss": 0.0274,
"step": 2390
},
{
"epoch": 1.5799868334430547,
"grad_norm": 0.39340540766716003,
"learning_rate": 4.7980000000000005e-05,
"loss": 0.0223,
"step": 2400
},
{
"epoch": 1.586570111915734,
"grad_norm": 0.5991842150688171,
"learning_rate": 4.818e-05,
"loss": 0.0296,
"step": 2410
},
{
"epoch": 1.5931533903884134,
"grad_norm": 0.47497838735580444,
"learning_rate": 4.838e-05,
"loss": 0.0286,
"step": 2420
},
{
"epoch": 1.5997366688610928,
"grad_norm": 0.5395841002464294,
"learning_rate": 4.8580000000000006e-05,
"loss": 0.0214,
"step": 2430
},
{
"epoch": 1.6063199473337724,
"grad_norm": 0.620520293712616,
"learning_rate": 4.8780000000000004e-05,
"loss": 0.0256,
"step": 2440
},
{
"epoch": 1.6129032258064515,
"grad_norm": 0.567721962928772,
"learning_rate": 4.898e-05,
"loss": 0.0308,
"step": 2450
},
{
"epoch": 1.619486504279131,
"grad_norm": 0.6244537234306335,
"learning_rate": 4.918000000000001e-05,
"loss": 0.0264,
"step": 2460
},
{
"epoch": 1.6260697827518102,
"grad_norm": 0.616301953792572,
"learning_rate": 4.9380000000000005e-05,
"loss": 0.0225,
"step": 2470
},
{
"epoch": 1.6326530612244898,
"grad_norm": 0.3518458306789398,
"learning_rate": 4.958e-05,
"loss": 0.0248,
"step": 2480
},
{
"epoch": 1.6392363396971692,
"grad_norm": 0.5337648391723633,
"learning_rate": 4.978e-05,
"loss": 0.0343,
"step": 2490
},
{
"epoch": 1.6458196181698486,
"grad_norm": 0.5066148042678833,
"learning_rate": 4.9980000000000006e-05,
"loss": 0.0284,
"step": 2500
},
{
"epoch": 1.652402896642528,
"grad_norm": 0.6494326591491699,
"learning_rate": 5.0180000000000004e-05,
"loss": 0.0255,
"step": 2510
},
{
"epoch": 1.6589861751152073,
"grad_norm": 0.5532274842262268,
"learning_rate": 5.038e-05,
"loss": 0.0237,
"step": 2520
},
{
"epoch": 1.6655694535878869,
"grad_norm": 0.3537006974220276,
"learning_rate": 5.058000000000001e-05,
"loss": 0.0211,
"step": 2530
},
{
"epoch": 1.672152732060566,
"grad_norm": 0.4848959147930145,
"learning_rate": 5.0780000000000005e-05,
"loss": 0.0317,
"step": 2540
},
{
"epoch": 1.6787360105332456,
"grad_norm": 0.5956111550331116,
"learning_rate": 5.098e-05,
"loss": 0.0226,
"step": 2550
},
{
"epoch": 1.685319289005925,
"grad_norm": 0.43243521451950073,
"learning_rate": 5.118000000000001e-05,
"loss": 0.0297,
"step": 2560
},
{
"epoch": 1.6919025674786043,
"grad_norm": 0.3945485055446625,
"learning_rate": 5.1380000000000006e-05,
"loss": 0.0292,
"step": 2570
},
{
"epoch": 1.6984858459512837,
"grad_norm": 0.6458433270454407,
"learning_rate": 5.1580000000000004e-05,
"loss": 0.0295,
"step": 2580
},
{
"epoch": 1.705069124423963,
"grad_norm": 0.4463520646095276,
"learning_rate": 5.178000000000001e-05,
"loss": 0.0222,
"step": 2590
},
{
"epoch": 1.7116524028966427,
"grad_norm": 0.429592490196228,
"learning_rate": 5.198000000000001e-05,
"loss": 0.0302,
"step": 2600
},
{
"epoch": 1.7182356813693218,
"grad_norm": 0.47969916462898254,
"learning_rate": 5.2180000000000005e-05,
"loss": 0.0258,
"step": 2610
},
{
"epoch": 1.7248189598420014,
"grad_norm": 0.43585821986198425,
"learning_rate": 5.238000000000001e-05,
"loss": 0.0237,
"step": 2620
},
{
"epoch": 1.7314022383146808,
"grad_norm": 0.531635046005249,
"learning_rate": 5.258000000000001e-05,
"loss": 0.0269,
"step": 2630
},
{
"epoch": 1.7379855167873601,
"grad_norm": 0.4729037880897522,
"learning_rate": 5.2780000000000006e-05,
"loss": 0.0268,
"step": 2640
},
{
"epoch": 1.7445687952600395,
"grad_norm": 0.4483349025249481,
"learning_rate": 5.2980000000000004e-05,
"loss": 0.0219,
"step": 2650
},
{
"epoch": 1.7511520737327189,
"grad_norm": 0.4026774764060974,
"learning_rate": 5.318000000000001e-05,
"loss": 0.024,
"step": 2660
},
{
"epoch": 1.7577353522053984,
"grad_norm": 0.5400267243385315,
"learning_rate": 5.338000000000001e-05,
"loss": 0.0316,
"step": 2670
},
{
"epoch": 1.7643186306780776,
"grad_norm": 0.5105156898498535,
"learning_rate": 5.3580000000000005e-05,
"loss": 0.0224,
"step": 2680
},
{
"epoch": 1.7709019091507572,
"grad_norm": 0.5390294194221497,
"learning_rate": 5.378e-05,
"loss": 0.0222,
"step": 2690
},
{
"epoch": 1.7774851876234363,
"grad_norm": 0.42962446808815,
"learning_rate": 5.3979999999999995e-05,
"loss": 0.0244,
"step": 2700
},
{
"epoch": 1.784068466096116,
"grad_norm": 0.4586690366268158,
"learning_rate": 5.418e-05,
"loss": 0.0216,
"step": 2710
},
{
"epoch": 1.7906517445687953,
"grad_norm": 0.6453498601913452,
"learning_rate": 5.438e-05,
"loss": 0.0304,
"step": 2720
},
{
"epoch": 1.7972350230414746,
"grad_norm": 0.535807192325592,
"learning_rate": 5.4579999999999996e-05,
"loss": 0.0194,
"step": 2730
},
{
"epoch": 1.803818301514154,
"grad_norm": 0.5880657434463501,
"learning_rate": 5.478e-05,
"loss": 0.0228,
"step": 2740
},
{
"epoch": 1.8104015799868334,
"grad_norm": 0.6001516580581665,
"learning_rate": 5.498e-05,
"loss": 0.0245,
"step": 2750
},
{
"epoch": 1.816984858459513,
"grad_norm": 0.5279771089553833,
"learning_rate": 5.518e-05,
"loss": 0.0256,
"step": 2760
},
{
"epoch": 1.823568136932192,
"grad_norm": 0.4337400794029236,
"learning_rate": 5.538e-05,
"loss": 0.0205,
"step": 2770
},
{
"epoch": 1.8301514154048717,
"grad_norm": 0.5139671564102173,
"learning_rate": 5.558e-05,
"loss": 0.0224,
"step": 2780
},
{
"epoch": 1.836734693877551,
"grad_norm": 0.5578183531761169,
"learning_rate": 5.578e-05,
"loss": 0.0276,
"step": 2790
},
{
"epoch": 1.8433179723502304,
"grad_norm": 0.5291348099708557,
"learning_rate": 5.5979999999999996e-05,
"loss": 0.0258,
"step": 2800
},
{
"epoch": 1.8499012508229098,
"grad_norm": 0.5978127717971802,
"learning_rate": 5.618e-05,
"loss": 0.0225,
"step": 2810
},
{
"epoch": 1.8564845292955892,
"grad_norm": 0.5148075222969055,
"learning_rate": 5.638e-05,
"loss": 0.0201,
"step": 2820
},
{
"epoch": 1.8630678077682687,
"grad_norm": 0.3937118947505951,
"learning_rate": 5.658e-05,
"loss": 0.031,
"step": 2830
},
{
"epoch": 1.869651086240948,
"grad_norm": 0.6889894604682922,
"learning_rate": 5.678e-05,
"loss": 0.028,
"step": 2840
},
{
"epoch": 1.8762343647136275,
"grad_norm": 0.543656587600708,
"learning_rate": 5.698e-05,
"loss": 0.0247,
"step": 2850
},
{
"epoch": 1.8828176431863066,
"grad_norm": 0.4746217131614685,
"learning_rate": 5.718e-05,
"loss": 0.0275,
"step": 2860
},
{
"epoch": 1.8894009216589862,
"grad_norm": 0.5628865361213684,
"learning_rate": 5.738e-05,
"loss": 0.0328,
"step": 2870
},
{
"epoch": 1.8959842001316656,
"grad_norm": 0.41161057353019714,
"learning_rate": 5.758e-05,
"loss": 0.0232,
"step": 2880
},
{
"epoch": 1.902567478604345,
"grad_norm": 0.6279869675636292,
"learning_rate": 5.778e-05,
"loss": 0.0287,
"step": 2890
},
{
"epoch": 1.9091507570770243,
"grad_norm": 0.3861440122127533,
"learning_rate": 5.7980000000000004e-05,
"loss": 0.0249,
"step": 2900
},
{
"epoch": 1.9157340355497037,
"grad_norm": 0.5990503430366516,
"learning_rate": 5.818e-05,
"loss": 0.0237,
"step": 2910
},
{
"epoch": 1.9223173140223833,
"grad_norm": 0.4768123924732208,
"learning_rate": 5.838e-05,
"loss": 0.0245,
"step": 2920
},
{
"epoch": 1.9289005924950624,
"grad_norm": 0.4223645031452179,
"learning_rate": 5.858e-05,
"loss": 0.0227,
"step": 2930
},
{
"epoch": 1.935483870967742,
"grad_norm": 0.4944128692150116,
"learning_rate": 5.878e-05,
"loss": 0.03,
"step": 2940
},
{
"epoch": 1.9420671494404214,
"grad_norm": 0.3858146667480469,
"learning_rate": 5.898e-05,
"loss": 0.0337,
"step": 2950
},
{
"epoch": 1.9486504279131007,
"grad_norm": 0.41634422540664673,
"learning_rate": 5.918e-05,
"loss": 0.0234,
"step": 2960
},
{
"epoch": 1.95523370638578,
"grad_norm": 0.5550604462623596,
"learning_rate": 5.9380000000000004e-05,
"loss": 0.0236,
"step": 2970
},
{
"epoch": 1.9618169848584595,
"grad_norm": 0.427051842212677,
"learning_rate": 5.958e-05,
"loss": 0.0221,
"step": 2980
},
{
"epoch": 1.968400263331139,
"grad_norm": 0.48112764954566956,
"learning_rate": 5.978e-05,
"loss": 0.0272,
"step": 2990
},
{
"epoch": 1.9749835418038182,
"grad_norm": 0.6862286329269409,
"learning_rate": 5.9980000000000005e-05,
"loss": 0.0382,
"step": 3000
},
{
"epoch": 1.9815668202764978,
"grad_norm": 0.4057901203632355,
"learning_rate": 6.018e-05,
"loss": 0.0243,
"step": 3010
},
{
"epoch": 1.9881500987491771,
"grad_norm": 0.4857819378376007,
"learning_rate": 6.038e-05,
"loss": 0.0253,
"step": 3020
},
{
"epoch": 1.9947333772218565,
"grad_norm": 0.43498826026916504,
"learning_rate": 6.0580000000000006e-05,
"loss": 0.0249,
"step": 3030
},
{
"epoch": 2.001316655694536,
"grad_norm": 0.6843571066856384,
"learning_rate": 6.0780000000000004e-05,
"loss": 0.0279,
"step": 3040
},
{
"epoch": 2.0078999341672152,
"grad_norm": 0.5893099308013916,
"learning_rate": 6.098e-05,
"loss": 0.0308,
"step": 3050
},
{
"epoch": 2.014483212639895,
"grad_norm": 0.46817436814308167,
"learning_rate": 6.118000000000001e-05,
"loss": 0.0253,
"step": 3060
},
{
"epoch": 2.021066491112574,
"grad_norm": 0.6119463443756104,
"learning_rate": 6.138e-05,
"loss": 0.0276,
"step": 3070
},
{
"epoch": 2.0276497695852536,
"grad_norm": 0.6109670996665955,
"learning_rate": 6.158e-05,
"loss": 0.0385,
"step": 3080
},
{
"epoch": 2.0342330480579327,
"grad_norm": 0.4686706066131592,
"learning_rate": 6.178000000000001e-05,
"loss": 0.0275,
"step": 3090
},
{
"epoch": 2.0408163265306123,
"grad_norm": 0.36242279410362244,
"learning_rate": 6.198e-05,
"loss": 0.0246,
"step": 3100
},
{
"epoch": 2.0473996050032914,
"grad_norm": 0.5305617451667786,
"learning_rate": 6.218e-05,
"loss": 0.0252,
"step": 3110
},
{
"epoch": 2.053982883475971,
"grad_norm": 0.5172412395477295,
"learning_rate": 6.238000000000001e-05,
"loss": 0.0266,
"step": 3120
},
{
"epoch": 2.0605661619486506,
"grad_norm": 0.4706704914569855,
"learning_rate": 6.258e-05,
"loss": 0.0231,
"step": 3130
},
{
"epoch": 2.0671494404213298,
"grad_norm": 0.6271998286247253,
"learning_rate": 6.278e-05,
"loss": 0.0225,
"step": 3140
},
{
"epoch": 2.0737327188940093,
"grad_norm": 0.44038310647010803,
"learning_rate": 6.298000000000001e-05,
"loss": 0.0255,
"step": 3150
},
{
"epoch": 2.0803159973666885,
"grad_norm": 0.5149315595626831,
"learning_rate": 6.318e-05,
"loss": 0.0219,
"step": 3160
},
{
"epoch": 2.086899275839368,
"grad_norm": 0.4264054000377655,
"learning_rate": 6.338e-05,
"loss": 0.0245,
"step": 3170
},
{
"epoch": 2.093482554312047,
"grad_norm": 0.4959770441055298,
"learning_rate": 6.358000000000001e-05,
"loss": 0.0224,
"step": 3180
},
{
"epoch": 2.100065832784727,
"grad_norm": 0.5339881777763367,
"learning_rate": 6.378e-05,
"loss": 0.0196,
"step": 3190
},
{
"epoch": 2.1066491112574064,
"grad_norm": 0.41315099596977234,
"learning_rate": 6.398000000000001e-05,
"loss": 0.0281,
"step": 3200
},
{
"epoch": 2.1132323897300855,
"grad_norm": 0.5381911993026733,
"learning_rate": 6.418000000000001e-05,
"loss": 0.0244,
"step": 3210
},
{
"epoch": 2.119815668202765,
"grad_norm": 0.4526052176952362,
"learning_rate": 6.438e-05,
"loss": 0.0231,
"step": 3220
},
{
"epoch": 2.1263989466754443,
"grad_norm": 0.313303679227829,
"learning_rate": 6.458000000000001e-05,
"loss": 0.0222,
"step": 3230
},
{
"epoch": 2.132982225148124,
"grad_norm": 0.4752238094806671,
"learning_rate": 6.478000000000001e-05,
"loss": 0.0219,
"step": 3240
},
{
"epoch": 2.139565503620803,
"grad_norm": 0.36124691367149353,
"learning_rate": 6.498e-05,
"loss": 0.026,
"step": 3250
},
{
"epoch": 2.1461487820934826,
"grad_norm": 0.5557790398597717,
"learning_rate": 6.518000000000001e-05,
"loss": 0.0188,
"step": 3260
},
{
"epoch": 2.152732060566162,
"grad_norm": 0.4486756920814514,
"learning_rate": 6.538000000000001e-05,
"loss": 0.0269,
"step": 3270
},
{
"epoch": 2.1593153390388413,
"grad_norm": 0.4497973322868347,
"learning_rate": 6.558e-05,
"loss": 0.0238,
"step": 3280
},
{
"epoch": 2.165898617511521,
"grad_norm": 0.4984229505062103,
"learning_rate": 6.578000000000001e-05,
"loss": 0.0264,
"step": 3290
},
{
"epoch": 2.1724818959842,
"grad_norm": 0.36803939938545227,
"learning_rate": 6.598e-05,
"loss": 0.031,
"step": 3300
},
{
"epoch": 2.1790651744568796,
"grad_norm": 0.3225577473640442,
"learning_rate": 6.618e-05,
"loss": 0.0182,
"step": 3310
},
{
"epoch": 2.185648452929559,
"grad_norm": 0.3707975149154663,
"learning_rate": 6.638e-05,
"loss": 0.0205,
"step": 3320
},
{
"epoch": 2.1922317314022384,
"grad_norm": 0.46778491139411926,
"learning_rate": 6.658e-05,
"loss": 0.0239,
"step": 3330
},
{
"epoch": 2.1988150098749175,
"grad_norm": 0.363540381193161,
"learning_rate": 6.678e-05,
"loss": 0.0217,
"step": 3340
},
{
"epoch": 2.205398288347597,
"grad_norm": 0.49262064695358276,
"learning_rate": 6.698e-05,
"loss": 0.0245,
"step": 3350
},
{
"epoch": 2.2119815668202767,
"grad_norm": 0.3035878539085388,
"learning_rate": 6.718e-05,
"loss": 0.0265,
"step": 3360
},
{
"epoch": 2.218564845292956,
"grad_norm": 0.4190710484981537,
"learning_rate": 6.738e-05,
"loss": 0.0313,
"step": 3370
},
{
"epoch": 2.2251481237656354,
"grad_norm": 0.3806004524230957,
"learning_rate": 6.758e-05,
"loss": 0.021,
"step": 3380
},
{
"epoch": 2.2317314022383146,
"grad_norm": 0.48420360684394836,
"learning_rate": 6.778e-05,
"loss": 0.0225,
"step": 3390
},
{
"epoch": 2.238314680710994,
"grad_norm": 0.4714139699935913,
"learning_rate": 6.798e-05,
"loss": 0.0251,
"step": 3400
},
{
"epoch": 2.2448979591836733,
"grad_norm": 0.613018274307251,
"learning_rate": 6.818e-05,
"loss": 0.0237,
"step": 3410
},
{
"epoch": 2.251481237656353,
"grad_norm": 0.4343436360359192,
"learning_rate": 6.838e-05,
"loss": 0.0269,
"step": 3420
},
{
"epoch": 2.258064516129032,
"grad_norm": 0.6064520478248596,
"learning_rate": 6.858e-05,
"loss": 0.0269,
"step": 3430
},
{
"epoch": 2.2646477946017116,
"grad_norm": 0.3919238746166229,
"learning_rate": 6.878e-05,
"loss": 0.033,
"step": 3440
},
{
"epoch": 2.271231073074391,
"grad_norm": 0.39970511198043823,
"learning_rate": 6.898e-05,
"loss": 0.0258,
"step": 3450
},
{
"epoch": 2.2778143515470703,
"grad_norm": 0.5027624368667603,
"learning_rate": 6.918e-05,
"loss": 0.0261,
"step": 3460
},
{
"epoch": 2.28439763001975,
"grad_norm": 0.5257875323295593,
"learning_rate": 6.938e-05,
"loss": 0.026,
"step": 3470
},
{
"epoch": 2.290980908492429,
"grad_norm": 0.3369908928871155,
"learning_rate": 6.958e-05,
"loss": 0.0228,
"step": 3480
},
{
"epoch": 2.2975641869651087,
"grad_norm": 0.6159877181053162,
"learning_rate": 6.978e-05,
"loss": 0.0313,
"step": 3490
},
{
"epoch": 2.3041474654377883,
"grad_norm": 0.3474142253398895,
"learning_rate": 6.998e-05,
"loss": 0.0227,
"step": 3500
},
{
"epoch": 2.3107307439104674,
"grad_norm": 0.5793073177337646,
"learning_rate": 7.018e-05,
"loss": 0.0263,
"step": 3510
},
{
"epoch": 2.317314022383147,
"grad_norm": 0.39322030544281006,
"learning_rate": 7.038e-05,
"loss": 0.0286,
"step": 3520
},
{
"epoch": 2.323897300855826,
"grad_norm": 0.457241415977478,
"learning_rate": 7.058e-05,
"loss": 0.0199,
"step": 3530
},
{
"epoch": 2.3304805793285057,
"grad_norm": 0.5033375024795532,
"learning_rate": 7.078e-05,
"loss": 0.0315,
"step": 3540
},
{
"epoch": 2.337063857801185,
"grad_norm": 0.3997373580932617,
"learning_rate": 7.098e-05,
"loss": 0.0237,
"step": 3550
},
{
"epoch": 2.3436471362738645,
"grad_norm": 0.3675520718097687,
"learning_rate": 7.118e-05,
"loss": 0.0255,
"step": 3560
},
{
"epoch": 2.3502304147465436,
"grad_norm": 0.35251176357269287,
"learning_rate": 7.138e-05,
"loss": 0.0335,
"step": 3570
},
{
"epoch": 2.356813693219223,
"grad_norm": 0.4453986883163452,
"learning_rate": 7.158e-05,
"loss": 0.0296,
"step": 3580
},
{
"epoch": 2.3633969716919028,
"grad_norm": 0.4350111186504364,
"learning_rate": 7.178000000000001e-05,
"loss": 0.0231,
"step": 3590
},
{
"epoch": 2.369980250164582,
"grad_norm": 0.44233962893486023,
"learning_rate": 7.198e-05,
"loss": 0.0251,
"step": 3600
},
{
"epoch": 2.3765635286372615,
"grad_norm": 0.4645010828971863,
"learning_rate": 7.218e-05,
"loss": 0.0271,
"step": 3610
},
{
"epoch": 2.3831468071099406,
"grad_norm": 0.49188438057899475,
"learning_rate": 7.238000000000001e-05,
"loss": 0.0247,
"step": 3620
},
{
"epoch": 2.3897300855826202,
"grad_norm": 0.5354231595993042,
"learning_rate": 7.258e-05,
"loss": 0.0273,
"step": 3630
},
{
"epoch": 2.3963133640552994,
"grad_norm": 0.4806993901729584,
"learning_rate": 7.278e-05,
"loss": 0.022,
"step": 3640
},
{
"epoch": 2.402896642527979,
"grad_norm": 0.37118756771087646,
"learning_rate": 7.298000000000001e-05,
"loss": 0.0262,
"step": 3650
},
{
"epoch": 2.409479921000658,
"grad_norm": 0.39136067032814026,
"learning_rate": 7.318e-05,
"loss": 0.0205,
"step": 3660
},
{
"epoch": 2.4160631994733377,
"grad_norm": 0.44690921902656555,
"learning_rate": 7.338e-05,
"loss": 0.0261,
"step": 3670
},
{
"epoch": 2.4226464779460173,
"grad_norm": 0.3490082025527954,
"learning_rate": 7.358000000000001e-05,
"loss": 0.0212,
"step": 3680
},
{
"epoch": 2.4292297564186964,
"grad_norm": 0.3267335295677185,
"learning_rate": 7.378e-05,
"loss": 0.0218,
"step": 3690
},
{
"epoch": 2.435813034891376,
"grad_norm": 0.48554444313049316,
"learning_rate": 7.398e-05,
"loss": 0.0279,
"step": 3700
},
{
"epoch": 2.442396313364055,
"grad_norm": 0.4306657016277313,
"learning_rate": 7.418000000000001e-05,
"loss": 0.0279,
"step": 3710
},
{
"epoch": 2.4489795918367347,
"grad_norm": 0.5217123627662659,
"learning_rate": 7.438e-05,
"loss": 0.0262,
"step": 3720
},
{
"epoch": 2.4555628703094143,
"grad_norm": 0.4703887701034546,
"learning_rate": 7.458000000000001e-05,
"loss": 0.0234,
"step": 3730
},
{
"epoch": 2.4621461487820935,
"grad_norm": 0.528122067451477,
"learning_rate": 7.478e-05,
"loss": 0.0297,
"step": 3740
},
{
"epoch": 2.468729427254773,
"grad_norm": 0.4418627619743347,
"learning_rate": 7.498e-05,
"loss": 0.0242,
"step": 3750
},
{
"epoch": 2.475312705727452,
"grad_norm": 0.3054906129837036,
"learning_rate": 7.518000000000001e-05,
"loss": 0.0208,
"step": 3760
},
{
"epoch": 2.481895984200132,
"grad_norm": 0.40181198716163635,
"learning_rate": 7.538e-05,
"loss": 0.0219,
"step": 3770
},
{
"epoch": 2.488479262672811,
"grad_norm": 0.41410279273986816,
"learning_rate": 7.558e-05,
"loss": 0.0218,
"step": 3780
},
{
"epoch": 2.4950625411454905,
"grad_norm": 0.34109625220298767,
"learning_rate": 7.578000000000001e-05,
"loss": 0.0252,
"step": 3790
},
{
"epoch": 2.5016458196181697,
"grad_norm": 0.4903218150138855,
"learning_rate": 7.598e-05,
"loss": 0.0306,
"step": 3800
},
{
"epoch": 2.5082290980908493,
"grad_norm": 0.4036240875720978,
"learning_rate": 7.618e-05,
"loss": 0.026,
"step": 3810
},
{
"epoch": 2.514812376563529,
"grad_norm": 0.512852668762207,
"learning_rate": 7.638000000000001e-05,
"loss": 0.0277,
"step": 3820
},
{
"epoch": 2.521395655036208,
"grad_norm": 0.31915414333343506,
"learning_rate": 7.658e-05,
"loss": 0.0232,
"step": 3830
},
{
"epoch": 2.5279789335088876,
"grad_norm": 0.3975728452205658,
"learning_rate": 7.678000000000001e-05,
"loss": 0.027,
"step": 3840
},
{
"epoch": 2.5345622119815667,
"grad_norm": 0.4016415774822235,
"learning_rate": 7.698000000000001e-05,
"loss": 0.0218,
"step": 3850
},
{
"epoch": 2.5411454904542463,
"grad_norm": 0.6336414813995361,
"learning_rate": 7.718e-05,
"loss": 0.033,
"step": 3860
},
{
"epoch": 2.5477287689269255,
"grad_norm": 0.3426223397254944,
"learning_rate": 7.738000000000001e-05,
"loss": 0.0282,
"step": 3870
},
{
"epoch": 2.554312047399605,
"grad_norm": 0.5409531593322754,
"learning_rate": 7.758000000000001e-05,
"loss": 0.0247,
"step": 3880
},
{
"epoch": 2.560895325872284,
"grad_norm": 0.4174785017967224,
"learning_rate": 7.778e-05,
"loss": 0.0298,
"step": 3890
},
{
"epoch": 2.5674786043449638,
"grad_norm": 0.4867883026599884,
"learning_rate": 7.798000000000001e-05,
"loss": 0.0239,
"step": 3900
},
{
"epoch": 2.5740618828176434,
"grad_norm": 0.3274104595184326,
"learning_rate": 7.818000000000001e-05,
"loss": 0.0182,
"step": 3910
},
{
"epoch": 2.5806451612903225,
"grad_norm": 0.34040573239326477,
"learning_rate": 7.838e-05,
"loss": 0.0212,
"step": 3920
},
{
"epoch": 2.587228439763002,
"grad_norm": 0.5152034163475037,
"learning_rate": 7.858000000000001e-05,
"loss": 0.0233,
"step": 3930
},
{
"epoch": 2.5938117182356812,
"grad_norm": 0.5163257718086243,
"learning_rate": 7.878e-05,
"loss": 0.0253,
"step": 3940
},
{
"epoch": 2.600394996708361,
"grad_norm": 0.3330592215061188,
"learning_rate": 7.897999999999999e-05,
"loss": 0.0265,
"step": 3950
},
{
"epoch": 2.6069782751810404,
"grad_norm": 0.5234596729278564,
"learning_rate": 7.918e-05,
"loss": 0.0355,
"step": 3960
},
{
"epoch": 2.6135615536537196,
"grad_norm": 0.46524351835250854,
"learning_rate": 7.938e-05,
"loss": 0.0236,
"step": 3970
},
{
"epoch": 2.6201448321263987,
"grad_norm": 0.448681503534317,
"learning_rate": 7.958e-05,
"loss": 0.0225,
"step": 3980
},
{
"epoch": 2.6267281105990783,
"grad_norm": 0.4007005989551544,
"learning_rate": 7.978e-05,
"loss": 0.021,
"step": 3990
},
{
"epoch": 2.633311389071758,
"grad_norm": 0.5299978852272034,
"learning_rate": 7.998e-05,
"loss": 0.0201,
"step": 4000
},
{
"epoch": 2.639894667544437,
"grad_norm": 0.40486276149749756,
"learning_rate": 8.018e-05,
"loss": 0.0224,
"step": 4010
},
{
"epoch": 2.6464779460171166,
"grad_norm": 0.46100348234176636,
"learning_rate": 8.038e-05,
"loss": 0.0208,
"step": 4020
},
{
"epoch": 2.6530612244897958,
"grad_norm": 0.36736199259757996,
"learning_rate": 8.058e-05,
"loss": 0.0242,
"step": 4030
},
{
"epoch": 2.6596445029624753,
"grad_norm": 0.4617960453033447,
"learning_rate": 8.078e-05,
"loss": 0.0312,
"step": 4040
},
{
"epoch": 2.666227781435155,
"grad_norm": 0.3892560303211212,
"learning_rate": 8.098e-05,
"loss": 0.0297,
"step": 4050
},
{
"epoch": 2.672811059907834,
"grad_norm": 0.4406382143497467,
"learning_rate": 8.118e-05,
"loss": 0.0281,
"step": 4060
},
{
"epoch": 2.679394338380513,
"grad_norm": 0.3723593056201935,
"learning_rate": 8.138e-05,
"loss": 0.0349,
"step": 4070
},
{
"epoch": 2.685977616853193,
"grad_norm": 0.49104979634284973,
"learning_rate": 8.158e-05,
"loss": 0.0287,
"step": 4080
},
{
"epoch": 2.6925608953258724,
"grad_norm": 0.44458192586898804,
"learning_rate": 8.178e-05,
"loss": 0.0246,
"step": 4090
},
{
"epoch": 2.6991441737985515,
"grad_norm": 0.4994174838066101,
"learning_rate": 8.198e-05,
"loss": 0.0245,
"step": 4100
},
{
"epoch": 2.705727452271231,
"grad_norm": 0.3042447566986084,
"learning_rate": 8.218e-05,
"loss": 0.0214,
"step": 4110
},
{
"epoch": 2.7123107307439103,
"grad_norm": 0.4393254220485687,
"learning_rate": 8.238000000000001e-05,
"loss": 0.0225,
"step": 4120
},
{
"epoch": 2.71889400921659,
"grad_norm": 0.44685810804367065,
"learning_rate": 8.258e-05,
"loss": 0.0195,
"step": 4130
},
{
"epoch": 2.7254772876892694,
"grad_norm": 0.441112220287323,
"learning_rate": 8.278e-05,
"loss": 0.0271,
"step": 4140
},
{
"epoch": 2.7320605661619486,
"grad_norm": 0.42679381370544434,
"learning_rate": 8.298000000000001e-05,
"loss": 0.0234,
"step": 4150
},
{
"epoch": 2.738643844634628,
"grad_norm": 0.32695186138153076,
"learning_rate": 8.318e-05,
"loss": 0.0173,
"step": 4160
},
{
"epoch": 2.7452271231073073,
"grad_norm": 0.3302655816078186,
"learning_rate": 8.338e-05,
"loss": 0.0223,
"step": 4170
},
{
"epoch": 2.751810401579987,
"grad_norm": 0.39856135845184326,
"learning_rate": 8.358e-05,
"loss": 0.0237,
"step": 4180
},
{
"epoch": 2.7583936800526665,
"grad_norm": 0.3753248453140259,
"learning_rate": 8.378e-05,
"loss": 0.0214,
"step": 4190
},
{
"epoch": 2.7649769585253456,
"grad_norm": 0.39876648783683777,
"learning_rate": 8.398e-05,
"loss": 0.0173,
"step": 4200
},
{
"epoch": 2.771560236998025,
"grad_norm": 0.4092113971710205,
"learning_rate": 8.418e-05,
"loss": 0.02,
"step": 4210
},
{
"epoch": 2.7781435154707044,
"grad_norm": 0.3626212179660797,
"learning_rate": 8.438e-05,
"loss": 0.0202,
"step": 4220
},
{
"epoch": 2.784726793943384,
"grad_norm": 0.5081978440284729,
"learning_rate": 8.458e-05,
"loss": 0.0226,
"step": 4230
},
{
"epoch": 2.791310072416063,
"grad_norm": 0.45574530959129333,
"learning_rate": 8.478e-05,
"loss": 0.019,
"step": 4240
},
{
"epoch": 2.7978933508887427,
"grad_norm": 0.43424704670906067,
"learning_rate": 8.498e-05,
"loss": 0.0286,
"step": 4250
},
{
"epoch": 2.804476629361422,
"grad_norm": 0.23703710734844208,
"learning_rate": 8.518000000000001e-05,
"loss": 0.0194,
"step": 4260
},
{
"epoch": 2.8110599078341014,
"grad_norm": 0.4405156373977661,
"learning_rate": 8.538e-05,
"loss": 0.026,
"step": 4270
},
{
"epoch": 2.817643186306781,
"grad_norm": 0.4267120957374573,
"learning_rate": 8.558e-05,
"loss": 0.026,
"step": 4280
},
{
"epoch": 2.82422646477946,
"grad_norm": 0.6016601920127869,
"learning_rate": 8.578000000000001e-05,
"loss": 0.0398,
"step": 4290
},
{
"epoch": 2.8308097432521393,
"grad_norm": 0.4687979519367218,
"learning_rate": 8.598e-05,
"loss": 0.0234,
"step": 4300
},
{
"epoch": 2.837393021724819,
"grad_norm": 0.4563385248184204,
"learning_rate": 8.618e-05,
"loss": 0.0274,
"step": 4310
},
{
"epoch": 2.8439763001974985,
"grad_norm": 0.5656633377075195,
"learning_rate": 8.638000000000001e-05,
"loss": 0.025,
"step": 4320
},
{
"epoch": 2.8505595786701776,
"grad_norm": 0.41460227966308594,
"learning_rate": 8.658e-05,
"loss": 0.0256,
"step": 4330
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.3687985837459564,
"learning_rate": 8.678e-05,
"loss": 0.0187,
"step": 4340
},
{
"epoch": 2.8637261356155364,
"grad_norm": 0.44120946526527405,
"learning_rate": 8.698000000000001e-05,
"loss": 0.0199,
"step": 4350
},
{
"epoch": 2.870309414088216,
"grad_norm": 0.5328232645988464,
"learning_rate": 8.718e-05,
"loss": 0.0335,
"step": 4360
},
{
"epoch": 2.8768926925608955,
"grad_norm": 0.4234803318977356,
"learning_rate": 8.738000000000001e-05,
"loss": 0.0205,
"step": 4370
},
{
"epoch": 2.8834759710335747,
"grad_norm": 0.3861204981803894,
"learning_rate": 8.758000000000001e-05,
"loss": 0.0242,
"step": 4380
},
{
"epoch": 2.8900592495062543,
"grad_norm": 0.4176269769668579,
"learning_rate": 8.778e-05,
"loss": 0.0293,
"step": 4390
},
{
"epoch": 2.8966425279789334,
"grad_norm": 0.4220450818538666,
"learning_rate": 8.798000000000001e-05,
"loss": 0.0196,
"step": 4400
},
{
"epoch": 2.903225806451613,
"grad_norm": 0.40692248940467834,
"learning_rate": 8.818000000000001e-05,
"loss": 0.0221,
"step": 4410
},
{
"epoch": 2.909809084924292,
"grad_norm": 0.39401960372924805,
"learning_rate": 8.838e-05,
"loss": 0.0226,
"step": 4420
},
{
"epoch": 2.9163923633969717,
"grad_norm": 0.7437530755996704,
"learning_rate": 8.858000000000001e-05,
"loss": 0.0213,
"step": 4430
},
{
"epoch": 2.922975641869651,
"grad_norm": 0.4770195484161377,
"learning_rate": 8.878000000000001e-05,
"loss": 0.0276,
"step": 4440
},
{
"epoch": 2.9295589203423305,
"grad_norm": 0.4626404643058777,
"learning_rate": 8.898e-05,
"loss": 0.026,
"step": 4450
},
{
"epoch": 2.93614219881501,
"grad_norm": 0.6171532869338989,
"learning_rate": 8.918000000000001e-05,
"loss": 0.0274,
"step": 4460
},
{
"epoch": 2.942725477287689,
"grad_norm": 0.2920059561729431,
"learning_rate": 8.938e-05,
"loss": 0.022,
"step": 4470
},
{
"epoch": 2.9493087557603688,
"grad_norm": 0.3998858630657196,
"learning_rate": 8.958e-05,
"loss": 0.0199,
"step": 4480
},
{
"epoch": 2.955892034233048,
"grad_norm": 0.4221383035182953,
"learning_rate": 8.978000000000001e-05,
"loss": 0.0224,
"step": 4490
},
{
"epoch": 2.9624753127057275,
"grad_norm": 0.3771093487739563,
"learning_rate": 8.998e-05,
"loss": 0.0342,
"step": 4500
},
{
"epoch": 2.969058591178407,
"grad_norm": 0.39176174998283386,
"learning_rate": 9.018000000000001e-05,
"loss": 0.0235,
"step": 4510
},
{
"epoch": 2.9756418696510862,
"grad_norm": 0.5010409951210022,
"learning_rate": 9.038000000000001e-05,
"loss": 0.0315,
"step": 4520
},
{
"epoch": 2.9822251481237654,
"grad_norm": 0.47405004501342773,
"learning_rate": 9.058e-05,
"loss": 0.0272,
"step": 4530
},
{
"epoch": 2.988808426596445,
"grad_norm": 0.36223021149635315,
"learning_rate": 9.078000000000001e-05,
"loss": 0.0193,
"step": 4540
},
{
"epoch": 2.9953917050691246,
"grad_norm": 0.35093408823013306,
"learning_rate": 9.098000000000001e-05,
"loss": 0.0274,
"step": 4550
},
{
"epoch": 3.0019749835418037,
"grad_norm": 0.4487496316432953,
"learning_rate": 9.118e-05,
"loss": 0.026,
"step": 4560
},
{
"epoch": 3.0085582620144833,
"grad_norm": 0.47169044613838196,
"learning_rate": 9.138e-05,
"loss": 0.0236,
"step": 4570
},
{
"epoch": 3.0151415404871624,
"grad_norm": 0.39601606130599976,
"learning_rate": 9.158e-05,
"loss": 0.025,
"step": 4580
},
{
"epoch": 3.021724818959842,
"grad_norm": 0.2740744352340698,
"learning_rate": 9.178e-05,
"loss": 0.0211,
"step": 4590
},
{
"epoch": 3.0283080974325216,
"grad_norm": 0.4494052231311798,
"learning_rate": 9.198e-05,
"loss": 0.0221,
"step": 4600
},
{
"epoch": 3.0348913759052007,
"grad_norm": 0.5228054523468018,
"learning_rate": 9.218e-05,
"loss": 0.0304,
"step": 4610
},
{
"epoch": 3.0414746543778803,
"grad_norm": 0.3318790793418884,
"learning_rate": 9.238e-05,
"loss": 0.0272,
"step": 4620
},
{
"epoch": 3.0480579328505595,
"grad_norm": 0.3457318842411041,
"learning_rate": 9.258e-05,
"loss": 0.0186,
"step": 4630
},
{
"epoch": 3.054641211323239,
"grad_norm": 0.35129034519195557,
"learning_rate": 9.278e-05,
"loss": 0.0237,
"step": 4640
},
{
"epoch": 3.061224489795918,
"grad_norm": 0.4268430471420288,
"learning_rate": 9.298e-05,
"loss": 0.0343,
"step": 4650
},
{
"epoch": 3.067807768268598,
"grad_norm": 0.5308719873428345,
"learning_rate": 9.318e-05,
"loss": 0.0323,
"step": 4660
},
{
"epoch": 3.074391046741277,
"grad_norm": 0.45605698227882385,
"learning_rate": 9.338e-05,
"loss": 0.0259,
"step": 4670
},
{
"epoch": 3.0809743252139565,
"grad_norm": 0.33121126890182495,
"learning_rate": 9.358e-05,
"loss": 0.0224,
"step": 4680
},
{
"epoch": 3.087557603686636,
"grad_norm": 0.4248153865337372,
"learning_rate": 9.378e-05,
"loss": 0.0292,
"step": 4690
},
{
"epoch": 3.0941408821593153,
"grad_norm": 0.6298911571502686,
"learning_rate": 9.398e-05,
"loss": 0.0275,
"step": 4700
},
{
"epoch": 3.100724160631995,
"grad_norm": 0.387172669172287,
"learning_rate": 9.418e-05,
"loss": 0.0271,
"step": 4710
},
{
"epoch": 3.107307439104674,
"grad_norm": 0.4898911118507385,
"learning_rate": 9.438e-05,
"loss": 0.0233,
"step": 4720
},
{
"epoch": 3.1138907175773536,
"grad_norm": 0.5377641916275024,
"learning_rate": 9.458e-05,
"loss": 0.0216,
"step": 4730
},
{
"epoch": 3.1204739960500327,
"grad_norm": 0.36603569984436035,
"learning_rate": 9.478e-05,
"loss": 0.0277,
"step": 4740
},
{
"epoch": 3.1270572745227123,
"grad_norm": 0.3127041161060333,
"learning_rate": 9.498e-05,
"loss": 0.0206,
"step": 4750
},
{
"epoch": 3.133640552995392,
"grad_norm": 0.3451925814151764,
"learning_rate": 9.518000000000001e-05,
"loss": 0.0228,
"step": 4760
},
{
"epoch": 3.140223831468071,
"grad_norm": 0.5276954174041748,
"learning_rate": 9.538e-05,
"loss": 0.0263,
"step": 4770
},
{
"epoch": 3.1468071099407506,
"grad_norm": 0.3454340398311615,
"learning_rate": 9.558e-05,
"loss": 0.0253,
"step": 4780
},
{
"epoch": 3.15339038841343,
"grad_norm": 0.5219067335128784,
"learning_rate": 9.578000000000001e-05,
"loss": 0.0336,
"step": 4790
},
{
"epoch": 3.1599736668861094,
"grad_norm": 0.5261627435684204,
"learning_rate": 9.598e-05,
"loss": 0.025,
"step": 4800
},
{
"epoch": 3.1665569453587885,
"grad_norm": 0.3191069960594177,
"learning_rate": 9.618e-05,
"loss": 0.0256,
"step": 4810
},
{
"epoch": 3.173140223831468,
"grad_norm": 0.4737350642681122,
"learning_rate": 9.638000000000001e-05,
"loss": 0.0206,
"step": 4820
},
{
"epoch": 3.1797235023041477,
"grad_norm": 0.4775357246398926,
"learning_rate": 9.658e-05,
"loss": 0.0233,
"step": 4830
},
{
"epoch": 3.186306780776827,
"grad_norm": 0.3223651051521301,
"learning_rate": 9.678e-05,
"loss": 0.0238,
"step": 4840
},
{
"epoch": 3.1928900592495064,
"grad_norm": 0.4690217971801758,
"learning_rate": 9.698000000000001e-05,
"loss": 0.0214,
"step": 4850
},
{
"epoch": 3.1994733377221856,
"grad_norm": 0.5464542508125305,
"learning_rate": 9.718e-05,
"loss": 0.026,
"step": 4860
},
{
"epoch": 3.206056616194865,
"grad_norm": 0.38083896040916443,
"learning_rate": 9.738e-05,
"loss": 0.0262,
"step": 4870
},
{
"epoch": 3.2126398946675443,
"grad_norm": 0.5447069406509399,
"learning_rate": 9.758000000000001e-05,
"loss": 0.0235,
"step": 4880
},
{
"epoch": 3.219223173140224,
"grad_norm": 0.4364519417285919,
"learning_rate": 9.778e-05,
"loss": 0.0264,
"step": 4890
},
{
"epoch": 3.225806451612903,
"grad_norm": 0.3915967345237732,
"learning_rate": 9.798000000000001e-05,
"loss": 0.0221,
"step": 4900
},
{
"epoch": 3.2323897300855826,
"grad_norm": 0.4115281403064728,
"learning_rate": 9.818000000000001e-05,
"loss": 0.0265,
"step": 4910
},
{
"epoch": 3.238973008558262,
"grad_norm": 0.487048864364624,
"learning_rate": 9.838e-05,
"loss": 0.0293,
"step": 4920
},
{
"epoch": 3.2455562870309413,
"grad_norm": 0.3517211377620697,
"learning_rate": 9.858000000000001e-05,
"loss": 0.0228,
"step": 4930
},
{
"epoch": 3.252139565503621,
"grad_norm": 0.4840381443500519,
"learning_rate": 9.878e-05,
"loss": 0.0246,
"step": 4940
},
{
"epoch": 3.2587228439763,
"grad_norm": 0.38539987802505493,
"learning_rate": 9.898e-05,
"loss": 0.0201,
"step": 4950
},
{
"epoch": 3.2653061224489797,
"grad_norm": 0.42269837856292725,
"learning_rate": 9.918000000000001e-05,
"loss": 0.0261,
"step": 4960
},
{
"epoch": 3.271889400921659,
"grad_norm": 0.3298608064651489,
"learning_rate": 9.938e-05,
"loss": 0.0208,
"step": 4970
},
{
"epoch": 3.2784726793943384,
"grad_norm": 0.4724372625350952,
"learning_rate": 9.958e-05,
"loss": 0.0204,
"step": 4980
},
{
"epoch": 3.2850559578670175,
"grad_norm": 0.5129626989364624,
"learning_rate": 9.978000000000001e-05,
"loss": 0.0257,
"step": 4990
},
{
"epoch": 3.291639236339697,
"grad_norm": 0.28145211935043335,
"learning_rate": 9.998e-05,
"loss": 0.0294,
"step": 5000
},
{
"epoch": 3.2982225148123767,
"grad_norm": 0.4508993625640869,
"learning_rate": 9.999999778549045e-05,
"loss": 0.0275,
"step": 5010
},
{
"epoch": 3.304805793285056,
"grad_norm": 0.6209483742713928,
"learning_rate": 9.999999013039593e-05,
"loss": 0.0276,
"step": 5020
},
{
"epoch": 3.3113890717577354,
"grad_norm": 0.4727887809276581,
"learning_rate": 9.999997700737766e-05,
"loss": 0.0292,
"step": 5030
},
{
"epoch": 3.3179723502304146,
"grad_norm": 0.4151676595211029,
"learning_rate": 9.999995841643709e-05,
"loss": 0.0226,
"step": 5040
},
{
"epoch": 3.324555628703094,
"grad_norm": 0.5513929128646851,
"learning_rate": 9.999993435757623e-05,
"loss": 0.0251,
"step": 5050
},
{
"epoch": 3.3311389071757738,
"grad_norm": 0.4034462869167328,
"learning_rate": 9.999990483079773e-05,
"loss": 0.0199,
"step": 5060
},
{
"epoch": 3.337722185648453,
"grad_norm": 0.44716188311576843,
"learning_rate": 9.999986983610481e-05,
"loss": 0.0257,
"step": 5070
},
{
"epoch": 3.3443054641211325,
"grad_norm": 0.6094006896018982,
"learning_rate": 9.99998293735013e-05,
"loss": 0.0283,
"step": 5080
},
{
"epoch": 3.3508887425938116,
"grad_norm": 0.4081748425960541,
"learning_rate": 9.999978344299161e-05,
"loss": 0.0277,
"step": 5090
},
{
"epoch": 3.3574720210664912,
"grad_norm": 0.43902039527893066,
"learning_rate": 9.99997320445808e-05,
"loss": 0.0292,
"step": 5100
},
{
"epoch": 3.3640552995391704,
"grad_norm": 0.35768774151802063,
"learning_rate": 9.999967517827444e-05,
"loss": 0.0255,
"step": 5110
},
{
"epoch": 3.37063857801185,
"grad_norm": 0.36117279529571533,
"learning_rate": 9.999961284407879e-05,
"loss": 0.027,
"step": 5120
},
{
"epoch": 3.377221856484529,
"grad_norm": 0.40951865911483765,
"learning_rate": 9.999954504200067e-05,
"loss": 0.0199,
"step": 5130
},
{
"epoch": 3.3838051349572087,
"grad_norm": 0.3361895978450775,
"learning_rate": 9.999947177204744e-05,
"loss": 0.0249,
"step": 5140
},
{
"epoch": 3.3903884134298883,
"grad_norm": 0.5306394696235657,
"learning_rate": 9.999939303422718e-05,
"loss": 0.0271,
"step": 5150
},
{
"epoch": 3.3969716919025674,
"grad_norm": 0.439833402633667,
"learning_rate": 9.999930882854847e-05,
"loss": 0.023,
"step": 5160
},
{
"epoch": 3.403554970375247,
"grad_norm": 0.6075140237808228,
"learning_rate": 9.999921915502051e-05,
"loss": 0.0247,
"step": 5170
},
{
"epoch": 3.410138248847926,
"grad_norm": 0.3841625452041626,
"learning_rate": 9.99991240136531e-05,
"loss": 0.0245,
"step": 5180
},
{
"epoch": 3.4167215273206057,
"grad_norm": 0.38033556938171387,
"learning_rate": 9.999902340445668e-05,
"loss": 0.0226,
"step": 5190
},
{
"epoch": 3.423304805793285,
"grad_norm": 0.25364062190055847,
"learning_rate": 9.999891732744224e-05,
"loss": 0.0226,
"step": 5200
},
{
"epoch": 3.4298880842659645,
"grad_norm": 0.4260493218898773,
"learning_rate": 9.999880578262135e-05,
"loss": 0.0246,
"step": 5210
},
{
"epoch": 3.4364713627386436,
"grad_norm": 0.33678051829338074,
"learning_rate": 9.999868877000624e-05,
"loss": 0.0196,
"step": 5220
},
{
"epoch": 3.443054641211323,
"grad_norm": 0.4172752797603607,
"learning_rate": 9.99985662896097e-05,
"loss": 0.0267,
"step": 5230
},
{
"epoch": 3.449637919684003,
"grad_norm": 0.2745722532272339,
"learning_rate": 9.999843834144513e-05,
"loss": 0.0223,
"step": 5240
},
{
"epoch": 3.456221198156682,
"grad_norm": 0.41593703627586365,
"learning_rate": 9.99983049255265e-05,
"loss": 0.0182,
"step": 5250
},
{
"epoch": 3.4628044766293615,
"grad_norm": 0.31720176339149475,
"learning_rate": 9.999816604186843e-05,
"loss": 0.0251,
"step": 5260
},
{
"epoch": 3.4693877551020407,
"grad_norm": 0.2906181216239929,
"learning_rate": 9.999802169048609e-05,
"loss": 0.0222,
"step": 5270
},
{
"epoch": 3.4759710335747203,
"grad_norm": 0.3803907334804535,
"learning_rate": 9.999787187139527e-05,
"loss": 0.0179,
"step": 5280
},
{
"epoch": 3.4825543120474,
"grad_norm": 0.41219231486320496,
"learning_rate": 9.999771658461234e-05,
"loss": 0.0223,
"step": 5290
},
{
"epoch": 3.489137590520079,
"grad_norm": 0.3526517450809479,
"learning_rate": 9.999755583015431e-05,
"loss": 0.031,
"step": 5300
},
{
"epoch": 3.4957208689927586,
"grad_norm": 0.3900449573993683,
"learning_rate": 9.999738960803874e-05,
"loss": 0.0386,
"step": 5310
},
{
"epoch": 3.5023041474654377,
"grad_norm": 0.3482288718223572,
"learning_rate": 9.99972179182838e-05,
"loss": 0.0224,
"step": 5320
},
{
"epoch": 3.5088874259381173,
"grad_norm": 0.4343709349632263,
"learning_rate": 9.99970407609083e-05,
"loss": 0.0285,
"step": 5330
},
{
"epoch": 3.5154707044107965,
"grad_norm": 0.3820982277393341,
"learning_rate": 9.999685813593159e-05,
"loss": 0.0236,
"step": 5340
},
{
"epoch": 3.522053982883476,
"grad_norm": 0.3641161322593689,
"learning_rate": 9.999667004337362e-05,
"loss": 0.0335,
"step": 5350
},
{
"epoch": 3.528637261356155,
"grad_norm": 0.3691553771495819,
"learning_rate": 9.9996476483255e-05,
"loss": 0.0275,
"step": 5360
},
{
"epoch": 3.5352205398288348,
"grad_norm": 0.3383963406085968,
"learning_rate": 9.999627745559688e-05,
"loss": 0.0191,
"step": 5370
},
{
"epoch": 3.5418038183015144,
"grad_norm": 0.4110528826713562,
"learning_rate": 9.999607296042101e-05,
"loss": 0.0266,
"step": 5380
},
{
"epoch": 3.5483870967741935,
"grad_norm": 0.45626935362815857,
"learning_rate": 9.99958629977498e-05,
"loss": 0.0299,
"step": 5390
},
{
"epoch": 3.554970375246873,
"grad_norm": 0.4285246431827545,
"learning_rate": 9.999564756760615e-05,
"loss": 0.0266,
"step": 5400
},
{
"epoch": 3.5615536537195522,
"grad_norm": 0.41502803564071655,
"learning_rate": 9.999542667001366e-05,
"loss": 0.0242,
"step": 5410
},
{
"epoch": 3.568136932192232,
"grad_norm": 0.4082993268966675,
"learning_rate": 9.999520030499647e-05,
"loss": 0.0272,
"step": 5420
},
{
"epoch": 3.574720210664911,
"grad_norm": 0.47983601689338684,
"learning_rate": 9.999496847257936e-05,
"loss": 0.0219,
"step": 5430
},
{
"epoch": 3.5813034891375906,
"grad_norm": 0.4671506881713867,
"learning_rate": 9.999473117278764e-05,
"loss": 0.0223,
"step": 5440
},
{
"epoch": 3.5878867676102697,
"grad_norm": 0.573223888874054,
"learning_rate": 9.999448840564731e-05,
"loss": 0.0253,
"step": 5450
},
{
"epoch": 3.5944700460829493,
"grad_norm": 0.47985541820526123,
"learning_rate": 9.999424017118488e-05,
"loss": 0.0219,
"step": 5460
},
{
"epoch": 3.601053324555629,
"grad_norm": 0.49612581729888916,
"learning_rate": 9.999398646942751e-05,
"loss": 0.0286,
"step": 5470
},
{
"epoch": 3.607636603028308,
"grad_norm": 0.41392359137535095,
"learning_rate": 9.999372730040296e-05,
"loss": 0.0189,
"step": 5480
},
{
"epoch": 3.6142198815009876,
"grad_norm": 0.30266517400741577,
"learning_rate": 9.999346266413953e-05,
"loss": 0.0198,
"step": 5490
},
{
"epoch": 3.6208031599736668,
"grad_norm": 0.29903119802474976,
"learning_rate": 9.99931925606662e-05,
"loss": 0.0201,
"step": 5500
},
{
"epoch": 3.6273864384463463,
"grad_norm": 0.36555612087249756,
"learning_rate": 9.99929169900125e-05,
"loss": 0.0216,
"step": 5510
},
{
"epoch": 3.633969716919026,
"grad_norm": 0.361990749835968,
"learning_rate": 9.999263595220855e-05,
"loss": 0.021,
"step": 5520
},
{
"epoch": 3.640552995391705,
"grad_norm": 0.2887035608291626,
"learning_rate": 9.99923494472851e-05,
"loss": 0.0199,
"step": 5530
},
{
"epoch": 3.647136273864384,
"grad_norm": 0.3344179689884186,
"learning_rate": 9.999205747527348e-05,
"loss": 0.0173,
"step": 5540
},
{
"epoch": 3.653719552337064,
"grad_norm": 0.3673497438430786,
"learning_rate": 9.999176003620561e-05,
"loss": 0.0195,
"step": 5550
},
{
"epoch": 3.6603028308097434,
"grad_norm": 0.32160887122154236,
"learning_rate": 9.999145713011405e-05,
"loss": 0.0201,
"step": 5560
},
{
"epoch": 3.6668861092824225,
"grad_norm": 0.2807530164718628,
"learning_rate": 9.999114875703186e-05,
"loss": 0.0226,
"step": 5570
},
{
"epoch": 3.673469387755102,
"grad_norm": 0.4456839859485626,
"learning_rate": 9.999083491699281e-05,
"loss": 0.0187,
"step": 5580
},
{
"epoch": 3.6800526662277813,
"grad_norm": 0.5123438239097595,
"learning_rate": 9.999051561003123e-05,
"loss": 0.0211,
"step": 5590
},
{
"epoch": 3.686635944700461,
"grad_norm": 0.384736031293869,
"learning_rate": 9.999019083618202e-05,
"loss": 0.0252,
"step": 5600
},
{
"epoch": 3.6932192231731404,
"grad_norm": 0.2919522523880005,
"learning_rate": 9.99898605954807e-05,
"loss": 0.0237,
"step": 5610
},
{
"epoch": 3.6998025016458196,
"grad_norm": 0.34822607040405273,
"learning_rate": 9.998952488796338e-05,
"loss": 0.0238,
"step": 5620
},
{
"epoch": 3.706385780118499,
"grad_norm": 0.38299793004989624,
"learning_rate": 9.998918371366676e-05,
"loss": 0.0162,
"step": 5630
},
{
"epoch": 3.7129690585911783,
"grad_norm": 0.440780907869339,
"learning_rate": 9.99888370726282e-05,
"loss": 0.0254,
"step": 5640
},
{
"epoch": 3.719552337063858,
"grad_norm": 0.31846708059310913,
"learning_rate": 9.998848496488556e-05,
"loss": 0.0218,
"step": 5650
},
{
"epoch": 3.726135615536537,
"grad_norm": 0.32942384481430054,
"learning_rate": 9.998812739047736e-05,
"loss": 0.0215,
"step": 5660
},
{
"epoch": 3.7327188940092166,
"grad_norm": 0.3526286780834198,
"learning_rate": 9.99877643494427e-05,
"loss": 0.0183,
"step": 5670
},
{
"epoch": 3.739302172481896,
"grad_norm": 0.4554576873779297,
"learning_rate": 9.998739584182128e-05,
"loss": 0.0279,
"step": 5680
},
{
"epoch": 3.7458854509545754,
"grad_norm": 0.3991876244544983,
"learning_rate": 9.998702186765342e-05,
"loss": 0.02,
"step": 5690
},
{
"epoch": 3.752468729427255,
"grad_norm": 0.43987610936164856,
"learning_rate": 9.998664242698e-05,
"loss": 0.0239,
"step": 5700
},
{
"epoch": 3.759052007899934,
"grad_norm": 0.2586442232131958,
"learning_rate": 9.998625751984251e-05,
"loss": 0.0237,
"step": 5710
},
{
"epoch": 3.7656352863726137,
"grad_norm": 0.3601788878440857,
"learning_rate": 9.998586714628307e-05,
"loss": 0.0161,
"step": 5720
},
{
"epoch": 3.772218564845293,
"grad_norm": 0.4342748820781708,
"learning_rate": 9.998547130634432e-05,
"loss": 0.0253,
"step": 5730
},
{
"epoch": 3.7788018433179724,
"grad_norm": 0.3642992377281189,
"learning_rate": 9.99850700000696e-05,
"loss": 0.0198,
"step": 5740
},
{
"epoch": 3.785385121790652,
"grad_norm": 0.3224031627178192,
"learning_rate": 9.998466322750278e-05,
"loss": 0.0223,
"step": 5750
},
{
"epoch": 3.791968400263331,
"grad_norm": 0.4167894721031189,
"learning_rate": 9.998425098868834e-05,
"loss": 0.0191,
"step": 5760
},
{
"epoch": 3.7985516787360103,
"grad_norm": 0.4189382493495941,
"learning_rate": 9.998383328367136e-05,
"loss": 0.0199,
"step": 5770
},
{
"epoch": 3.80513495720869,
"grad_norm": 0.468149334192276,
"learning_rate": 9.99834101124975e-05,
"loss": 0.0231,
"step": 5780
},
{
"epoch": 3.8117182356813695,
"grad_norm": 0.41158705949783325,
"learning_rate": 9.998298147521309e-05,
"loss": 0.0244,
"step": 5790
},
{
"epoch": 3.8183015141540486,
"grad_norm": 0.376764714717865,
"learning_rate": 9.998254737186496e-05,
"loss": 0.0222,
"step": 5800
},
{
"epoch": 3.824884792626728,
"grad_norm": 0.36619943380355835,
"learning_rate": 9.99821078025006e-05,
"loss": 0.0312,
"step": 5810
},
{
"epoch": 3.8314680710994073,
"grad_norm": 0.24391573667526245,
"learning_rate": 9.998166276716807e-05,
"loss": 0.0276,
"step": 5820
},
{
"epoch": 3.838051349572087,
"grad_norm": 0.3048151731491089,
"learning_rate": 9.998121226591606e-05,
"loss": 0.0231,
"step": 5830
},
{
"epoch": 3.8446346280447665,
"grad_norm": 0.38544580340385437,
"learning_rate": 9.998075629879382e-05,
"loss": 0.0196,
"step": 5840
},
{
"epoch": 3.8512179065174457,
"grad_norm": 0.2672668695449829,
"learning_rate": 9.99802948658512e-05,
"loss": 0.0218,
"step": 5850
},
{
"epoch": 3.857801184990125,
"grad_norm": 0.4527229368686676,
"learning_rate": 9.99798279671387e-05,
"loss": 0.021,
"step": 5860
},
{
"epoch": 3.8643844634628044,
"grad_norm": 0.3108569085597992,
"learning_rate": 9.997935560270734e-05,
"loss": 0.019,
"step": 5870
},
{
"epoch": 3.870967741935484,
"grad_norm": 0.29073500633239746,
"learning_rate": 9.997887777260879e-05,
"loss": 0.0248,
"step": 5880
},
{
"epoch": 3.877551020408163,
"grad_norm": 0.34856799244880676,
"learning_rate": 9.997839447689532e-05,
"loss": 0.0217,
"step": 5890
},
{
"epoch": 3.8841342988808427,
"grad_norm": 0.3232513666152954,
"learning_rate": 9.997790571561978e-05,
"loss": 0.0203,
"step": 5900
},
{
"epoch": 3.890717577353522,
"grad_norm": 0.31828194856643677,
"learning_rate": 9.99774114888356e-05,
"loss": 0.0264,
"step": 5910
},
{
"epoch": 3.8973008558262014,
"grad_norm": 0.323972225189209,
"learning_rate": 9.997691179659684e-05,
"loss": 0.0241,
"step": 5920
},
{
"epoch": 3.903884134298881,
"grad_norm": 0.21808934211730957,
"learning_rate": 9.997640663895815e-05,
"loss": 0.0211,
"step": 5930
},
{
"epoch": 3.91046741277156,
"grad_norm": 0.29870593547821045,
"learning_rate": 9.997589601597477e-05,
"loss": 0.0235,
"step": 5940
},
{
"epoch": 3.9170506912442398,
"grad_norm": 0.36733514070510864,
"learning_rate": 9.997537992770252e-05,
"loss": 0.0205,
"step": 5950
},
{
"epoch": 3.923633969716919,
"grad_norm": 0.451858252286911,
"learning_rate": 9.997485837419788e-05,
"loss": 0.0211,
"step": 5960
},
{
"epoch": 3.9302172481895985,
"grad_norm": 0.361368328332901,
"learning_rate": 9.997433135551786e-05,
"loss": 0.0221,
"step": 5970
},
{
"epoch": 3.936800526662278,
"grad_norm": 0.40851062536239624,
"learning_rate": 9.997379887172009e-05,
"loss": 0.0176,
"step": 5980
},
{
"epoch": 3.9433838051349572,
"grad_norm": 0.3659604787826538,
"learning_rate": 9.997326092286281e-05,
"loss": 0.0244,
"step": 5990
},
{
"epoch": 3.9499670836076364,
"grad_norm": 0.38411015272140503,
"learning_rate": 9.997271750900486e-05,
"loss": 0.0259,
"step": 6000
},
{
"epoch": 3.956550362080316,
"grad_norm": 0.33671897649765015,
"learning_rate": 9.997216863020565e-05,
"loss": 0.0207,
"step": 6010
},
{
"epoch": 3.9631336405529956,
"grad_norm": 0.3913155496120453,
"learning_rate": 9.99716142865252e-05,
"loss": 0.0217,
"step": 6020
},
{
"epoch": 3.9697169190256747,
"grad_norm": 0.2653172016143799,
"learning_rate": 9.997105447802415e-05,
"loss": 0.0241,
"step": 6030
},
{
"epoch": 3.9763001974983543,
"grad_norm": 0.36002129316329956,
"learning_rate": 9.997048920476373e-05,
"loss": 0.0266,
"step": 6040
},
{
"epoch": 3.9828834759710334,
"grad_norm": 0.4489007294178009,
"learning_rate": 9.996991846680572e-05,
"loss": 0.0217,
"step": 6050
},
{
"epoch": 3.989466754443713,
"grad_norm": 0.4268869161605835,
"learning_rate": 9.996934226421257e-05,
"loss": 0.019,
"step": 6060
},
{
"epoch": 3.9960500329163926,
"grad_norm": 0.3032863736152649,
"learning_rate": 9.996876059704726e-05,
"loss": 0.0212,
"step": 6070
},
{
"epoch": 4.002633311389072,
"grad_norm": 0.3210552930831909,
"learning_rate": 9.996817346537343e-05,
"loss": 0.0176,
"step": 6080
},
{
"epoch": 4.009216589861751,
"grad_norm": 0.3281209468841553,
"learning_rate": 9.996758086925526e-05,
"loss": 0.0244,
"step": 6090
},
{
"epoch": 4.0157998683344305,
"grad_norm": 0.46323278546333313,
"learning_rate": 9.996698280875759e-05,
"loss": 0.023,
"step": 6100
},
{
"epoch": 4.02238314680711,
"grad_norm": 0.32954826951026917,
"learning_rate": 9.99663792839458e-05,
"loss": 0.0263,
"step": 6110
},
{
"epoch": 4.02896642527979,
"grad_norm": 0.3930584788322449,
"learning_rate": 9.99657702948859e-05,
"loss": 0.0224,
"step": 6120
},
{
"epoch": 4.035549703752468,
"grad_norm": 0.2861038148403168,
"learning_rate": 9.996515584164448e-05,
"loss": 0.026,
"step": 6130
},
{
"epoch": 4.042132982225148,
"grad_norm": 0.2715913653373718,
"learning_rate": 9.996453592428873e-05,
"loss": 0.0209,
"step": 6140
},
{
"epoch": 4.0487162606978275,
"grad_norm": 0.34742867946624756,
"learning_rate": 9.996391054288646e-05,
"loss": 0.0198,
"step": 6150
},
{
"epoch": 4.055299539170507,
"grad_norm": 0.3816830515861511,
"learning_rate": 9.996327969750605e-05,
"loss": 0.0251,
"step": 6160
},
{
"epoch": 4.061882817643187,
"grad_norm": 0.3666089177131653,
"learning_rate": 9.996264338821649e-05,
"loss": 0.0242,
"step": 6170
},
{
"epoch": 4.068466096115865,
"grad_norm": 0.4186146855354309,
"learning_rate": 9.996200161508735e-05,
"loss": 0.0236,
"step": 6180
},
{
"epoch": 4.075049374588545,
"grad_norm": 0.41126886010169983,
"learning_rate": 9.996135437818885e-05,
"loss": 0.0194,
"step": 6190
},
{
"epoch": 4.081632653061225,
"grad_norm": 0.35650280117988586,
"learning_rate": 9.996070167759175e-05,
"loss": 0.025,
"step": 6200
},
{
"epoch": 4.088215931533904,
"grad_norm": 0.3541823923587799,
"learning_rate": 9.996004351336743e-05,
"loss": 0.0223,
"step": 6210
},
{
"epoch": 4.094799210006583,
"grad_norm": 0.37730729579925537,
"learning_rate": 9.995937988558785e-05,
"loss": 0.0312,
"step": 6220
},
{
"epoch": 4.1013824884792625,
"grad_norm": 0.36955150961875916,
"learning_rate": 9.995871079432561e-05,
"loss": 0.0259,
"step": 6230
},
{
"epoch": 4.107965766951942,
"grad_norm": 0.293836385011673,
"learning_rate": 9.995803623965389e-05,
"loss": 0.017,
"step": 6240
},
{
"epoch": 4.114549045424622,
"grad_norm": 0.3713253438472748,
"learning_rate": 9.995735622164641e-05,
"loss": 0.0208,
"step": 6250
},
{
"epoch": 4.121132323897301,
"grad_norm": 0.4140655994415283,
"learning_rate": 9.995667074037758e-05,
"loss": 0.0259,
"step": 6260
},
{
"epoch": 4.12771560236998,
"grad_norm": 0.48406219482421875,
"learning_rate": 9.995597979592232e-05,
"loss": 0.0254,
"step": 6270
},
{
"epoch": 4.1342988808426595,
"grad_norm": 0.5829688310623169,
"learning_rate": 9.995528338835625e-05,
"loss": 0.0298,
"step": 6280
},
{
"epoch": 4.140882159315339,
"grad_norm": 0.42463943362236023,
"learning_rate": 9.995458151775547e-05,
"loss": 0.0257,
"step": 6290
},
{
"epoch": 4.147465437788019,
"grad_norm": 0.3564859926700592,
"learning_rate": 9.995387418419677e-05,
"loss": 0.0295,
"step": 6300
},
{
"epoch": 4.154048716260698,
"grad_norm": 0.3211365044116974,
"learning_rate": 9.99531613877575e-05,
"loss": 0.0213,
"step": 6310
},
{
"epoch": 4.160631994733377,
"grad_norm": 0.45185497403144836,
"learning_rate": 9.995244312851559e-05,
"loss": 0.0272,
"step": 6320
},
{
"epoch": 4.167215273206057,
"grad_norm": 0.48442375659942627,
"learning_rate": 9.995171940654961e-05,
"loss": 0.0273,
"step": 6330
},
{
"epoch": 4.173798551678736,
"grad_norm": 0.24598322808742523,
"learning_rate": 9.995099022193871e-05,
"loss": 0.0284,
"step": 6340
},
{
"epoch": 4.180381830151416,
"grad_norm": 0.39929965138435364,
"learning_rate": 9.995025557476261e-05,
"loss": 0.0245,
"step": 6350
},
{
"epoch": 4.186965108624094,
"grad_norm": 0.3389293849468231,
"learning_rate": 9.994951546510165e-05,
"loss": 0.0249,
"step": 6360
},
{
"epoch": 4.193548387096774,
"grad_norm": 0.3898114860057831,
"learning_rate": 9.994876989303679e-05,
"loss": 0.0265,
"step": 6370
},
{
"epoch": 4.200131665569454,
"grad_norm": 0.36348646879196167,
"learning_rate": 9.994801885864955e-05,
"loss": 0.0239,
"step": 6380
},
{
"epoch": 4.206714944042133,
"grad_norm": 0.2853721082210541,
"learning_rate": 9.994726236202205e-05,
"loss": 0.0291,
"step": 6390
},
{
"epoch": 4.213298222514813,
"grad_norm": 0.3526742458343506,
"learning_rate": 9.994650040323704e-05,
"loss": 0.0265,
"step": 6400
},
{
"epoch": 4.2198815009874915,
"grad_norm": 0.3583781123161316,
"learning_rate": 9.994573298237784e-05,
"loss": 0.0257,
"step": 6410
},
{
"epoch": 4.226464779460171,
"grad_norm": 0.38797321915626526,
"learning_rate": 9.994496009952837e-05,
"loss": 0.0249,
"step": 6420
},
{
"epoch": 4.233048057932851,
"grad_norm": 0.5112622976303101,
"learning_rate": 9.994418175477316e-05,
"loss": 0.0275,
"step": 6430
},
{
"epoch": 4.23963133640553,
"grad_norm": 0.5210843682289124,
"learning_rate": 9.994339794819733e-05,
"loss": 0.0213,
"step": 6440
},
{
"epoch": 4.246214614878209,
"grad_norm": 0.30685919523239136,
"learning_rate": 9.994260867988658e-05,
"loss": 0.0265,
"step": 6450
},
{
"epoch": 4.2527978933508885,
"grad_norm": 0.35485345125198364,
"learning_rate": 9.994181394992723e-05,
"loss": 0.017,
"step": 6460
},
{
"epoch": 4.259381171823568,
"grad_norm": 0.28581544756889343,
"learning_rate": 9.994101375840618e-05,
"loss": 0.0234,
"step": 6470
},
{
"epoch": 4.265964450296248,
"grad_norm": 0.3446362018585205,
"learning_rate": 9.994020810541098e-05,
"loss": 0.0169,
"step": 6480
},
{
"epoch": 4.272547728768927,
"grad_norm": 0.3929535746574402,
"learning_rate": 9.99393969910297e-05,
"loss": 0.0178,
"step": 6490
},
{
"epoch": 4.279131007241606,
"grad_norm": 0.3449273109436035,
"learning_rate": 9.993858041535104e-05,
"loss": 0.0187,
"step": 6500
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.4571034908294678,
"learning_rate": 9.99377583784643e-05,
"loss": 0.0287,
"step": 6510
},
{
"epoch": 4.292297564186965,
"grad_norm": 0.3422394394874573,
"learning_rate": 9.993693088045939e-05,
"loss": 0.0221,
"step": 6520
},
{
"epoch": 4.298880842659645,
"grad_norm": 0.31809157133102417,
"learning_rate": 9.99360979214268e-05,
"loss": 0.0181,
"step": 6530
},
{
"epoch": 4.305464121132324,
"grad_norm": 0.36174312233924866,
"learning_rate": 9.99352595014576e-05,
"loss": 0.0259,
"step": 6540
},
{
"epoch": 4.312047399605003,
"grad_norm": 0.3008595407009125,
"learning_rate": 9.993441562064354e-05,
"loss": 0.0212,
"step": 6550
},
{
"epoch": 4.318630678077683,
"grad_norm": 0.3762459456920624,
"learning_rate": 9.993356627907685e-05,
"loss": 0.0174,
"step": 6560
},
{
"epoch": 4.325213956550362,
"grad_norm": 0.6818409562110901,
"learning_rate": 9.99327114768504e-05,
"loss": 0.0252,
"step": 6570
},
{
"epoch": 4.331797235023042,
"grad_norm": 0.3158642649650574,
"learning_rate": 9.99318512140577e-05,
"loss": 0.0189,
"step": 6580
},
{
"epoch": 4.3383805134957205,
"grad_norm": 0.3021540343761444,
"learning_rate": 9.993098549079284e-05,
"loss": 0.0232,
"step": 6590
},
{
"epoch": 4.3449637919684,
"grad_norm": 0.36075422167778015,
"learning_rate": 9.993011430715047e-05,
"loss": 0.0208,
"step": 6600
},
{
"epoch": 4.35154707044108,
"grad_norm": 0.31216225028038025,
"learning_rate": 9.992923766322586e-05,
"loss": 0.0145,
"step": 6610
},
{
"epoch": 4.358130348913759,
"grad_norm": 0.37069186568260193,
"learning_rate": 9.99283555591149e-05,
"loss": 0.0253,
"step": 6620
},
{
"epoch": 4.364713627386438,
"grad_norm": 0.42145857214927673,
"learning_rate": 9.992746799491404e-05,
"loss": 0.0175,
"step": 6630
},
{
"epoch": 4.371296905859118,
"grad_norm": 0.35462507605552673,
"learning_rate": 9.992657497072033e-05,
"loss": 0.017,
"step": 6640
},
{
"epoch": 4.377880184331797,
"grad_norm": 0.3720964789390564,
"learning_rate": 9.992567648663147e-05,
"loss": 0.0158,
"step": 6650
},
{
"epoch": 4.384463462804477,
"grad_norm": 0.42377179861068726,
"learning_rate": 9.992477254274568e-05,
"loss": 0.0244,
"step": 6660
},
{
"epoch": 4.391046741277156,
"grad_norm": 0.4149368703365326,
"learning_rate": 9.992386313916183e-05,
"loss": 0.0206,
"step": 6670
},
{
"epoch": 4.397630019749835,
"grad_norm": 0.3542799651622772,
"learning_rate": 9.992294827597934e-05,
"loss": 0.0204,
"step": 6680
},
{
"epoch": 4.404213298222515,
"grad_norm": 0.30297449231147766,
"learning_rate": 9.992202795329831e-05,
"loss": 0.0235,
"step": 6690
},
{
"epoch": 4.410796576695194,
"grad_norm": 0.2499985247850418,
"learning_rate": 9.992110217121936e-05,
"loss": 0.0255,
"step": 6700
},
{
"epoch": 4.417379855167874,
"grad_norm": 0.38808703422546387,
"learning_rate": 9.992017092984372e-05,
"loss": 0.0208,
"step": 6710
},
{
"epoch": 4.423963133640553,
"grad_norm": 0.3152003586292267,
"learning_rate": 9.991923422927326e-05,
"loss": 0.0193,
"step": 6720
},
{
"epoch": 4.430546412113232,
"grad_norm": 0.4033868908882141,
"learning_rate": 9.991829206961037e-05,
"loss": 0.0301,
"step": 6730
},
{
"epoch": 4.437129690585912,
"grad_norm": 0.24699853360652924,
"learning_rate": 9.991734445095813e-05,
"loss": 0.0274,
"step": 6740
},
{
"epoch": 4.443712969058591,
"grad_norm": 0.31773144006729126,
"learning_rate": 9.991639137342015e-05,
"loss": 0.0195,
"step": 6750
},
{
"epoch": 4.450296247531271,
"grad_norm": 0.24428239464759827,
"learning_rate": 9.991543283710064e-05,
"loss": 0.0269,
"step": 6760
},
{
"epoch": 4.45687952600395,
"grad_norm": 0.34858056902885437,
"learning_rate": 9.991446884210445e-05,
"loss": 0.0266,
"step": 6770
},
{
"epoch": 4.463462804476629,
"grad_norm": 0.4921180307865143,
"learning_rate": 9.9913499388537e-05,
"loss": 0.0228,
"step": 6780
},
{
"epoch": 4.470046082949309,
"grad_norm": 0.3228455185890198,
"learning_rate": 9.99125244765043e-05,
"loss": 0.02,
"step": 6790
},
{
"epoch": 4.476629361421988,
"grad_norm": 0.4304894804954529,
"learning_rate": 9.991154410611296e-05,
"loss": 0.031,
"step": 6800
},
{
"epoch": 4.483212639894668,
"grad_norm": 0.39444026350975037,
"learning_rate": 9.99105582774702e-05,
"loss": 0.0236,
"step": 6810
},
{
"epoch": 4.489795918367347,
"grad_norm": 0.3531991243362427,
"learning_rate": 9.990956699068384e-05,
"loss": 0.0206,
"step": 6820
},
{
"epoch": 4.496379196840026,
"grad_norm": 0.39403006434440613,
"learning_rate": 9.990857024586224e-05,
"loss": 0.0265,
"step": 6830
},
{
"epoch": 4.502962475312706,
"grad_norm": 0.3178650438785553,
"learning_rate": 9.990756804311446e-05,
"loss": 0.021,
"step": 6840
},
{
"epoch": 4.509545753785385,
"grad_norm": 0.32949298620224,
"learning_rate": 9.990656038255006e-05,
"loss": 0.0228,
"step": 6850
},
{
"epoch": 4.516129032258064,
"grad_norm": 0.5329727530479431,
"learning_rate": 9.990554726427926e-05,
"loss": 0.018,
"step": 6860
},
{
"epoch": 4.522712310730744,
"grad_norm": 0.41025176644325256,
"learning_rate": 9.990452868841284e-05,
"loss": 0.0187,
"step": 6870
},
{
"epoch": 4.529295589203423,
"grad_norm": 0.5469089150428772,
"learning_rate": 9.99035046550622e-05,
"loss": 0.0288,
"step": 6880
},
{
"epoch": 4.535878867676103,
"grad_norm": 0.27765026688575745,
"learning_rate": 9.99024751643393e-05,
"loss": 0.0177,
"step": 6890
},
{
"epoch": 4.542462146148782,
"grad_norm": 0.4035395383834839,
"learning_rate": 9.990144021635677e-05,
"loss": 0.0208,
"step": 6900
},
{
"epoch": 4.549045424621461,
"grad_norm": 0.27665823698043823,
"learning_rate": 9.990039981122775e-05,
"loss": 0.024,
"step": 6910
},
{
"epoch": 4.555628703094141,
"grad_norm": 0.4746190905570984,
"learning_rate": 9.989935394906602e-05,
"loss": 0.0256,
"step": 6920
},
{
"epoch": 4.56221198156682,
"grad_norm": 0.32918480038642883,
"learning_rate": 9.989830262998598e-05,
"loss": 0.0196,
"step": 6930
},
{
"epoch": 4.5687952600395,
"grad_norm": 0.2479863166809082,
"learning_rate": 9.989724585410259e-05,
"loss": 0.0209,
"step": 6940
},
{
"epoch": 4.5753785385121795,
"grad_norm": 0.26176539063453674,
"learning_rate": 9.989618362153139e-05,
"loss": 0.0228,
"step": 6950
},
{
"epoch": 4.581961816984858,
"grad_norm": 0.3173213303089142,
"learning_rate": 9.989511593238859e-05,
"loss": 0.0217,
"step": 6960
},
{
"epoch": 4.588545095457538,
"grad_norm": 0.36864015460014343,
"learning_rate": 9.98940427867909e-05,
"loss": 0.0185,
"step": 6970
},
{
"epoch": 4.595128373930217,
"grad_norm": 0.37078654766082764,
"learning_rate": 9.989296418485573e-05,
"loss": 0.0205,
"step": 6980
},
{
"epoch": 4.601711652402897,
"grad_norm": 0.3953816592693329,
"learning_rate": 9.989188012670101e-05,
"loss": 0.0332,
"step": 6990
},
{
"epoch": 4.6082949308755765,
"grad_norm": 0.3798612058162689,
"learning_rate": 9.989079061244528e-05,
"loss": 0.022,
"step": 7000
},
{
"epoch": 4.614878209348255,
"grad_norm": 0.5186861157417297,
"learning_rate": 9.988969564220769e-05,
"loss": 0.027,
"step": 7010
},
{
"epoch": 4.621461487820935,
"grad_norm": 0.37296605110168457,
"learning_rate": 9.988859521610801e-05,
"loss": 0.0228,
"step": 7020
},
{
"epoch": 4.628044766293614,
"grad_norm": 0.42057400941848755,
"learning_rate": 9.988748933426656e-05,
"loss": 0.0205,
"step": 7030
},
{
"epoch": 4.634628044766294,
"grad_norm": 0.37162378430366516,
"learning_rate": 9.988637799680428e-05,
"loss": 0.0226,
"step": 7040
},
{
"epoch": 4.641211323238973,
"grad_norm": 0.35913753509521484,
"learning_rate": 9.98852612038427e-05,
"loss": 0.0223,
"step": 7050
},
{
"epoch": 4.647794601711652,
"grad_norm": 0.38595256209373474,
"learning_rate": 9.988413895550397e-05,
"loss": 0.0224,
"step": 7060
},
{
"epoch": 4.654377880184332,
"grad_norm": 0.3278777599334717,
"learning_rate": 9.98830112519108e-05,
"loss": 0.0244,
"step": 7070
},
{
"epoch": 4.660961158657011,
"grad_norm": 0.46379902958869934,
"learning_rate": 9.98818780931865e-05,
"loss": 0.0245,
"step": 7080
},
{
"epoch": 4.66754443712969,
"grad_norm": 0.3076514005661011,
"learning_rate": 9.988073947945502e-05,
"loss": 0.0208,
"step": 7090
},
{
"epoch": 4.67412771560237,
"grad_norm": 0.3563447594642639,
"learning_rate": 9.987959541084087e-05,
"loss": 0.0248,
"step": 7100
},
{
"epoch": 4.680710994075049,
"grad_norm": 0.3640218675136566,
"learning_rate": 9.987844588746915e-05,
"loss": 0.0235,
"step": 7110
},
{
"epoch": 4.687294272547729,
"grad_norm": 0.4865831434726715,
"learning_rate": 9.987729090946558e-05,
"loss": 0.0213,
"step": 7120
},
{
"epoch": 4.6938775510204085,
"grad_norm": 0.27077776193618774,
"learning_rate": 9.987613047695647e-05,
"loss": 0.0249,
"step": 7130
},
{
"epoch": 4.700460829493087,
"grad_norm": 0.4091435372829437,
"learning_rate": 9.987496459006871e-05,
"loss": 0.0252,
"step": 7140
},
{
"epoch": 4.707044107965767,
"grad_norm": 0.31777116656303406,
"learning_rate": 9.987379324892982e-05,
"loss": 0.0187,
"step": 7150
},
{
"epoch": 4.713627386438446,
"grad_norm": 0.2603038251399994,
"learning_rate": 9.987261645366788e-05,
"loss": 0.0248,
"step": 7160
},
{
"epoch": 4.720210664911126,
"grad_norm": 0.31177574396133423,
"learning_rate": 9.987143420441158e-05,
"loss": 0.0197,
"step": 7170
},
{
"epoch": 4.7267939433838055,
"grad_norm": 0.30174535512924194,
"learning_rate": 9.987024650129022e-05,
"loss": 0.0199,
"step": 7180
},
{
"epoch": 4.733377221856484,
"grad_norm": 0.3529543876647949,
"learning_rate": 9.986905334443368e-05,
"loss": 0.0199,
"step": 7190
},
{
"epoch": 4.739960500329164,
"grad_norm": 0.37991321086883545,
"learning_rate": 9.986785473397245e-05,
"loss": 0.0257,
"step": 7200
},
{
"epoch": 4.746543778801843,
"grad_norm": 0.3611675500869751,
"learning_rate": 9.98666506700376e-05,
"loss": 0.0195,
"step": 7210
},
{
"epoch": 4.753127057274523,
"grad_norm": 0.3074056804180145,
"learning_rate": 9.986544115276081e-05,
"loss": 0.0163,
"step": 7220
},
{
"epoch": 4.759710335747203,
"grad_norm": 0.29158279299736023,
"learning_rate": 9.986422618227433e-05,
"loss": 0.0195,
"step": 7230
},
{
"epoch": 4.766293614219881,
"grad_norm": 0.2504565715789795,
"learning_rate": 9.986300575871106e-05,
"loss": 0.0195,
"step": 7240
},
{
"epoch": 4.772876892692561,
"grad_norm": 0.36455443501472473,
"learning_rate": 9.986177988220444e-05,
"loss": 0.0231,
"step": 7250
},
{
"epoch": 4.7794601711652405,
"grad_norm": 0.281720906496048,
"learning_rate": 9.986054855288856e-05,
"loss": 0.019,
"step": 7260
},
{
"epoch": 4.78604344963792,
"grad_norm": 0.3896733820438385,
"learning_rate": 9.985931177089802e-05,
"loss": 0.0273,
"step": 7270
},
{
"epoch": 4.792626728110599,
"grad_norm": 0.45102396607398987,
"learning_rate": 9.985806953636814e-05,
"loss": 0.0193,
"step": 7280
},
{
"epoch": 4.799210006583278,
"grad_norm": 0.3472176790237427,
"learning_rate": 9.985682184943471e-05,
"loss": 0.0216,
"step": 7290
},
{
"epoch": 4.805793285055958,
"grad_norm": 0.3363492786884308,
"learning_rate": 9.98555687102342e-05,
"loss": 0.0313,
"step": 7300
},
{
"epoch": 4.8123765635286375,
"grad_norm": 0.39487481117248535,
"learning_rate": 9.985431011890367e-05,
"loss": 0.0191,
"step": 7310
},
{
"epoch": 4.818959842001316,
"grad_norm": 0.3850216567516327,
"learning_rate": 9.985304607558075e-05,
"loss": 0.02,
"step": 7320
},
{
"epoch": 4.825543120473996,
"grad_norm": 0.3322221040725708,
"learning_rate": 9.985177658040364e-05,
"loss": 0.0231,
"step": 7330
},
{
"epoch": 4.832126398946675,
"grad_norm": 0.5157605409622192,
"learning_rate": 9.985050163351119e-05,
"loss": 0.0225,
"step": 7340
},
{
"epoch": 4.838709677419355,
"grad_norm": 0.3799228072166443,
"learning_rate": 9.984922123504286e-05,
"loss": 0.0225,
"step": 7350
},
{
"epoch": 4.845292955892035,
"grad_norm": 0.3860776722431183,
"learning_rate": 9.984793538513862e-05,
"loss": 0.0241,
"step": 7360
},
{
"epoch": 4.851876234364713,
"grad_norm": 0.2610214948654175,
"learning_rate": 9.984664408393912e-05,
"loss": 0.024,
"step": 7370
},
{
"epoch": 4.858459512837393,
"grad_norm": 0.21491529047489166,
"learning_rate": 9.984534733158556e-05,
"loss": 0.0227,
"step": 7380
},
{
"epoch": 4.8650427913100724,
"grad_norm": 0.2675805687904358,
"learning_rate": 9.984404512821977e-05,
"loss": 0.02,
"step": 7390
},
{
"epoch": 4.871626069782752,
"grad_norm": 0.34597620368003845,
"learning_rate": 9.984273747398411e-05,
"loss": 0.0195,
"step": 7400
},
{
"epoch": 4.878209348255432,
"grad_norm": 0.3053875267505646,
"learning_rate": 9.984142436902165e-05,
"loss": 0.0187,
"step": 7410
},
{
"epoch": 4.88479262672811,
"grad_norm": 0.317487508058548,
"learning_rate": 9.984010581347596e-05,
"loss": 0.0323,
"step": 7420
},
{
"epoch": 4.89137590520079,
"grad_norm": 0.29832401871681213,
"learning_rate": 9.983878180749121e-05,
"loss": 0.0264,
"step": 7430
},
{
"epoch": 4.8979591836734695,
"grad_norm": 0.4275784492492676,
"learning_rate": 9.983745235121222e-05,
"loss": 0.0181,
"step": 7440
},
{
"epoch": 4.904542462146149,
"grad_norm": 0.39839449524879456,
"learning_rate": 9.983611744478438e-05,
"loss": 0.0197,
"step": 7450
},
{
"epoch": 4.911125740618829,
"grad_norm": 0.34340113401412964,
"learning_rate": 9.983477708835365e-05,
"loss": 0.0208,
"step": 7460
},
{
"epoch": 4.917709019091507,
"grad_norm": 0.3064946234226227,
"learning_rate": 9.983343128206664e-05,
"loss": 0.0232,
"step": 7470
},
{
"epoch": 4.924292297564187,
"grad_norm": 0.32203012704849243,
"learning_rate": 9.983208002607049e-05,
"loss": 0.0278,
"step": 7480
},
{
"epoch": 4.9308755760368665,
"grad_norm": 0.34060534834861755,
"learning_rate": 9.9830723320513e-05,
"loss": 0.0216,
"step": 7490
},
{
"epoch": 4.937458854509546,
"grad_norm": 0.3820332884788513,
"learning_rate": 9.982936116554254e-05,
"loss": 0.0233,
"step": 7500
},
{
"epoch": 4.944042132982225,
"grad_norm": 0.5205534100532532,
"learning_rate": 9.982799356130803e-05,
"loss": 0.0165,
"step": 7510
},
{
"epoch": 4.950625411454904,
"grad_norm": 0.3744429647922516,
"learning_rate": 9.982662050795908e-05,
"loss": 0.0246,
"step": 7520
},
{
"epoch": 4.957208689927584,
"grad_norm": 0.38129329681396484,
"learning_rate": 9.982524200564583e-05,
"loss": 0.0298,
"step": 7530
},
{
"epoch": 4.963791968400264,
"grad_norm": 0.40962719917297363,
"learning_rate": 9.982385805451901e-05,
"loss": 0.0229,
"step": 7540
},
{
"epoch": 4.970375246872942,
"grad_norm": 0.3934646248817444,
"learning_rate": 9.982246865472998e-05,
"loss": 0.024,
"step": 7550
},
{
"epoch": 4.976958525345622,
"grad_norm": 0.4171105921268463,
"learning_rate": 9.982107380643069e-05,
"loss": 0.02,
"step": 7560
},
{
"epoch": 4.9835418038183015,
"grad_norm": 0.3089905381202698,
"learning_rate": 9.981967350977368e-05,
"loss": 0.0161,
"step": 7570
},
{
"epoch": 4.990125082290981,
"grad_norm": 0.4313291013240814,
"learning_rate": 9.981826776491208e-05,
"loss": 0.0224,
"step": 7580
},
{
"epoch": 4.996708360763661,
"grad_norm": 0.2518802285194397,
"learning_rate": 9.98168565719996e-05,
"loss": 0.0242,
"step": 7590
},
{
"epoch": 5.003291639236339,
"grad_norm": 0.3616885542869568,
"learning_rate": 9.98154399311906e-05,
"loss": 0.0208,
"step": 7600
},
{
"epoch": 5.009874917709019,
"grad_norm": 0.3517681956291199,
"learning_rate": 9.981401784263997e-05,
"loss": 0.0172,
"step": 7610
},
{
"epoch": 5.0164581961816985,
"grad_norm": 0.31701499223709106,
"learning_rate": 9.981259030650326e-05,
"loss": 0.0157,
"step": 7620
},
{
"epoch": 5.023041474654378,
"grad_norm": 0.33666568994522095,
"learning_rate": 9.981115732293655e-05,
"loss": 0.0291,
"step": 7630
},
{
"epoch": 5.029624753127058,
"grad_norm": 0.3851187527179718,
"learning_rate": 9.980971889209659e-05,
"loss": 0.018,
"step": 7640
},
{
"epoch": 5.036208031599736,
"grad_norm": 0.3008062541484833,
"learning_rate": 9.980827501414064e-05,
"loss": 0.0251,
"step": 7650
},
{
"epoch": 5.042791310072416,
"grad_norm": 0.29571911692619324,
"learning_rate": 9.980682568922663e-05,
"loss": 0.0216,
"step": 7660
},
{
"epoch": 5.049374588545096,
"grad_norm": 0.297370582818985,
"learning_rate": 9.980537091751304e-05,
"loss": 0.0207,
"step": 7670
},
{
"epoch": 5.055957867017775,
"grad_norm": 0.32923704385757446,
"learning_rate": 9.980391069915897e-05,
"loss": 0.0221,
"step": 7680
},
{
"epoch": 5.062541145490454,
"grad_norm": 0.2297673374414444,
"learning_rate": 9.98024450343241e-05,
"loss": 0.0196,
"step": 7690
},
{
"epoch": 5.0691244239631335,
"grad_norm": 0.3981427252292633,
"learning_rate": 9.980097392316872e-05,
"loss": 0.0185,
"step": 7700
},
{
"epoch": 5.075707702435813,
"grad_norm": 0.2640717923641205,
"learning_rate": 9.97994973658537e-05,
"loss": 0.0189,
"step": 7710
},
{
"epoch": 5.082290980908493,
"grad_norm": 0.4028050899505615,
"learning_rate": 9.979801536254054e-05,
"loss": 0.0246,
"step": 7720
},
{
"epoch": 5.088874259381172,
"grad_norm": 0.36920779943466187,
"learning_rate": 9.979652791339127e-05,
"loss": 0.0221,
"step": 7730
},
{
"epoch": 5.095457537853851,
"grad_norm": 0.35855594277381897,
"learning_rate": 9.97950350185686e-05,
"loss": 0.0209,
"step": 7740
},
{
"epoch": 5.1020408163265305,
"grad_norm": 0.3702905476093292,
"learning_rate": 9.979353667823574e-05,
"loss": 0.0231,
"step": 7750
},
{
"epoch": 5.10862409479921,
"grad_norm": 0.3827485144138336,
"learning_rate": 9.979203289255658e-05,
"loss": 0.022,
"step": 7760
},
{
"epoch": 5.11520737327189,
"grad_norm": 0.3270743787288666,
"learning_rate": 9.979052366169557e-05,
"loss": 0.0176,
"step": 7770
},
{
"epoch": 5.121790651744568,
"grad_norm": 0.31202802062034607,
"learning_rate": 9.978900898581775e-05,
"loss": 0.0199,
"step": 7780
},
{
"epoch": 5.128373930217248,
"grad_norm": 0.3751831650733948,
"learning_rate": 9.978748886508875e-05,
"loss": 0.0256,
"step": 7790
},
{
"epoch": 5.1349572086899276,
"grad_norm": 0.29420793056488037,
"learning_rate": 9.978596329967484e-05,
"loss": 0.0171,
"step": 7800
},
{
"epoch": 5.141540487162607,
"grad_norm": 0.28880563378334045,
"learning_rate": 9.978443228974284e-05,
"loss": 0.0212,
"step": 7810
},
{
"epoch": 5.148123765635287,
"grad_norm": 0.42300334572792053,
"learning_rate": 9.978289583546015e-05,
"loss": 0.0205,
"step": 7820
},
{
"epoch": 5.154707044107965,
"grad_norm": 0.4288804233074188,
"learning_rate": 9.978135393699484e-05,
"loss": 0.0232,
"step": 7830
},
{
"epoch": 5.161290322580645,
"grad_norm": 0.29093754291534424,
"learning_rate": 9.977980659451548e-05,
"loss": 0.0171,
"step": 7840
},
{
"epoch": 5.167873601053325,
"grad_norm": 0.26314979791641235,
"learning_rate": 9.977825380819135e-05,
"loss": 0.0208,
"step": 7850
},
{
"epoch": 5.174456879526004,
"grad_norm": 0.5064106583595276,
"learning_rate": 9.97766955781922e-05,
"loss": 0.0188,
"step": 7860
},
{
"epoch": 5.181040157998684,
"grad_norm": 0.31865066289901733,
"learning_rate": 9.977513190468848e-05,
"loss": 0.0225,
"step": 7870
},
{
"epoch": 5.1876234364713625,
"grad_norm": 0.3516758382320404,
"learning_rate": 9.977356278785116e-05,
"loss": 0.0211,
"step": 7880
},
{
"epoch": 5.194206714944042,
"grad_norm": 0.38052287697792053,
"learning_rate": 9.977198822785184e-05,
"loss": 0.0212,
"step": 7890
},
{
"epoch": 5.200789993416722,
"grad_norm": 0.2903013527393341,
"learning_rate": 9.977040822486273e-05,
"loss": 0.0247,
"step": 7900
},
{
"epoch": 5.207373271889401,
"grad_norm": 0.4080137014389038,
"learning_rate": 9.97688227790566e-05,
"loss": 0.0237,
"step": 7910
},
{
"epoch": 5.21395655036208,
"grad_norm": 0.38828808069229126,
"learning_rate": 9.976723189060684e-05,
"loss": 0.0191,
"step": 7920
},
{
"epoch": 5.2205398288347595,
"grad_norm": 0.31710729002952576,
"learning_rate": 9.976563555968742e-05,
"loss": 0.0196,
"step": 7930
},
{
"epoch": 5.227123107307439,
"grad_norm": 0.34222549200057983,
"learning_rate": 9.976403378647292e-05,
"loss": 0.025,
"step": 7940
},
{
"epoch": 5.233706385780119,
"grad_norm": 0.3185136020183563,
"learning_rate": 9.97624265711385e-05,
"loss": 0.0199,
"step": 7950
},
{
"epoch": 5.240289664252798,
"grad_norm": 0.2891014516353607,
"learning_rate": 9.976081391385993e-05,
"loss": 0.0187,
"step": 7960
},
{
"epoch": 5.246872942725477,
"grad_norm": 0.31885841488838196,
"learning_rate": 9.975919581481356e-05,
"loss": 0.021,
"step": 7970
},
{
"epoch": 5.253456221198157,
"grad_norm": 0.28424131870269775,
"learning_rate": 9.975757227417634e-05,
"loss": 0.0176,
"step": 7980
},
{
"epoch": 5.260039499670836,
"grad_norm": 0.3443284332752228,
"learning_rate": 9.975594329212586e-05,
"loss": 0.0187,
"step": 7990
},
{
"epoch": 5.266622778143516,
"grad_norm": 0.24576932191848755,
"learning_rate": 9.97543088688402e-05,
"loss": 0.0209,
"step": 8000
},
{
"epoch": 5.2732060566161945,
"grad_norm": 0.27556195855140686,
"learning_rate": 9.975266900449814e-05,
"loss": 0.0184,
"step": 8010
},
{
"epoch": 5.279789335088874,
"grad_norm": 0.26561492681503296,
"learning_rate": 9.975102369927898e-05,
"loss": 0.0204,
"step": 8020
},
{
"epoch": 5.286372613561554,
"grad_norm": 0.4152951240539551,
"learning_rate": 9.974937295336269e-05,
"loss": 0.0184,
"step": 8030
},
{
"epoch": 5.292955892034233,
"grad_norm": 0.3729632496833801,
"learning_rate": 9.974771676692975e-05,
"loss": 0.0192,
"step": 8040
},
{
"epoch": 5.299539170506913,
"grad_norm": 0.465273380279541,
"learning_rate": 9.974605514016131e-05,
"loss": 0.0221,
"step": 8050
},
{
"epoch": 5.3061224489795915,
"grad_norm": 0.4699523448944092,
"learning_rate": 9.974438807323907e-05,
"loss": 0.0233,
"step": 8060
},
{
"epoch": 5.312705727452271,
"grad_norm": 0.3265931308269501,
"learning_rate": 9.974271556634535e-05,
"loss": 0.0216,
"step": 8070
},
{
"epoch": 5.319289005924951,
"grad_norm": 0.24461212754249573,
"learning_rate": 9.974103761966302e-05,
"loss": 0.0247,
"step": 8080
},
{
"epoch": 5.32587228439763,
"grad_norm": 0.25693199038505554,
"learning_rate": 9.973935423337563e-05,
"loss": 0.0169,
"step": 8090
},
{
"epoch": 5.33245556287031,
"grad_norm": 0.29432883858680725,
"learning_rate": 9.973766540766722e-05,
"loss": 0.0191,
"step": 8100
},
{
"epoch": 5.339038841342989,
"grad_norm": 0.31555745005607605,
"learning_rate": 9.97359711427225e-05,
"loss": 0.0206,
"step": 8110
},
{
"epoch": 5.345622119815668,
"grad_norm": 0.28863316774368286,
"learning_rate": 9.973427143872677e-05,
"loss": 0.0252,
"step": 8120
},
{
"epoch": 5.352205398288348,
"grad_norm": 0.3105698823928833,
"learning_rate": 9.973256629586589e-05,
"loss": 0.021,
"step": 8130
},
{
"epoch": 5.358788676761027,
"grad_norm": 0.29443371295928955,
"learning_rate": 9.973085571432632e-05,
"loss": 0.02,
"step": 8140
},
{
"epoch": 5.365371955233706,
"grad_norm": 0.21303094923496246,
"learning_rate": 9.972913969429513e-05,
"loss": 0.0162,
"step": 8150
},
{
"epoch": 5.371955233706386,
"grad_norm": 0.4356093108654022,
"learning_rate": 9.972741823596e-05,
"loss": 0.0201,
"step": 8160
},
{
"epoch": 5.378538512179065,
"grad_norm": 0.3321067690849304,
"learning_rate": 9.972569133950917e-05,
"loss": 0.0213,
"step": 8170
},
{
"epoch": 5.385121790651745,
"grad_norm": 0.39959898591041565,
"learning_rate": 9.972395900513151e-05,
"loss": 0.0179,
"step": 8180
},
{
"epoch": 5.391705069124424,
"grad_norm": 0.3250821828842163,
"learning_rate": 9.972222123301645e-05,
"loss": 0.0234,
"step": 8190
},
{
"epoch": 5.398288347597103,
"grad_norm": 0.44511812925338745,
"learning_rate": 9.972047802335403e-05,
"loss": 0.0217,
"step": 8200
},
{
"epoch": 5.404871626069783,
"grad_norm": 0.38315075635910034,
"learning_rate": 9.971872937633488e-05,
"loss": 0.0225,
"step": 8210
},
{
"epoch": 5.411454904542462,
"grad_norm": 0.33623558282852173,
"learning_rate": 9.971697529215024e-05,
"loss": 0.0239,
"step": 8220
},
{
"epoch": 5.418038183015142,
"grad_norm": 0.3871420919895172,
"learning_rate": 9.971521577099192e-05,
"loss": 0.0191,
"step": 8230
},
{
"epoch": 5.4246214614878205,
"grad_norm": 0.3877359628677368,
"learning_rate": 9.971345081305236e-05,
"loss": 0.0236,
"step": 8240
},
{
"epoch": 5.4312047399605,
"grad_norm": 0.30630284547805786,
"learning_rate": 9.971168041852456e-05,
"loss": 0.0202,
"step": 8250
},
{
"epoch": 5.43778801843318,
"grad_norm": 0.3268486261367798,
"learning_rate": 9.970990458760215e-05,
"loss": 0.0215,
"step": 8260
},
{
"epoch": 5.444371296905859,
"grad_norm": 0.30865469574928284,
"learning_rate": 9.970812332047929e-05,
"loss": 0.0179,
"step": 8270
},
{
"epoch": 5.450954575378539,
"grad_norm": 0.4323071837425232,
"learning_rate": 9.97063366173508e-05,
"loss": 0.0219,
"step": 8280
},
{
"epoch": 5.457537853851218,
"grad_norm": 0.29873839020729065,
"learning_rate": 9.970454447841207e-05,
"loss": 0.0218,
"step": 8290
},
{
"epoch": 5.464121132323897,
"grad_norm": 0.3131571412086487,
"learning_rate": 9.970274690385909e-05,
"loss": 0.0154,
"step": 8300
},
{
"epoch": 5.470704410796577,
"grad_norm": 0.34280869364738464,
"learning_rate": 9.970094389388844e-05,
"loss": 0.0133,
"step": 8310
},
{
"epoch": 5.477287689269256,
"grad_norm": 0.3649035692214966,
"learning_rate": 9.969913544869728e-05,
"loss": 0.0214,
"step": 8320
},
{
"epoch": 5.483870967741936,
"grad_norm": 0.29891666769981384,
"learning_rate": 9.96973215684834e-05,
"loss": 0.0183,
"step": 8330
},
{
"epoch": 5.490454246214615,
"grad_norm": 0.41757312417030334,
"learning_rate": 9.969550225344513e-05,
"loss": 0.0233,
"step": 8340
},
{
"epoch": 5.497037524687294,
"grad_norm": 0.350940078496933,
"learning_rate": 9.969367750378147e-05,
"loss": 0.0181,
"step": 8350
},
{
"epoch": 5.503620803159974,
"grad_norm": 0.41458868980407715,
"learning_rate": 9.969184731969194e-05,
"loss": 0.0276,
"step": 8360
},
{
"epoch": 5.510204081632653,
"grad_norm": 0.2997893691062927,
"learning_rate": 9.96900117013767e-05,
"loss": 0.0257,
"step": 8370
},
{
"epoch": 5.516787360105332,
"grad_norm": 0.35646510124206543,
"learning_rate": 9.96881706490365e-05,
"loss": 0.0213,
"step": 8380
},
{
"epoch": 5.523370638578012,
"grad_norm": 0.34611764550209045,
"learning_rate": 9.968632416287265e-05,
"loss": 0.0208,
"step": 8390
},
{
"epoch": 5.529953917050691,
"grad_norm": 0.27896642684936523,
"learning_rate": 9.96844722430871e-05,
"loss": 0.0169,
"step": 8400
},
{
"epoch": 5.536537195523371,
"grad_norm": 0.29233112931251526,
"learning_rate": 9.968261488988235e-05,
"loss": 0.0175,
"step": 8410
},
{
"epoch": 5.54312047399605,
"grad_norm": 0.33013954758644104,
"learning_rate": 9.968075210346155e-05,
"loss": 0.0151,
"step": 8420
},
{
"epoch": 5.549703752468729,
"grad_norm": 0.35673898458480835,
"learning_rate": 9.967888388402839e-05,
"loss": 0.0217,
"step": 8430
},
{
"epoch": 5.556287030941409,
"grad_norm": 0.35260915756225586,
"learning_rate": 9.967701023178717e-05,
"loss": 0.0153,
"step": 8440
},
{
"epoch": 5.562870309414088,
"grad_norm": 0.40267956256866455,
"learning_rate": 9.967513114694282e-05,
"loss": 0.0184,
"step": 8450
},
{
"epoch": 5.569453587886768,
"grad_norm": 0.37056267261505127,
"learning_rate": 9.967324662970079e-05,
"loss": 0.0164,
"step": 8460
},
{
"epoch": 5.576036866359447,
"grad_norm": 0.25257834792137146,
"learning_rate": 9.96713566802672e-05,
"loss": 0.0211,
"step": 8470
},
{
"epoch": 5.582620144832126,
"grad_norm": 0.43158891797065735,
"learning_rate": 9.966946129884873e-05,
"loss": 0.0261,
"step": 8480
},
{
"epoch": 5.589203423304806,
"grad_norm": 0.28529560565948486,
"learning_rate": 9.966756048565265e-05,
"loss": 0.0213,
"step": 8490
},
{
"epoch": 5.595786701777485,
"grad_norm": 0.5274585485458374,
"learning_rate": 9.966565424088681e-05,
"loss": 0.0264,
"step": 8500
},
{
"epoch": 5.602369980250165,
"grad_norm": 0.3596530258655548,
"learning_rate": 9.96637425647597e-05,
"loss": 0.0307,
"step": 8510
},
{
"epoch": 5.608953258722844,
"grad_norm": 0.30984270572662354,
"learning_rate": 9.966182545748038e-05,
"loss": 0.0165,
"step": 8520
},
{
"epoch": 5.615536537195523,
"grad_norm": 0.44396498799324036,
"learning_rate": 9.96599029192585e-05,
"loss": 0.024,
"step": 8530
},
{
"epoch": 5.622119815668203,
"grad_norm": 0.3186427354812622,
"learning_rate": 9.965797495030428e-05,
"loss": 0.017,
"step": 8540
},
{
"epoch": 5.628703094140882,
"grad_norm": 0.4049028158187866,
"learning_rate": 9.96560415508286e-05,
"loss": 0.0185,
"step": 8550
},
{
"epoch": 5.635286372613562,
"grad_norm": 0.27260008454322815,
"learning_rate": 9.965410272104286e-05,
"loss": 0.0191,
"step": 8560
},
{
"epoch": 5.641869651086241,
"grad_norm": 0.25815439224243164,
"learning_rate": 9.96521584611591e-05,
"loss": 0.0162,
"step": 8570
},
{
"epoch": 5.64845292955892,
"grad_norm": 0.25912392139434814,
"learning_rate": 9.965020877138994e-05,
"loss": 0.0232,
"step": 8580
},
{
"epoch": 5.6550362080316,
"grad_norm": 0.28062114119529724,
"learning_rate": 9.964825365194861e-05,
"loss": 0.0204,
"step": 8590
},
{
"epoch": 5.6616194865042795,
"grad_norm": 0.29528099298477173,
"learning_rate": 9.96462931030489e-05,
"loss": 0.0201,
"step": 8600
},
{
"epoch": 5.668202764976958,
"grad_norm": 0.3116486370563507,
"learning_rate": 9.96443271249052e-05,
"loss": 0.0168,
"step": 8610
},
{
"epoch": 5.674786043449638,
"grad_norm": 0.4527759552001953,
"learning_rate": 9.964235571773255e-05,
"loss": 0.0175,
"step": 8620
},
{
"epoch": 5.681369321922317,
"grad_norm": 0.34042954444885254,
"learning_rate": 9.96403788817465e-05,
"loss": 0.0227,
"step": 8630
},
{
"epoch": 5.687952600394997,
"grad_norm": 0.343671053647995,
"learning_rate": 9.963839661716325e-05,
"loss": 0.0155,
"step": 8640
},
{
"epoch": 5.694535878867676,
"grad_norm": 0.3126232624053955,
"learning_rate": 9.963640892419958e-05,
"loss": 0.0202,
"step": 8650
},
{
"epoch": 5.701119157340355,
"grad_norm": 0.36390814185142517,
"learning_rate": 9.963441580307286e-05,
"loss": 0.0148,
"step": 8660
},
{
"epoch": 5.707702435813035,
"grad_norm": 0.37608572840690613,
"learning_rate": 9.963241725400104e-05,
"loss": 0.0175,
"step": 8670
},
{
"epoch": 5.714285714285714,
"grad_norm": 0.35906463861465454,
"learning_rate": 9.963041327720271e-05,
"loss": 0.0202,
"step": 8680
},
{
"epoch": 5.720868992758394,
"grad_norm": 0.22975964844226837,
"learning_rate": 9.962840387289697e-05,
"loss": 0.0212,
"step": 8690
},
{
"epoch": 5.727452271231073,
"grad_norm": 0.355388343334198,
"learning_rate": 9.962638904130363e-05,
"loss": 0.0184,
"step": 8700
},
{
"epoch": 5.734035549703752,
"grad_norm": 0.31850746273994446,
"learning_rate": 9.962436878264298e-05,
"loss": 0.0159,
"step": 8710
},
{
"epoch": 5.740618828176432,
"grad_norm": 0.2832736372947693,
"learning_rate": 9.962234309713598e-05,
"loss": 0.018,
"step": 8720
},
{
"epoch": 5.7472021066491115,
"grad_norm": 0.3577004075050354,
"learning_rate": 9.962031198500414e-05,
"loss": 0.0188,
"step": 8730
},
{
"epoch": 5.753785385121791,
"grad_norm": 0.380075603723526,
"learning_rate": 9.961827544646958e-05,
"loss": 0.0182,
"step": 8740
},
{
"epoch": 5.76036866359447,
"grad_norm": 0.3154783844947815,
"learning_rate": 9.961623348175501e-05,
"loss": 0.0157,
"step": 8750
},
{
"epoch": 5.766951942067149,
"grad_norm": 0.4730320870876312,
"learning_rate": 9.961418609108377e-05,
"loss": 0.02,
"step": 8760
},
{
"epoch": 5.773535220539829,
"grad_norm": 0.3341652750968933,
"learning_rate": 9.961213327467971e-05,
"loss": 0.0227,
"step": 8770
},
{
"epoch": 5.7801184990125085,
"grad_norm": 0.2865208685398102,
"learning_rate": 9.961007503276736e-05,
"loss": 0.0225,
"step": 8780
},
{
"epoch": 5.786701777485188,
"grad_norm": 0.33236029744148254,
"learning_rate": 9.960801136557179e-05,
"loss": 0.0214,
"step": 8790
},
{
"epoch": 5.793285055957867,
"grad_norm": 0.3582252860069275,
"learning_rate": 9.960594227331866e-05,
"loss": 0.0251,
"step": 8800
},
{
"epoch": 5.799868334430546,
"grad_norm": 0.28339603543281555,
"learning_rate": 9.960386775623429e-05,
"loss": 0.0216,
"step": 8810
},
{
"epoch": 5.806451612903226,
"grad_norm": 0.3069753348827362,
"learning_rate": 9.96017878145455e-05,
"loss": 0.0183,
"step": 8820
},
{
"epoch": 5.813034891375906,
"grad_norm": 0.4349236786365509,
"learning_rate": 9.959970244847977e-05,
"loss": 0.0249,
"step": 8830
},
{
"epoch": 5.819618169848584,
"grad_norm": 0.523542046546936,
"learning_rate": 9.959761165826518e-05,
"loss": 0.0265,
"step": 8840
},
{
"epoch": 5.826201448321264,
"grad_norm": 0.3403133749961853,
"learning_rate": 9.959551544413033e-05,
"loss": 0.0224,
"step": 8850
},
{
"epoch": 5.832784726793943,
"grad_norm": 0.3043544590473175,
"learning_rate": 9.959341380630448e-05,
"loss": 0.0233,
"step": 8860
},
{
"epoch": 5.839368005266623,
"grad_norm": 0.4742325246334076,
"learning_rate": 9.959130674501746e-05,
"loss": 0.0249,
"step": 8870
},
{
"epoch": 5.845951283739302,
"grad_norm": 0.4292590618133545,
"learning_rate": 9.958919426049968e-05,
"loss": 0.0305,
"step": 8880
},
{
"epoch": 5.852534562211981,
"grad_norm": 0.26942798495292664,
"learning_rate": 9.958707635298219e-05,
"loss": 0.0225,
"step": 8890
},
{
"epoch": 5.859117840684661,
"grad_norm": 0.31493642926216125,
"learning_rate": 9.958495302269657e-05,
"loss": 0.014,
"step": 8900
},
{
"epoch": 5.8657011191573405,
"grad_norm": 0.3119445741176605,
"learning_rate": 9.958282426987503e-05,
"loss": 0.0217,
"step": 8910
},
{
"epoch": 5.87228439763002,
"grad_norm": 0.3077771067619324,
"learning_rate": 9.95806900947504e-05,
"loss": 0.0216,
"step": 8920
},
{
"epoch": 5.878867676102699,
"grad_norm": 0.29881590604782104,
"learning_rate": 9.957855049755604e-05,
"loss": 0.0188,
"step": 8930
},
{
"epoch": 5.885450954575378,
"grad_norm": 0.294398695230484,
"learning_rate": 9.957640547852593e-05,
"loss": 0.0184,
"step": 8940
},
{
"epoch": 5.892034233048058,
"grad_norm": 0.3678267300128937,
"learning_rate": 9.957425503789466e-05,
"loss": 0.0189,
"step": 8950
},
{
"epoch": 5.8986175115207375,
"grad_norm": 0.2749888002872467,
"learning_rate": 9.957209917589738e-05,
"loss": 0.0192,
"step": 8960
},
{
"epoch": 5.905200789993417,
"grad_norm": 0.2994067668914795,
"learning_rate": 9.956993789276987e-05,
"loss": 0.0217,
"step": 8970
},
{
"epoch": 5.911784068466096,
"grad_norm": 0.32843664288520813,
"learning_rate": 9.956777118874847e-05,
"loss": 0.0256,
"step": 8980
},
{
"epoch": 5.918367346938775,
"grad_norm": 0.3331758975982666,
"learning_rate": 9.956559906407016e-05,
"loss": 0.0229,
"step": 8990
},
{
"epoch": 5.924950625411455,
"grad_norm": 0.43505731225013733,
"learning_rate": 9.956342151897245e-05,
"loss": 0.0199,
"step": 9000
},
{
"epoch": 5.931533903884135,
"grad_norm": 0.46164658665657043,
"learning_rate": 9.956123855369346e-05,
"loss": 0.0267,
"step": 9010
},
{
"epoch": 5.938117182356814,
"grad_norm": 0.27105483412742615,
"learning_rate": 9.955905016847196e-05,
"loss": 0.0213,
"step": 9020
},
{
"epoch": 5.944700460829493,
"grad_norm": 0.4163826107978821,
"learning_rate": 9.955685636354723e-05,
"loss": 0.0219,
"step": 9030
},
{
"epoch": 5.9512837393021725,
"grad_norm": 0.4182054400444031,
"learning_rate": 9.95546571391592e-05,
"loss": 0.0195,
"step": 9040
},
{
"epoch": 5.957867017774852,
"grad_norm": 0.35891517996788025,
"learning_rate": 9.955245249554837e-05,
"loss": 0.0235,
"step": 9050
},
{
"epoch": 5.964450296247532,
"grad_norm": 0.2635720670223236,
"learning_rate": 9.955024243295582e-05,
"loss": 0.0188,
"step": 9060
},
{
"epoch": 5.97103357472021,
"grad_norm": 0.20277492702007294,
"learning_rate": 9.954802695162328e-05,
"loss": 0.018,
"step": 9070
},
{
"epoch": 5.97761685319289,
"grad_norm": 0.2811432480812073,
"learning_rate": 9.954580605179302e-05,
"loss": 0.0192,
"step": 9080
},
{
"epoch": 5.9842001316655695,
"grad_norm": 0.3374830186367035,
"learning_rate": 9.954357973370788e-05,
"loss": 0.0179,
"step": 9090
},
{
"epoch": 5.990783410138249,
"grad_norm": 0.23352426290512085,
"learning_rate": 9.954134799761135e-05,
"loss": 0.018,
"step": 9100
},
{
"epoch": 5.997366688610928,
"grad_norm": 0.27364882826805115,
"learning_rate": 9.953911084374748e-05,
"loss": 0.0161,
"step": 9110
},
{
"epoch": 6.003949967083607,
"grad_norm": 0.379571795463562,
"learning_rate": 9.953686827236093e-05,
"loss": 0.0179,
"step": 9120
},
{
"epoch": 6.010533245556287,
"grad_norm": 0.2913884222507477,
"learning_rate": 9.953462028369695e-05,
"loss": 0.0198,
"step": 9130
},
{
"epoch": 6.017116524028967,
"grad_norm": 0.3363860249519348,
"learning_rate": 9.953236687800136e-05,
"loss": 0.0161,
"step": 9140
},
{
"epoch": 6.023699802501646,
"grad_norm": 0.3260352909564972,
"learning_rate": 9.95301080555206e-05,
"loss": 0.0178,
"step": 9150
},
{
"epoch": 6.030283080974325,
"grad_norm": 0.32913151383399963,
"learning_rate": 9.952784381650171e-05,
"loss": 0.0184,
"step": 9160
},
{
"epoch": 6.0368663594470044,
"grad_norm": 0.353580504655838,
"learning_rate": 9.952557416119226e-05,
"loss": 0.0205,
"step": 9170
},
{
"epoch": 6.043449637919684,
"grad_norm": 0.2919161319732666,
"learning_rate": 9.95232990898405e-05,
"loss": 0.0167,
"step": 9180
},
{
"epoch": 6.050032916392364,
"grad_norm": 0.3464362919330597,
"learning_rate": 9.95210186026952e-05,
"loss": 0.0184,
"step": 9190
},
{
"epoch": 6.056616194865043,
"grad_norm": 0.27000024914741516,
"learning_rate": 9.951873270000576e-05,
"loss": 0.0206,
"step": 9200
},
{
"epoch": 6.063199473337722,
"grad_norm": 0.5808350443840027,
"learning_rate": 9.951644138202216e-05,
"loss": 0.0223,
"step": 9210
},
{
"epoch": 6.0697827518104015,
"grad_norm": 0.3968835175037384,
"learning_rate": 9.951414464899498e-05,
"loss": 0.0164,
"step": 9220
},
{
"epoch": 6.076366030283081,
"grad_norm": 0.3585447669029236,
"learning_rate": 9.951184250117538e-05,
"loss": 0.0224,
"step": 9230
},
{
"epoch": 6.082949308755761,
"grad_norm": 0.32544565200805664,
"learning_rate": 9.950953493881513e-05,
"loss": 0.0218,
"step": 9240
},
{
"epoch": 6.089532587228439,
"grad_norm": 0.23520123958587646,
"learning_rate": 9.950722196216658e-05,
"loss": 0.0196,
"step": 9250
},
{
"epoch": 6.096115865701119,
"grad_norm": 0.4596306383609772,
"learning_rate": 9.950490357148265e-05,
"loss": 0.0183,
"step": 9260
},
{
"epoch": 6.1026991441737986,
"grad_norm": 0.2417062520980835,
"learning_rate": 9.950257976701692e-05,
"loss": 0.0158,
"step": 9270
},
{
"epoch": 6.109282422646478,
"grad_norm": 0.2519543766975403,
"learning_rate": 9.950025054902348e-05,
"loss": 0.0243,
"step": 9280
},
{
"epoch": 6.115865701119158,
"grad_norm": 0.4018669128417969,
"learning_rate": 9.949791591775706e-05,
"loss": 0.0176,
"step": 9290
},
{
"epoch": 6.122448979591836,
"grad_norm": 0.21316957473754883,
"learning_rate": 9.949557587347298e-05,
"loss": 0.0248,
"step": 9300
},
{
"epoch": 6.129032258064516,
"grad_norm": 0.33540183305740356,
"learning_rate": 9.949323041642713e-05,
"loss": 0.0215,
"step": 9310
},
{
"epoch": 6.135615536537196,
"grad_norm": 0.3875342309474945,
"learning_rate": 9.949087954687602e-05,
"loss": 0.032,
"step": 9320
},
{
"epoch": 6.142198815009875,
"grad_norm": 0.38081595301628113,
"learning_rate": 9.948852326507672e-05,
"loss": 0.0213,
"step": 9330
},
{
"epoch": 6.148782093482554,
"grad_norm": 0.3210954964160919,
"learning_rate": 9.948616157128694e-05,
"loss": 0.0208,
"step": 9340
},
{
"epoch": 6.1553653719552335,
"grad_norm": 0.2606159746646881,
"learning_rate": 9.948379446576493e-05,
"loss": 0.0148,
"step": 9350
},
{
"epoch": 6.161948650427913,
"grad_norm": 0.23226390779018402,
"learning_rate": 9.948142194876952e-05,
"loss": 0.0142,
"step": 9360
},
{
"epoch": 6.168531928900593,
"grad_norm": 0.22695526480674744,
"learning_rate": 9.947904402056024e-05,
"loss": 0.0273,
"step": 9370
},
{
"epoch": 6.175115207373272,
"grad_norm": 0.2978394031524658,
"learning_rate": 9.947666068139708e-05,
"loss": 0.0157,
"step": 9380
},
{
"epoch": 6.181698485845951,
"grad_norm": 0.32799768447875977,
"learning_rate": 9.947427193154071e-05,
"loss": 0.0296,
"step": 9390
},
{
"epoch": 6.1882817643186305,
"grad_norm": 0.3532279133796692,
"learning_rate": 9.947187777125233e-05,
"loss": 0.018,
"step": 9400
},
{
"epoch": 6.19486504279131,
"grad_norm": 0.36867061257362366,
"learning_rate": 9.946947820079377e-05,
"loss": 0.0192,
"step": 9410
},
{
"epoch": 6.20144832126399,
"grad_norm": 0.2883126437664032,
"learning_rate": 9.946707322042747e-05,
"loss": 0.0191,
"step": 9420
},
{
"epoch": 6.208031599736669,
"grad_norm": 0.4079958200454712,
"learning_rate": 9.94646628304164e-05,
"loss": 0.0218,
"step": 9430
},
{
"epoch": 6.214614878209348,
"grad_norm": 0.2711893618106842,
"learning_rate": 9.946224703102418e-05,
"loss": 0.0201,
"step": 9440
},
{
"epoch": 6.221198156682028,
"grad_norm": 0.36159566044807434,
"learning_rate": 9.945982582251498e-05,
"loss": 0.0261,
"step": 9450
},
{
"epoch": 6.227781435154707,
"grad_norm": 0.33721408247947693,
"learning_rate": 9.94573992051536e-05,
"loss": 0.0148,
"step": 9460
},
{
"epoch": 6.234364713627387,
"grad_norm": 0.29908159375190735,
"learning_rate": 9.94549671792054e-05,
"loss": 0.0196,
"step": 9470
},
{
"epoch": 6.2409479921000655,
"grad_norm": 0.26687297224998474,
"learning_rate": 9.945252974493635e-05,
"loss": 0.0177,
"step": 9480
},
{
"epoch": 6.247531270572745,
"grad_norm": 0.3233434855937958,
"learning_rate": 9.9450086902613e-05,
"loss": 0.0202,
"step": 9490
},
{
"epoch": 6.254114549045425,
"grad_norm": 0.33357781171798706,
"learning_rate": 9.944763865250248e-05,
"loss": 0.0185,
"step": 9500
},
{
"epoch": 6.260697827518104,
"grad_norm": 0.27512818574905396,
"learning_rate": 9.944518499487254e-05,
"loss": 0.0208,
"step": 9510
},
{
"epoch": 6.267281105990784,
"grad_norm": 0.30229079723358154,
"learning_rate": 9.944272592999151e-05,
"loss": 0.0187,
"step": 9520
},
{
"epoch": 6.2738643844634625,
"grad_norm": 0.42628300189971924,
"learning_rate": 9.94402614581283e-05,
"loss": 0.0215,
"step": 9530
},
{
"epoch": 6.280447662936142,
"grad_norm": 0.2989635169506073,
"learning_rate": 9.943779157955244e-05,
"loss": 0.0202,
"step": 9540
},
{
"epoch": 6.287030941408822,
"grad_norm": 0.3049742877483368,
"learning_rate": 9.943531629453403e-05,
"loss": 0.0248,
"step": 9550
},
{
"epoch": 6.293614219881501,
"grad_norm": 0.2720232307910919,
"learning_rate": 9.943283560334375e-05,
"loss": 0.0151,
"step": 9560
},
{
"epoch": 6.30019749835418,
"grad_norm": 0.19958046078681946,
"learning_rate": 9.943034950625288e-05,
"loss": 0.0237,
"step": 9570
},
{
"epoch": 6.30678077682686,
"grad_norm": 0.21954038739204407,
"learning_rate": 9.942785800353332e-05,
"loss": 0.0144,
"step": 9580
},
{
"epoch": 6.313364055299539,
"grad_norm": 0.35476773977279663,
"learning_rate": 9.942536109545751e-05,
"loss": 0.0158,
"step": 9590
},
{
"epoch": 6.319947333772219,
"grad_norm": 0.3394959270954132,
"learning_rate": 9.942285878229853e-05,
"loss": 0.0249,
"step": 9600
},
{
"epoch": 6.326530612244898,
"grad_norm": 0.3519703447818756,
"learning_rate": 9.942035106433001e-05,
"loss": 0.0144,
"step": 9610
},
{
"epoch": 6.333113890717577,
"grad_norm": 0.2918345332145691,
"learning_rate": 9.94178379418262e-05,
"loss": 0.0187,
"step": 9620
},
{
"epoch": 6.339697169190257,
"grad_norm": 0.19374874234199524,
"learning_rate": 9.941531941506194e-05,
"loss": 0.0152,
"step": 9630
},
{
"epoch": 6.346280447662936,
"grad_norm": 0.29477545619010925,
"learning_rate": 9.941279548431263e-05,
"loss": 0.0156,
"step": 9640
},
{
"epoch": 6.352863726135616,
"grad_norm": 0.33442914485931396,
"learning_rate": 9.941026614985431e-05,
"loss": 0.02,
"step": 9650
},
{
"epoch": 6.359447004608295,
"grad_norm": 0.24333545565605164,
"learning_rate": 9.940773141196357e-05,
"loss": 0.0214,
"step": 9660
},
{
"epoch": 6.366030283080974,
"grad_norm": 0.36624905467033386,
"learning_rate": 9.94051912709176e-05,
"loss": 0.0337,
"step": 9670
},
{
"epoch": 6.372613561553654,
"grad_norm": 0.2542989253997803,
"learning_rate": 9.940264572699421e-05,
"loss": 0.016,
"step": 9680
},
{
"epoch": 6.379196840026333,
"grad_norm": 0.3040478527545929,
"learning_rate": 9.940009478047174e-05,
"loss": 0.0208,
"step": 9690
},
{
"epoch": 6.385780118499013,
"grad_norm": 0.2591792345046997,
"learning_rate": 9.939753843162918e-05,
"loss": 0.0175,
"step": 9700
},
{
"epoch": 6.3923633969716915,
"grad_norm": 0.287160724401474,
"learning_rate": 9.939497668074609e-05,
"loss": 0.0146,
"step": 9710
},
{
"epoch": 6.398946675444371,
"grad_norm": 0.30162158608436584,
"learning_rate": 9.93924095281026e-05,
"loss": 0.0248,
"step": 9720
},
{
"epoch": 6.405529953917051,
"grad_norm": 0.24138455092906952,
"learning_rate": 9.938983697397948e-05,
"loss": 0.0172,
"step": 9730
},
{
"epoch": 6.41211323238973,
"grad_norm": 0.26627230644226074,
"learning_rate": 9.938725901865805e-05,
"loss": 0.0193,
"step": 9740
},
{
"epoch": 6.41869651086241,
"grad_norm": 0.384267121553421,
"learning_rate": 9.93846756624202e-05,
"loss": 0.0228,
"step": 9750
},
{
"epoch": 6.425279789335089,
"grad_norm": 0.3151776194572449,
"learning_rate": 9.938208690554849e-05,
"loss": 0.0233,
"step": 9760
},
{
"epoch": 6.431863067807768,
"grad_norm": 0.4698539078235626,
"learning_rate": 9.9379492748326e-05,
"loss": 0.0171,
"step": 9770
},
{
"epoch": 6.438446346280448,
"grad_norm": 0.357870489358902,
"learning_rate": 9.937689319103641e-05,
"loss": 0.0241,
"step": 9780
},
{
"epoch": 6.445029624753127,
"grad_norm": 0.24749311804771423,
"learning_rate": 9.937428823396404e-05,
"loss": 0.0194,
"step": 9790
},
{
"epoch": 6.451612903225806,
"grad_norm": 0.30506858229637146,
"learning_rate": 9.937167787739372e-05,
"loss": 0.0198,
"step": 9800
},
{
"epoch": 6.458196181698486,
"grad_norm": 0.26913967728614807,
"learning_rate": 9.936906212161095e-05,
"loss": 0.0176,
"step": 9810
},
{
"epoch": 6.464779460171165,
"grad_norm": 0.21204650402069092,
"learning_rate": 9.936644096690176e-05,
"loss": 0.0169,
"step": 9820
},
{
"epoch": 6.471362738643845,
"grad_norm": 0.29451656341552734,
"learning_rate": 9.936381441355282e-05,
"loss": 0.0155,
"step": 9830
},
{
"epoch": 6.477946017116524,
"grad_norm": 0.28931787610054016,
"learning_rate": 9.936118246185136e-05,
"loss": 0.0166,
"step": 9840
},
{
"epoch": 6.484529295589203,
"grad_norm": 0.3742450773715973,
"learning_rate": 9.935854511208518e-05,
"loss": 0.0183,
"step": 9850
},
{
"epoch": 6.491112574061883,
"grad_norm": 0.3527494966983795,
"learning_rate": 9.935590236454272e-05,
"loss": 0.0189,
"step": 9860
},
{
"epoch": 6.497695852534562,
"grad_norm": 0.2684400677680969,
"learning_rate": 9.935325421951298e-05,
"loss": 0.0153,
"step": 9870
},
{
"epoch": 6.504279131007242,
"grad_norm": 0.3686043322086334,
"learning_rate": 9.935060067728557e-05,
"loss": 0.0234,
"step": 9880
},
{
"epoch": 6.5108624094799215,
"grad_norm": 0.35398194193840027,
"learning_rate": 9.934794173815067e-05,
"loss": 0.0208,
"step": 9890
},
{
"epoch": 6.5174456879526,
"grad_norm": 0.3215058445930481,
"learning_rate": 9.934527740239906e-05,
"loss": 0.0228,
"step": 9900
},
{
"epoch": 6.52402896642528,
"grad_norm": 0.34322696924209595,
"learning_rate": 9.934260767032209e-05,
"loss": 0.0207,
"step": 9910
},
{
"epoch": 6.530612244897959,
"grad_norm": 0.41083627939224243,
"learning_rate": 9.933993254221172e-05,
"loss": 0.016,
"step": 9920
},
{
"epoch": 6.537195523370639,
"grad_norm": 0.3417953550815582,
"learning_rate": 9.933725201836053e-05,
"loss": 0.0304,
"step": 9930
},
{
"epoch": 6.543778801843318,
"grad_norm": 0.2922670841217041,
"learning_rate": 9.933456609906162e-05,
"loss": 0.0211,
"step": 9940
},
{
"epoch": 6.550362080315997,
"grad_norm": 0.34726616740226746,
"learning_rate": 9.933187478460875e-05,
"loss": 0.0315,
"step": 9950
},
{
"epoch": 6.556945358788677,
"grad_norm": 0.3180295526981354,
"learning_rate": 9.93291780752962e-05,
"loss": 0.0214,
"step": 9960
},
{
"epoch": 6.563528637261356,
"grad_norm": 0.3323359489440918,
"learning_rate": 9.932647597141893e-05,
"loss": 0.0213,
"step": 9970
},
{
"epoch": 6.570111915734035,
"grad_norm": 0.38590207695961,
"learning_rate": 9.932376847327239e-05,
"loss": 0.0182,
"step": 9980
},
{
"epoch": 6.576695194206715,
"grad_norm": 0.3982744514942169,
"learning_rate": 9.932105558115268e-05,
"loss": 0.0217,
"step": 9990
},
{
"epoch": 6.583278472679394,
"grad_norm": 0.41468364000320435,
"learning_rate": 9.931833729535651e-05,
"loss": 0.02,
"step": 10000
},
{
"epoch": 6.589861751152074,
"grad_norm": 0.2962639331817627,
"learning_rate": 9.931561361618111e-05,
"loss": 0.0184,
"step": 10010
},
{
"epoch": 6.596445029624753,
"grad_norm": 0.29026249051094055,
"learning_rate": 9.931288454392435e-05,
"loss": 0.0257,
"step": 10020
},
{
"epoch": 6.603028308097432,
"grad_norm": 0.35137197375297546,
"learning_rate": 9.931015007888467e-05,
"loss": 0.0204,
"step": 10030
},
{
"epoch": 6.609611586570112,
"grad_norm": 0.27640193700790405,
"learning_rate": 9.930741022136112e-05,
"loss": 0.019,
"step": 10040
},
{
"epoch": 6.616194865042791,
"grad_norm": 0.358206570148468,
"learning_rate": 9.930466497165333e-05,
"loss": 0.0199,
"step": 10050
},
{
"epoch": 6.622778143515471,
"grad_norm": 0.37465885281562805,
"learning_rate": 9.93019143300615e-05,
"loss": 0.0183,
"step": 10060
},
{
"epoch": 6.6293614219881505,
"grad_norm": 0.29446157813072205,
"learning_rate": 9.929915829688644e-05,
"loss": 0.0191,
"step": 10070
},
{
"epoch": 6.635944700460829,
"grad_norm": 0.2849454879760742,
"learning_rate": 9.929639687242955e-05,
"loss": 0.0205,
"step": 10080
},
{
"epoch": 6.642527978933509,
"grad_norm": 0.40511831641197205,
"learning_rate": 9.929363005699281e-05,
"loss": 0.0236,
"step": 10090
},
{
"epoch": 6.649111257406188,
"grad_norm": 0.31258076429367065,
"learning_rate": 9.92908578508788e-05,
"loss": 0.0171,
"step": 10100
},
{
"epoch": 6.655694535878868,
"grad_norm": 0.2597019076347351,
"learning_rate": 9.928808025439069e-05,
"loss": 0.0169,
"step": 10110
},
{
"epoch": 6.6622778143515475,
"grad_norm": 0.3026615083217621,
"learning_rate": 9.928529726783223e-05,
"loss": 0.0251,
"step": 10120
},
{
"epoch": 6.668861092824226,
"grad_norm": 0.40658512711524963,
"learning_rate": 9.928250889150774e-05,
"loss": 0.0157,
"step": 10130
},
{
"epoch": 6.675444371296906,
"grad_norm": 0.296414852142334,
"learning_rate": 9.92797151257222e-05,
"loss": 0.0141,
"step": 10140
},
{
"epoch": 6.682027649769585,
"grad_norm": 0.19929881393909454,
"learning_rate": 9.927691597078108e-05,
"loss": 0.0208,
"step": 10150
},
{
"epoch": 6.688610928242265,
"grad_norm": 0.2986491024494171,
"learning_rate": 9.927411142699053e-05,
"loss": 0.02,
"step": 10160
},
{
"epoch": 6.695194206714944,
"grad_norm": 0.29914233088493347,
"learning_rate": 9.927130149465725e-05,
"loss": 0.0221,
"step": 10170
},
{
"epoch": 6.701777485187623,
"grad_norm": 0.21189433336257935,
"learning_rate": 9.92684861740885e-05,
"loss": 0.0189,
"step": 10180
},
{
"epoch": 6.708360763660303,
"grad_norm": 0.37645912170410156,
"learning_rate": 9.926566546559217e-05,
"loss": 0.027,
"step": 10190
},
{
"epoch": 6.7149440421329825,
"grad_norm": 0.3663330674171448,
"learning_rate": 9.926283936947673e-05,
"loss": 0.019,
"step": 10200
},
{
"epoch": 6.721527320605661,
"grad_norm": 0.32212164998054504,
"learning_rate": 9.926000788605126e-05,
"loss": 0.019,
"step": 10210
},
{
"epoch": 6.728110599078341,
"grad_norm": 0.44465067982673645,
"learning_rate": 9.92571710156254e-05,
"loss": 0.0201,
"step": 10220
},
{
"epoch": 6.73469387755102,
"grad_norm": 0.3856736421585083,
"learning_rate": 9.925432875850936e-05,
"loss": 0.0263,
"step": 10230
},
{
"epoch": 6.7412771560237,
"grad_norm": 0.28890272974967957,
"learning_rate": 9.925148111501396e-05,
"loss": 0.0219,
"step": 10240
},
{
"epoch": 6.7478604344963795,
"grad_norm": 0.3673318922519684,
"learning_rate": 9.924862808545066e-05,
"loss": 0.0169,
"step": 10250
},
{
"epoch": 6.754443712969058,
"grad_norm": 0.3341732323169708,
"learning_rate": 9.924576967013141e-05,
"loss": 0.0178,
"step": 10260
},
{
"epoch": 6.761026991441738,
"grad_norm": 0.34217989444732666,
"learning_rate": 9.924290586936887e-05,
"loss": 0.0175,
"step": 10270
},
{
"epoch": 6.767610269914417,
"grad_norm": 0.3089868426322937,
"learning_rate": 9.924003668347614e-05,
"loss": 0.0194,
"step": 10280
},
{
"epoch": 6.774193548387097,
"grad_norm": 0.298808753490448,
"learning_rate": 9.923716211276704e-05,
"loss": 0.014,
"step": 10290
},
{
"epoch": 6.780776826859777,
"grad_norm": 0.24005000293254852,
"learning_rate": 9.923428215755594e-05,
"loss": 0.0203,
"step": 10300
},
{
"epoch": 6.787360105332455,
"grad_norm": 0.21396656334400177,
"learning_rate": 9.923139681815775e-05,
"loss": 0.0185,
"step": 10310
},
{
"epoch": 6.793943383805135,
"grad_norm": 0.32189464569091797,
"learning_rate": 9.922850609488801e-05,
"loss": 0.0175,
"step": 10320
},
{
"epoch": 6.800526662277814,
"grad_norm": 0.28423628211021423,
"learning_rate": 9.922560998806287e-05,
"loss": 0.013,
"step": 10330
},
{
"epoch": 6.807109940750494,
"grad_norm": 0.2921487092971802,
"learning_rate": 9.922270849799905e-05,
"loss": 0.0139,
"step": 10340
},
{
"epoch": 6.813693219223174,
"grad_norm": 0.33387309312820435,
"learning_rate": 9.92198016250138e-05,
"loss": 0.0179,
"step": 10350
},
{
"epoch": 6.820276497695852,
"grad_norm": 0.2593229115009308,
"learning_rate": 9.921688936942506e-05,
"loss": 0.0172,
"step": 10360
},
{
"epoch": 6.826859776168532,
"grad_norm": 0.2489296942949295,
"learning_rate": 9.921397173155129e-05,
"loss": 0.0183,
"step": 10370
},
{
"epoch": 6.8334430546412115,
"grad_norm": 0.40210461616516113,
"learning_rate": 9.921104871171157e-05,
"loss": 0.0216,
"step": 10380
},
{
"epoch": 6.840026333113891,
"grad_norm": 0.3916763961315155,
"learning_rate": 9.920812031022554e-05,
"loss": 0.0233,
"step": 10390
},
{
"epoch": 6.84660961158657,
"grad_norm": 0.3899797201156616,
"learning_rate": 9.920518652741348e-05,
"loss": 0.0198,
"step": 10400
},
{
"epoch": 6.853192890059249,
"grad_norm": 0.33417588472366333,
"learning_rate": 9.920224736359618e-05,
"loss": 0.0197,
"step": 10410
},
{
"epoch": 6.859776168531929,
"grad_norm": 0.36752772331237793,
"learning_rate": 9.91993028190951e-05,
"loss": 0.0172,
"step": 10420
},
{
"epoch": 6.8663594470046085,
"grad_norm": 0.19663628935813904,
"learning_rate": 9.919635289423222e-05,
"loss": 0.021,
"step": 10430
},
{
"epoch": 6.872942725477287,
"grad_norm": 0.24231010675430298,
"learning_rate": 9.919339758933015e-05,
"loss": 0.021,
"step": 10440
},
{
"epoch": 6.879526003949967,
"grad_norm": 0.31300294399261475,
"learning_rate": 9.919043690471209e-05,
"loss": 0.0204,
"step": 10450
},
{
"epoch": 6.886109282422646,
"grad_norm": 0.37167423963546753,
"learning_rate": 9.91874708407018e-05,
"loss": 0.017,
"step": 10460
},
{
"epoch": 6.892692560895326,
"grad_norm": 0.23635649681091309,
"learning_rate": 9.918449939762367e-05,
"loss": 0.0147,
"step": 10470
},
{
"epoch": 6.899275839368006,
"grad_norm": 0.32587042450904846,
"learning_rate": 9.91815225758026e-05,
"loss": 0.0186,
"step": 10480
},
{
"epoch": 6.905859117840684,
"grad_norm": 0.31372883915901184,
"learning_rate": 9.917854037556419e-05,
"loss": 0.014,
"step": 10490
},
{
"epoch": 6.912442396313364,
"grad_norm": 0.32236507534980774,
"learning_rate": 9.917555279723454e-05,
"loss": 0.0166,
"step": 10500
},
{
"epoch": 6.9190256747860435,
"grad_norm": 0.3173193335533142,
"learning_rate": 9.917255984114036e-05,
"loss": 0.0184,
"step": 10510
},
{
"epoch": 6.925608953258723,
"grad_norm": 0.23854570090770721,
"learning_rate": 9.916956150760896e-05,
"loss": 0.0198,
"step": 10520
},
{
"epoch": 6.932192231731403,
"grad_norm": 0.3578750491142273,
"learning_rate": 9.916655779696826e-05,
"loss": 0.0237,
"step": 10530
},
{
"epoch": 6.938775510204081,
"grad_norm": 0.22527861595153809,
"learning_rate": 9.916354870954671e-05,
"loss": 0.0169,
"step": 10540
},
{
"epoch": 6.945358788676761,
"grad_norm": 0.2790786623954773,
"learning_rate": 9.91605342456734e-05,
"loss": 0.0235,
"step": 10550
},
{
"epoch": 6.9519420671494405,
"grad_norm": 0.32542911171913147,
"learning_rate": 9.915751440567795e-05,
"loss": 0.0177,
"step": 10560
},
{
"epoch": 6.95852534562212,
"grad_norm": 0.29238200187683105,
"learning_rate": 9.915448918989066e-05,
"loss": 0.021,
"step": 10570
},
{
"epoch": 6.9651086240948,
"grad_norm": 0.4408346116542816,
"learning_rate": 9.915145859864232e-05,
"loss": 0.0193,
"step": 10580
},
{
"epoch": 6.971691902567478,
"grad_norm": 0.34986612200737,
"learning_rate": 9.914842263226437e-05,
"loss": 0.0178,
"step": 10590
},
{
"epoch": 6.978275181040158,
"grad_norm": 0.28159600496292114,
"learning_rate": 9.914538129108882e-05,
"loss": 0.0183,
"step": 10600
},
{
"epoch": 6.984858459512838,
"grad_norm": 0.25488001108169556,
"learning_rate": 9.914233457544825e-05,
"loss": 0.0173,
"step": 10610
},
{
"epoch": 6.991441737985517,
"grad_norm": 0.27186429500579834,
"learning_rate": 9.913928248567586e-05,
"loss": 0.0171,
"step": 10620
},
{
"epoch": 6.998025016458196,
"grad_norm": 0.2964438796043396,
"learning_rate": 9.913622502210542e-05,
"loss": 0.0175,
"step": 10630
},
{
"epoch": 7.0046082949308754,
"grad_norm": 0.25319603085517883,
"learning_rate": 9.913316218507128e-05,
"loss": 0.0195,
"step": 10640
},
{
"epoch": 7.011191573403555,
"grad_norm": 0.2829616963863373,
"learning_rate": 9.91300939749084e-05,
"loss": 0.0152,
"step": 10650
},
{
"epoch": 7.017774851876235,
"grad_norm": 0.2091553807258606,
"learning_rate": 9.91270203919523e-05,
"loss": 0.0156,
"step": 10660
},
{
"epoch": 7.024358130348913,
"grad_norm": 0.27263903617858887,
"learning_rate": 9.912394143653912e-05,
"loss": 0.0186,
"step": 10670
},
{
"epoch": 7.030941408821593,
"grad_norm": 0.2951875329017639,
"learning_rate": 9.912085710900555e-05,
"loss": 0.0177,
"step": 10680
},
{
"epoch": 7.0375246872942725,
"grad_norm": 0.2087211161851883,
"learning_rate": 9.911776740968892e-05,
"loss": 0.0153,
"step": 10690
},
{
"epoch": 7.044107965766952,
"grad_norm": 0.37653759121894836,
"learning_rate": 9.911467233892709e-05,
"loss": 0.018,
"step": 10700
},
{
"epoch": 7.050691244239632,
"grad_norm": 0.2645755410194397,
"learning_rate": 9.911157189705853e-05,
"loss": 0.0242,
"step": 10710
},
{
"epoch": 7.05727452271231,
"grad_norm": 0.30586713552474976,
"learning_rate": 9.910846608442229e-05,
"loss": 0.0174,
"step": 10720
},
{
"epoch": 7.06385780118499,
"grad_norm": 0.22648707032203674,
"learning_rate": 9.910535490135805e-05,
"loss": 0.022,
"step": 10730
},
{
"epoch": 7.0704410796576695,
"grad_norm": 0.3550933599472046,
"learning_rate": 9.910223834820603e-05,
"loss": 0.0221,
"step": 10740
},
{
"epoch": 7.077024358130349,
"grad_norm": 0.3650365173816681,
"learning_rate": 9.909911642530703e-05,
"loss": 0.0201,
"step": 10750
},
{
"epoch": 7.083607636603029,
"grad_norm": 0.29339197278022766,
"learning_rate": 9.909598913300249e-05,
"loss": 0.0182,
"step": 10760
},
{
"epoch": 7.090190915075707,
"grad_norm": 0.2898188531398773,
"learning_rate": 9.909285647163438e-05,
"loss": 0.0169,
"step": 10770
},
{
"epoch": 7.096774193548387,
"grad_norm": 0.210793599486351,
"learning_rate": 9.908971844154531e-05,
"loss": 0.0183,
"step": 10780
},
{
"epoch": 7.103357472021067,
"grad_norm": 0.2698214054107666,
"learning_rate": 9.908657504307843e-05,
"loss": 0.0149,
"step": 10790
},
{
"epoch": 7.109940750493746,
"grad_norm": 0.34165287017822266,
"learning_rate": 9.908342627657751e-05,
"loss": 0.0191,
"step": 10800
},
{
"epoch": 7.116524028966425,
"grad_norm": 0.33044493198394775,
"learning_rate": 9.908027214238689e-05,
"loss": 0.0217,
"step": 10810
},
{
"epoch": 7.1231073074391045,
"grad_norm": 0.3296797275543213,
"learning_rate": 9.90771126408515e-05,
"loss": 0.0195,
"step": 10820
},
{
"epoch": 7.129690585911784,
"grad_norm": 0.4196171164512634,
"learning_rate": 9.907394777231685e-05,
"loss": 0.0192,
"step": 10830
},
{
"epoch": 7.136273864384464,
"grad_norm": 0.3580937385559082,
"learning_rate": 9.907077753712905e-05,
"loss": 0.0161,
"step": 10840
},
{
"epoch": 7.142857142857143,
"grad_norm": 0.3238092064857483,
"learning_rate": 9.906760193563482e-05,
"loss": 0.0191,
"step": 10850
},
{
"epoch": 7.149440421329822,
"grad_norm": 0.309400349855423,
"learning_rate": 9.906442096818139e-05,
"loss": 0.0177,
"step": 10860
},
{
"epoch": 7.1560236998025015,
"grad_norm": 0.2711527645587921,
"learning_rate": 9.906123463511665e-05,
"loss": 0.0147,
"step": 10870
},
{
"epoch": 7.162606978275181,
"grad_norm": 0.25941452383995056,
"learning_rate": 9.905804293678907e-05,
"loss": 0.0195,
"step": 10880
},
{
"epoch": 7.169190256747861,
"grad_norm": 0.34450215101242065,
"learning_rate": 9.905484587354766e-05,
"loss": 0.0244,
"step": 10890
},
{
"epoch": 7.175773535220539,
"grad_norm": 0.39104101061820984,
"learning_rate": 9.905164344574205e-05,
"loss": 0.0196,
"step": 10900
},
{
"epoch": 7.182356813693219,
"grad_norm": 0.24513410031795502,
"learning_rate": 9.904843565372248e-05,
"loss": 0.0139,
"step": 10910
},
{
"epoch": 7.188940092165899,
"grad_norm": 0.37569987773895264,
"learning_rate": 9.904522249783972e-05,
"loss": 0.0186,
"step": 10920
},
{
"epoch": 7.195523370638578,
"grad_norm": 0.3551914691925049,
"learning_rate": 9.904200397844517e-05,
"loss": 0.021,
"step": 10930
},
{
"epoch": 7.202106649111258,
"grad_norm": 0.24651531875133514,
"learning_rate": 9.903878009589078e-05,
"loss": 0.0143,
"step": 10940
},
{
"epoch": 7.2086899275839365,
"grad_norm": 0.2332409769296646,
"learning_rate": 9.903555085052915e-05,
"loss": 0.0133,
"step": 10950
},
{
"epoch": 7.215273206056616,
"grad_norm": 0.39533382654190063,
"learning_rate": 9.903231624271338e-05,
"loss": 0.0174,
"step": 10960
},
{
"epoch": 7.221856484529296,
"grad_norm": 0.34340912103652954,
"learning_rate": 9.902907627279724e-05,
"loss": 0.0142,
"step": 10970
},
{
"epoch": 7.228439763001975,
"grad_norm": 0.22741550207138062,
"learning_rate": 9.902583094113504e-05,
"loss": 0.0134,
"step": 10980
},
{
"epoch": 7.235023041474655,
"grad_norm": 0.2840103209018707,
"learning_rate": 9.902258024808168e-05,
"loss": 0.0158,
"step": 10990
},
{
"epoch": 7.2416063199473335,
"grad_norm": 0.39114007353782654,
"learning_rate": 9.901932419399264e-05,
"loss": 0.017,
"step": 11000
},
{
"epoch": 7.248189598420013,
"grad_norm": 0.247371643781662,
"learning_rate": 9.9016062779224e-05,
"loss": 0.0193,
"step": 11010
},
{
"epoch": 7.254772876892693,
"grad_norm": 0.3058176338672638,
"learning_rate": 9.901279600413242e-05,
"loss": 0.0188,
"step": 11020
},
{
"epoch": 7.261356155365372,
"grad_norm": 0.3149106204509735,
"learning_rate": 9.900952386907518e-05,
"loss": 0.0142,
"step": 11030
},
{
"epoch": 7.267939433838051,
"grad_norm": 0.2861136496067047,
"learning_rate": 9.90062463744101e-05,
"loss": 0.0152,
"step": 11040
},
{
"epoch": 7.2745227123107306,
"grad_norm": 0.31497111916542053,
"learning_rate": 9.900296352049558e-05,
"loss": 0.0193,
"step": 11050
},
{
"epoch": 7.28110599078341,
"grad_norm": 0.3361019790172577,
"learning_rate": 9.899967530769065e-05,
"loss": 0.0269,
"step": 11060
},
{
"epoch": 7.28768926925609,
"grad_norm": 0.3321257531642914,
"learning_rate": 9.899638173635489e-05,
"loss": 0.0185,
"step": 11070
},
{
"epoch": 7.294272547728769,
"grad_norm": 0.3059992492198944,
"learning_rate": 9.899308280684849e-05,
"loss": 0.0168,
"step": 11080
},
{
"epoch": 7.300855826201448,
"grad_norm": 0.3122404217720032,
"learning_rate": 9.898977851953222e-05,
"loss": 0.0137,
"step": 11090
},
{
"epoch": 7.307439104674128,
"grad_norm": 0.27130135893821716,
"learning_rate": 9.898646887476741e-05,
"loss": 0.016,
"step": 11100
},
{
"epoch": 7.314022383146807,
"grad_norm": 0.24871034920215607,
"learning_rate": 9.898315387291603e-05,
"loss": 0.0179,
"step": 11110
},
{
"epoch": 7.320605661619487,
"grad_norm": 0.33382299542427063,
"learning_rate": 9.89798335143406e-05,
"loss": 0.0155,
"step": 11120
},
{
"epoch": 7.3271889400921655,
"grad_norm": 0.4217686951160431,
"learning_rate": 9.897650779940419e-05,
"loss": 0.0158,
"step": 11130
},
{
"epoch": 7.333772218564845,
"grad_norm": 0.37109145522117615,
"learning_rate": 9.897317672847054e-05,
"loss": 0.0152,
"step": 11140
},
{
"epoch": 7.340355497037525,
"grad_norm": 0.3274284899234772,
"learning_rate": 9.89698403019039e-05,
"loss": 0.0149,
"step": 11150
},
{
"epoch": 7.346938775510204,
"grad_norm": 0.35187312960624695,
"learning_rate": 9.896649852006917e-05,
"loss": 0.0203,
"step": 11160
},
{
"epoch": 7.353522053982884,
"grad_norm": 0.347332239151001,
"learning_rate": 9.896315138333177e-05,
"loss": 0.0205,
"step": 11170
},
{
"epoch": 7.3601053324555625,
"grad_norm": 0.33512142300605774,
"learning_rate": 9.895979889205774e-05,
"loss": 0.0191,
"step": 11180
},
{
"epoch": 7.366688610928242,
"grad_norm": 0.3043818175792694,
"learning_rate": 9.895644104661372e-05,
"loss": 0.0238,
"step": 11190
},
{
"epoch": 7.373271889400922,
"grad_norm": 0.28545814752578735,
"learning_rate": 9.895307784736691e-05,
"loss": 0.0166,
"step": 11200
},
{
"epoch": 7.379855167873601,
"grad_norm": 0.38100355863571167,
"learning_rate": 9.894970929468512e-05,
"loss": 0.0294,
"step": 11210
},
{
"epoch": 7.386438446346281,
"grad_norm": 0.36033323407173157,
"learning_rate": 9.89463353889367e-05,
"loss": 0.0171,
"step": 11220
},
{
"epoch": 7.39302172481896,
"grad_norm": 0.41543373465538025,
"learning_rate": 9.894295613049065e-05,
"loss": 0.0214,
"step": 11230
},
{
"epoch": 7.399605003291639,
"grad_norm": 0.35040634870529175,
"learning_rate": 9.893957151971649e-05,
"loss": 0.0219,
"step": 11240
},
{
"epoch": 7.406188281764319,
"grad_norm": 0.21702241897583008,
"learning_rate": 9.893618155698436e-05,
"loss": 0.0172,
"step": 11250
},
{
"epoch": 7.412771560236998,
"grad_norm": 0.16841621696949005,
"learning_rate": 9.8932786242665e-05,
"loss": 0.0163,
"step": 11260
},
{
"epoch": 7.419354838709677,
"grad_norm": 0.2708188593387604,
"learning_rate": 9.89293855771297e-05,
"loss": 0.0174,
"step": 11270
},
{
"epoch": 7.425938117182357,
"grad_norm": 0.2793221175670624,
"learning_rate": 9.892597956075036e-05,
"loss": 0.0301,
"step": 11280
},
{
"epoch": 7.432521395655036,
"grad_norm": 0.28793931007385254,
"learning_rate": 9.892256819389947e-05,
"loss": 0.0136,
"step": 11290
},
{
"epoch": 7.439104674127716,
"grad_norm": 0.2080307900905609,
"learning_rate": 9.891915147695006e-05,
"loss": 0.0135,
"step": 11300
},
{
"epoch": 7.445687952600395,
"grad_norm": 0.34421682357788086,
"learning_rate": 9.891572941027577e-05,
"loss": 0.0154,
"step": 11310
},
{
"epoch": 7.452271231073074,
"grad_norm": 0.28043636679649353,
"learning_rate": 9.89123019942509e-05,
"loss": 0.0227,
"step": 11320
},
{
"epoch": 7.458854509545754,
"grad_norm": 0.43760061264038086,
"learning_rate": 9.89088692292502e-05,
"loss": 0.0205,
"step": 11330
},
{
"epoch": 7.465437788018433,
"grad_norm": 0.36671656370162964,
"learning_rate": 9.89054311156491e-05,
"loss": 0.0188,
"step": 11340
},
{
"epoch": 7.472021066491113,
"grad_norm": 0.32964199781417847,
"learning_rate": 9.890198765382357e-05,
"loss": 0.0169,
"step": 11350
},
{
"epoch": 7.478604344963792,
"grad_norm": 0.2949651777744293,
"learning_rate": 9.889853884415021e-05,
"loss": 0.0204,
"step": 11360
},
{
"epoch": 7.485187623436471,
"grad_norm": 0.19746604561805725,
"learning_rate": 9.889508468700614e-05,
"loss": 0.0154,
"step": 11370
},
{
"epoch": 7.491770901909151,
"grad_norm": 0.25154274702072144,
"learning_rate": 9.889162518276915e-05,
"loss": 0.024,
"step": 11380
},
{
"epoch": 7.49835418038183,
"grad_norm": 0.28722724318504333,
"learning_rate": 9.888816033181752e-05,
"loss": 0.0144,
"step": 11390
},
{
"epoch": 7.50493745885451,
"grad_norm": 0.30025094747543335,
"learning_rate": 9.888469013453018e-05,
"loss": 0.0132,
"step": 11400
},
{
"epoch": 7.511520737327189,
"grad_norm": 0.3434738218784332,
"learning_rate": 9.888121459128663e-05,
"loss": 0.0204,
"step": 11410
},
{
"epoch": 7.518104015799868,
"grad_norm": 0.3177078068256378,
"learning_rate": 9.887773370246693e-05,
"loss": 0.0203,
"step": 11420
},
{
"epoch": 7.524687294272548,
"grad_norm": 0.20764517784118652,
"learning_rate": 9.887424746845177e-05,
"loss": 0.0236,
"step": 11430
},
{
"epoch": 7.531270572745227,
"grad_norm": 0.32055962085723877,
"learning_rate": 9.887075588962239e-05,
"loss": 0.0193,
"step": 11440
},
{
"epoch": 7.537853851217907,
"grad_norm": 0.41916972398757935,
"learning_rate": 9.88672589663606e-05,
"loss": 0.0236,
"step": 11450
},
{
"epoch": 7.544437129690586,
"grad_norm": 0.3371690511703491,
"learning_rate": 9.886375669904886e-05,
"loss": 0.0217,
"step": 11460
},
{
"epoch": 7.551020408163265,
"grad_norm": 0.3066478669643402,
"learning_rate": 9.886024908807014e-05,
"loss": 0.0154,
"step": 11470
},
{
"epoch": 7.557603686635945,
"grad_norm": 0.35706502199172974,
"learning_rate": 9.885673613380806e-05,
"loss": 0.02,
"step": 11480
},
{
"epoch": 7.564186965108624,
"grad_norm": 0.32649919390678406,
"learning_rate": 9.885321783664676e-05,
"loss": 0.0165,
"step": 11490
},
{
"epoch": 7.570770243581303,
"grad_norm": 0.41602134704589844,
"learning_rate": 9.884969419697101e-05,
"loss": 0.0171,
"step": 11500
},
{
"epoch": 7.577353522053983,
"grad_norm": 0.24239778518676758,
"learning_rate": 9.884616521516614e-05,
"loss": 0.0161,
"step": 11510
},
{
"epoch": 7.583936800526662,
"grad_norm": 0.3191376328468323,
"learning_rate": 9.88426308916181e-05,
"loss": 0.0197,
"step": 11520
},
{
"epoch": 7.590520078999342,
"grad_norm": 0.28587859869003296,
"learning_rate": 9.883909122671335e-05,
"loss": 0.0225,
"step": 11530
},
{
"epoch": 7.597103357472021,
"grad_norm": 0.29848742485046387,
"learning_rate": 9.883554622083904e-05,
"loss": 0.0162,
"step": 11540
},
{
"epoch": 7.6036866359447,
"grad_norm": 0.2986828684806824,
"learning_rate": 9.88319958743828e-05,
"loss": 0.0193,
"step": 11550
},
{
"epoch": 7.61026991441738,
"grad_norm": 0.2457902580499649,
"learning_rate": 9.882844018773291e-05,
"loss": 0.0236,
"step": 11560
},
{
"epoch": 7.616853192890059,
"grad_norm": 0.2636571526527405,
"learning_rate": 9.882487916127823e-05,
"loss": 0.0135,
"step": 11570
},
{
"epoch": 7.623436471362739,
"grad_norm": 0.2617330849170685,
"learning_rate": 9.882131279540815e-05,
"loss": 0.0232,
"step": 11580
},
{
"epoch": 7.630019749835418,
"grad_norm": 0.2502003014087677,
"learning_rate": 9.881774109051271e-05,
"loss": 0.0145,
"step": 11590
},
{
"epoch": 7.636603028308097,
"grad_norm": 0.3198162615299225,
"learning_rate": 9.881416404698252e-05,
"loss": 0.0161,
"step": 11600
},
{
"epoch": 7.643186306780777,
"grad_norm": 0.2585660517215729,
"learning_rate": 9.881058166520873e-05,
"loss": 0.015,
"step": 11610
},
{
"epoch": 7.649769585253456,
"grad_norm": 0.4241032600402832,
"learning_rate": 9.880699394558311e-05,
"loss": 0.0156,
"step": 11620
},
{
"epoch": 7.656352863726136,
"grad_norm": 0.2813708484172821,
"learning_rate": 9.880340088849801e-05,
"loss": 0.0158,
"step": 11630
},
{
"epoch": 7.662936142198815,
"grad_norm": 0.3326049745082855,
"learning_rate": 9.879980249434637e-05,
"loss": 0.0186,
"step": 11640
},
{
"epoch": 7.669519420671494,
"grad_norm": 0.42590346932411194,
"learning_rate": 9.879619876352168e-05,
"loss": 0.0214,
"step": 11650
},
{
"epoch": 7.676102699144174,
"grad_norm": 0.37882736325263977,
"learning_rate": 9.879258969641809e-05,
"loss": 0.0209,
"step": 11660
},
{
"epoch": 7.6826859776168535,
"grad_norm": 0.3366725742816925,
"learning_rate": 9.878897529343023e-05,
"loss": 0.0192,
"step": 11670
},
{
"epoch": 7.689269256089533,
"grad_norm": 0.3143966794013977,
"learning_rate": 9.878535555495338e-05,
"loss": 0.0194,
"step": 11680
},
{
"epoch": 7.695852534562212,
"grad_norm": 0.24715743958950043,
"learning_rate": 9.87817304813834e-05,
"loss": 0.0249,
"step": 11690
},
{
"epoch": 7.702435813034891,
"grad_norm": 0.45325416326522827,
"learning_rate": 9.877810007311671e-05,
"loss": 0.0279,
"step": 11700
},
{
"epoch": 7.709019091507571,
"grad_norm": 0.2868269681930542,
"learning_rate": 9.877446433055035e-05,
"loss": 0.0201,
"step": 11710
},
{
"epoch": 7.7156023699802505,
"grad_norm": 0.42461174726486206,
"learning_rate": 9.877082325408191e-05,
"loss": 0.0192,
"step": 11720
},
{
"epoch": 7.722185648452929,
"grad_norm": 0.325031578540802,
"learning_rate": 9.876717684410954e-05,
"loss": 0.0197,
"step": 11730
},
{
"epoch": 7.728768926925609,
"grad_norm": 0.23536282777786255,
"learning_rate": 9.876352510103204e-05,
"loss": 0.0168,
"step": 11740
},
{
"epoch": 7.735352205398288,
"grad_norm": 0.34148913621902466,
"learning_rate": 9.875986802524875e-05,
"loss": 0.0257,
"step": 11750
},
{
"epoch": 7.741935483870968,
"grad_norm": 0.23587101697921753,
"learning_rate": 9.87562056171596e-05,
"loss": 0.0286,
"step": 11760
},
{
"epoch": 7.748518762343647,
"grad_norm": 0.3217344284057617,
"learning_rate": 9.875253787716511e-05,
"loss": 0.0166,
"step": 11770
},
{
"epoch": 7.755102040816326,
"grad_norm": 0.286668062210083,
"learning_rate": 9.874886480566637e-05,
"loss": 0.0163,
"step": 11780
},
{
"epoch": 7.761685319289006,
"grad_norm": 0.2901325225830078,
"learning_rate": 9.874518640306507e-05,
"loss": 0.0163,
"step": 11790
},
{
"epoch": 7.768268597761685,
"grad_norm": 0.31648361682891846,
"learning_rate": 9.874150266976347e-05,
"loss": 0.0234,
"step": 11800
},
{
"epoch": 7.774851876234365,
"grad_norm": 0.21095728874206543,
"learning_rate": 9.873781360616443e-05,
"loss": 0.0203,
"step": 11810
},
{
"epoch": 7.781435154707044,
"grad_norm": 0.3472936153411865,
"learning_rate": 9.873411921267137e-05,
"loss": 0.0193,
"step": 11820
},
{
"epoch": 7.788018433179723,
"grad_norm": 0.2894751727581024,
"learning_rate": 9.873041948968829e-05,
"loss": 0.0189,
"step": 11830
},
{
"epoch": 7.794601711652403,
"grad_norm": 0.2715581953525543,
"learning_rate": 9.872671443761981e-05,
"loss": 0.0216,
"step": 11840
},
{
"epoch": 7.8011849901250825,
"grad_norm": 0.307728111743927,
"learning_rate": 9.872300405687109e-05,
"loss": 0.0172,
"step": 11850
},
{
"epoch": 7.807768268597762,
"grad_norm": 0.29953813552856445,
"learning_rate": 9.871928834784792e-05,
"loss": 0.0167,
"step": 11860
},
{
"epoch": 7.814351547070441,
"grad_norm": 0.23695741593837738,
"learning_rate": 9.871556731095661e-05,
"loss": 0.0185,
"step": 11870
},
{
"epoch": 7.82093482554312,
"grad_norm": 0.29778993129730225,
"learning_rate": 9.871184094660411e-05,
"loss": 0.0175,
"step": 11880
},
{
"epoch": 7.8275181040158,
"grad_norm": 0.340423583984375,
"learning_rate": 9.870810925519791e-05,
"loss": 0.0172,
"step": 11890
},
{
"epoch": 7.8341013824884795,
"grad_norm": 0.27096155285835266,
"learning_rate": 9.870437223714612e-05,
"loss": 0.0167,
"step": 11900
},
{
"epoch": 7.840684660961159,
"grad_norm": 0.31351974606513977,
"learning_rate": 9.87006298928574e-05,
"loss": 0.0163,
"step": 11910
},
{
"epoch": 7.847267939433838,
"grad_norm": 0.30044785141944885,
"learning_rate": 9.869688222274103e-05,
"loss": 0.0161,
"step": 11920
},
{
"epoch": 7.853851217906517,
"grad_norm": 0.358417809009552,
"learning_rate": 9.869312922720681e-05,
"loss": 0.0152,
"step": 11930
},
{
"epoch": 7.860434496379197,
"grad_norm": 0.1807613968849182,
"learning_rate": 9.868937090666521e-05,
"loss": 0.0163,
"step": 11940
},
{
"epoch": 7.867017774851877,
"grad_norm": 0.31109392642974854,
"learning_rate": 9.86856072615272e-05,
"loss": 0.0162,
"step": 11950
},
{
"epoch": 7.873601053324555,
"grad_norm": 0.2280602753162384,
"learning_rate": 9.868183829220438e-05,
"loss": 0.0179,
"step": 11960
},
{
"epoch": 7.880184331797235,
"grad_norm": 0.24024052917957306,
"learning_rate": 9.867806399910893e-05,
"loss": 0.0136,
"step": 11970
},
{
"epoch": 7.8867676102699145,
"grad_norm": 0.3139645755290985,
"learning_rate": 9.867428438265356e-05,
"loss": 0.0193,
"step": 11980
},
{
"epoch": 7.893350888742594,
"grad_norm": 0.2891336679458618,
"learning_rate": 9.867049944325165e-05,
"loss": 0.0169,
"step": 11990
},
{
"epoch": 7.899934167215273,
"grad_norm": 0.2247208207845688,
"learning_rate": 9.86667091813171e-05,
"loss": 0.0189,
"step": 12000
},
{
"epoch": 7.906517445687952,
"grad_norm": 0.2784267067909241,
"learning_rate": 9.866291359726438e-05,
"loss": 0.0127,
"step": 12010
},
{
"epoch": 7.913100724160632,
"grad_norm": 0.24842417240142822,
"learning_rate": 9.865911269150861e-05,
"loss": 0.0165,
"step": 12020
},
{
"epoch": 7.9196840026333115,
"grad_norm": 0.3291228413581848,
"learning_rate": 9.865530646446544e-05,
"loss": 0.0175,
"step": 12030
},
{
"epoch": 7.926267281105991,
"grad_norm": 0.26979872584342957,
"learning_rate": 9.86514949165511e-05,
"loss": 0.0154,
"step": 12040
},
{
"epoch": 7.93285055957867,
"grad_norm": 0.2742098569869995,
"learning_rate": 9.864767804818243e-05,
"loss": 0.0153,
"step": 12050
},
{
"epoch": 7.939433838051349,
"grad_norm": 0.3229682147502899,
"learning_rate": 9.86438558597768e-05,
"loss": 0.0125,
"step": 12060
},
{
"epoch": 7.946017116524029,
"grad_norm": 0.2588154375553131,
"learning_rate": 9.864002835175225e-05,
"loss": 0.0215,
"step": 12070
},
{
"epoch": 7.952600394996709,
"grad_norm": 0.22620663046836853,
"learning_rate": 9.863619552452734e-05,
"loss": 0.0148,
"step": 12080
},
{
"epoch": 7.959183673469388,
"grad_norm": 0.3100728392601013,
"learning_rate": 9.863235737852119e-05,
"loss": 0.0143,
"step": 12090
},
{
"epoch": 7.965766951942067,
"grad_norm": 0.2987661063671112,
"learning_rate": 9.862851391415356e-05,
"loss": 0.0132,
"step": 12100
},
{
"epoch": 7.972350230414746,
"grad_norm": 0.45167309045791626,
"learning_rate": 9.862466513184477e-05,
"loss": 0.0182,
"step": 12110
},
{
"epoch": 7.978933508887426,
"grad_norm": 0.21111200749874115,
"learning_rate": 9.86208110320157e-05,
"loss": 0.0165,
"step": 12120
},
{
"epoch": 7.985516787360106,
"grad_norm": 0.30797868967056274,
"learning_rate": 9.861695161508784e-05,
"loss": 0.015,
"step": 12130
},
{
"epoch": 7.992100065832785,
"grad_norm": 0.40244606137275696,
"learning_rate": 9.861308688148324e-05,
"loss": 0.0156,
"step": 12140
},
{
"epoch": 7.998683344305464,
"grad_norm": 0.39233842492103577,
"learning_rate": 9.860921683162455e-05,
"loss": 0.0229,
"step": 12150
},
{
"epoch": 8.005266622778144,
"grad_norm": 0.34103813767433167,
"learning_rate": 9.860534146593499e-05,
"loss": 0.0183,
"step": 12160
},
{
"epoch": 8.011849901250823,
"grad_norm": 0.24232713878154755,
"learning_rate": 9.860146078483836e-05,
"loss": 0.0136,
"step": 12170
},
{
"epoch": 8.018433179723502,
"grad_norm": 0.2246544510126114,
"learning_rate": 9.859757478875905e-05,
"loss": 0.0173,
"step": 12180
},
{
"epoch": 8.025016458196182,
"grad_norm": 0.3285483419895172,
"learning_rate": 9.859368347812204e-05,
"loss": 0.0135,
"step": 12190
},
{
"epoch": 8.031599736668861,
"grad_norm": 0.36343762278556824,
"learning_rate": 9.858978685335285e-05,
"loss": 0.0141,
"step": 12200
},
{
"epoch": 8.03818301514154,
"grad_norm": 0.32194772362709045,
"learning_rate": 9.858588491487763e-05,
"loss": 0.0148,
"step": 12210
},
{
"epoch": 8.04476629361422,
"grad_norm": 0.30875617265701294,
"learning_rate": 9.858197766312308e-05,
"loss": 0.0145,
"step": 12220
},
{
"epoch": 8.051349572086899,
"grad_norm": 0.3113337457180023,
"learning_rate": 9.857806509851649e-05,
"loss": 0.0178,
"step": 12230
},
{
"epoch": 8.05793285055958,
"grad_norm": 0.33212047815322876,
"learning_rate": 9.857414722148574e-05,
"loss": 0.018,
"step": 12240
},
{
"epoch": 8.064516129032258,
"grad_norm": 0.3719279170036316,
"learning_rate": 9.857022403245928e-05,
"loss": 0.018,
"step": 12250
},
{
"epoch": 8.071099407504937,
"grad_norm": 0.2751464545726776,
"learning_rate": 9.856629553186615e-05,
"loss": 0.0185,
"step": 12260
},
{
"epoch": 8.077682685977617,
"grad_norm": 0.2813555896282196,
"learning_rate": 9.856236172013595e-05,
"loss": 0.0145,
"step": 12270
},
{
"epoch": 8.084265964450296,
"grad_norm": 0.22800247371196747,
"learning_rate": 9.85584225976989e-05,
"loss": 0.0149,
"step": 12280
},
{
"epoch": 8.090849242922976,
"grad_norm": 0.2789894640445709,
"learning_rate": 9.855447816498575e-05,
"loss": 0.0139,
"step": 12290
},
{
"epoch": 8.097432521395655,
"grad_norm": 0.3160497546195984,
"learning_rate": 9.855052842242787e-05,
"loss": 0.0149,
"step": 12300
},
{
"epoch": 8.104015799868334,
"grad_norm": 0.19210468232631683,
"learning_rate": 9.85465733704572e-05,
"loss": 0.0159,
"step": 12310
},
{
"epoch": 8.110599078341014,
"grad_norm": 0.28471383452415466,
"learning_rate": 9.854261300950624e-05,
"loss": 0.0204,
"step": 12320
},
{
"epoch": 8.117182356813693,
"grad_norm": 0.23705516755580902,
"learning_rate": 9.853864734000813e-05,
"loss": 0.0178,
"step": 12330
},
{
"epoch": 8.123765635286373,
"grad_norm": 0.2857564389705658,
"learning_rate": 9.85346763623965e-05,
"loss": 0.0185,
"step": 12340
},
{
"epoch": 8.130348913759052,
"grad_norm": 0.3060814440250397,
"learning_rate": 9.853070007710564e-05,
"loss": 0.0178,
"step": 12350
},
{
"epoch": 8.13693219223173,
"grad_norm": 0.31875714659690857,
"learning_rate": 9.85267184845704e-05,
"loss": 0.016,
"step": 12360
},
{
"epoch": 8.143515470704411,
"grad_norm": 0.35909396409988403,
"learning_rate": 9.852273158522616e-05,
"loss": 0.0193,
"step": 12370
},
{
"epoch": 8.15009874917709,
"grad_norm": 0.2615959644317627,
"learning_rate": 9.851873937950896e-05,
"loss": 0.023,
"step": 12380
},
{
"epoch": 8.156682027649769,
"grad_norm": 0.36124199628829956,
"learning_rate": 9.851474186785537e-05,
"loss": 0.0203,
"step": 12390
},
{
"epoch": 8.16326530612245,
"grad_norm": 0.24005907773971558,
"learning_rate": 9.851073905070254e-05,
"loss": 0.0144,
"step": 12400
},
{
"epoch": 8.169848584595128,
"grad_norm": 0.2791614532470703,
"learning_rate": 9.850673092848824e-05,
"loss": 0.0175,
"step": 12410
},
{
"epoch": 8.176431863067808,
"grad_norm": 0.3442709743976593,
"learning_rate": 9.850271750165077e-05,
"loss": 0.0199,
"step": 12420
},
{
"epoch": 8.183015141540487,
"grad_norm": 0.33790722489356995,
"learning_rate": 9.849869877062902e-05,
"loss": 0.0133,
"step": 12430
},
{
"epoch": 8.189598420013166,
"grad_norm": 0.27029949426651,
"learning_rate": 9.849467473586252e-05,
"loss": 0.0205,
"step": 12440
},
{
"epoch": 8.196181698485846,
"grad_norm": 0.28875410556793213,
"learning_rate": 9.849064539779127e-05,
"loss": 0.0191,
"step": 12450
},
{
"epoch": 8.202764976958525,
"grad_norm": 0.24372316896915436,
"learning_rate": 9.848661075685594e-05,
"loss": 0.0122,
"step": 12460
},
{
"epoch": 8.209348255431205,
"grad_norm": 0.32099875807762146,
"learning_rate": 9.848257081349778e-05,
"loss": 0.0173,
"step": 12470
},
{
"epoch": 8.215931533903884,
"grad_norm": 0.37903597950935364,
"learning_rate": 9.847852556815856e-05,
"loss": 0.0177,
"step": 12480
},
{
"epoch": 8.222514812376563,
"grad_norm": 0.355498731136322,
"learning_rate": 9.847447502128067e-05,
"loss": 0.0173,
"step": 12490
},
{
"epoch": 8.229098090849243,
"grad_norm": 0.23270663619041443,
"learning_rate": 9.847041917330708e-05,
"loss": 0.0205,
"step": 12500
},
{
"epoch": 8.235681369321922,
"grad_norm": 0.25598177313804626,
"learning_rate": 9.846635802468132e-05,
"loss": 0.0167,
"step": 12510
},
{
"epoch": 8.242264647794602,
"grad_norm": 0.1973666250705719,
"learning_rate": 9.84622915758475e-05,
"loss": 0.0183,
"step": 12520
},
{
"epoch": 8.248847926267281,
"grad_norm": 0.30461353063583374,
"learning_rate": 9.845821982725034e-05,
"loss": 0.0186,
"step": 12530
},
{
"epoch": 8.25543120473996,
"grad_norm": 0.20619577169418335,
"learning_rate": 9.845414277933514e-05,
"loss": 0.0166,
"step": 12540
},
{
"epoch": 8.26201448321264,
"grad_norm": 0.3321439325809479,
"learning_rate": 9.845006043254771e-05,
"loss": 0.0206,
"step": 12550
},
{
"epoch": 8.268597761685319,
"grad_norm": 0.3072890043258667,
"learning_rate": 9.844597278733451e-05,
"loss": 0.0175,
"step": 12560
},
{
"epoch": 8.275181040158,
"grad_norm": 0.3021565079689026,
"learning_rate": 9.844187984414259e-05,
"loss": 0.0241,
"step": 12570
},
{
"epoch": 8.281764318630678,
"grad_norm": 0.19653943181037903,
"learning_rate": 9.84377816034195e-05,
"loss": 0.0161,
"step": 12580
},
{
"epoch": 8.288347597103357,
"grad_norm": 0.3110966682434082,
"learning_rate": 9.843367806561345e-05,
"loss": 0.0141,
"step": 12590
},
{
"epoch": 8.294930875576037,
"grad_norm": 0.23024876415729523,
"learning_rate": 9.842956923117317e-05,
"loss": 0.0156,
"step": 12600
},
{
"epoch": 8.301514154048716,
"grad_norm": 0.21673083305358887,
"learning_rate": 9.842545510054802e-05,
"loss": 0.0175,
"step": 12610
},
{
"epoch": 8.308097432521397,
"grad_norm": 0.32435718178749084,
"learning_rate": 9.842133567418792e-05,
"loss": 0.0177,
"step": 12620
},
{
"epoch": 8.314680710994075,
"grad_norm": 0.3552844524383545,
"learning_rate": 9.841721095254333e-05,
"loss": 0.0133,
"step": 12630
},
{
"epoch": 8.321263989466754,
"grad_norm": 0.24043118953704834,
"learning_rate": 9.841308093606537e-05,
"loss": 0.0162,
"step": 12640
},
{
"epoch": 8.327847267939434,
"grad_norm": 0.3286025822162628,
"learning_rate": 9.840894562520565e-05,
"loss": 0.0168,
"step": 12650
},
{
"epoch": 8.334430546412113,
"grad_norm": 0.3328453004360199,
"learning_rate": 9.840480502041642e-05,
"loss": 0.0185,
"step": 12660
},
{
"epoch": 8.341013824884792,
"grad_norm": 0.36894917488098145,
"learning_rate": 9.840065912215049e-05,
"loss": 0.0213,
"step": 12670
},
{
"epoch": 8.347597103357472,
"grad_norm": 0.3171577453613281,
"learning_rate": 9.839650793086124e-05,
"loss": 0.0167,
"step": 12680
},
{
"epoch": 8.354180381830151,
"grad_norm": 0.25338155031204224,
"learning_rate": 9.839235144700265e-05,
"loss": 0.0153,
"step": 12690
},
{
"epoch": 8.360763660302831,
"grad_norm": 0.3059747517108917,
"learning_rate": 9.838818967102926e-05,
"loss": 0.0135,
"step": 12700
},
{
"epoch": 8.36734693877551,
"grad_norm": 0.37138986587524414,
"learning_rate": 9.83840226033962e-05,
"loss": 0.0155,
"step": 12710
},
{
"epoch": 8.373930217248189,
"grad_norm": 0.34434598684310913,
"learning_rate": 9.837985024455918e-05,
"loss": 0.0184,
"step": 12720
},
{
"epoch": 8.38051349572087,
"grad_norm": 0.30485403537750244,
"learning_rate": 9.837567259497447e-05,
"loss": 0.0188,
"step": 12730
},
{
"epoch": 8.387096774193548,
"grad_norm": 0.3552128076553345,
"learning_rate": 9.837148965509894e-05,
"loss": 0.0192,
"step": 12740
},
{
"epoch": 8.393680052666229,
"grad_norm": 0.34970349073410034,
"learning_rate": 9.836730142539001e-05,
"loss": 0.0158,
"step": 12750
},
{
"epoch": 8.400263331138907,
"grad_norm": 0.2959810197353363,
"learning_rate": 9.836310790630574e-05,
"loss": 0.015,
"step": 12760
},
{
"epoch": 8.406846609611586,
"grad_norm": 0.38214898109436035,
"learning_rate": 9.83589090983047e-05,
"loss": 0.0235,
"step": 12770
},
{
"epoch": 8.413429888084266,
"grad_norm": 0.2933112382888794,
"learning_rate": 9.835470500184605e-05,
"loss": 0.0152,
"step": 12780
},
{
"epoch": 8.420013166556945,
"grad_norm": 0.30480775237083435,
"learning_rate": 9.835049561738957e-05,
"loss": 0.0183,
"step": 12790
},
{
"epoch": 8.426596445029626,
"grad_norm": 0.3544946610927582,
"learning_rate": 9.834628094539558e-05,
"loss": 0.0163,
"step": 12800
},
{
"epoch": 8.433179723502304,
"grad_norm": 0.33438676595687866,
"learning_rate": 9.834206098632499e-05,
"loss": 0.0186,
"step": 12810
},
{
"epoch": 8.439763001974983,
"grad_norm": 0.3019654452800751,
"learning_rate": 9.833783574063931e-05,
"loss": 0.0164,
"step": 12820
},
{
"epoch": 8.446346280447663,
"grad_norm": 0.3421627879142761,
"learning_rate": 9.833360520880058e-05,
"loss": 0.0157,
"step": 12830
},
{
"epoch": 8.452929558920342,
"grad_norm": 0.2156718522310257,
"learning_rate": 9.832936939127144e-05,
"loss": 0.0201,
"step": 12840
},
{
"epoch": 8.45951283739302,
"grad_norm": 0.26637282967567444,
"learning_rate": 9.832512828851515e-05,
"loss": 0.0134,
"step": 12850
},
{
"epoch": 8.466096115865701,
"grad_norm": 0.27418282628059387,
"learning_rate": 9.832088190099546e-05,
"loss": 0.0145,
"step": 12860
},
{
"epoch": 8.47267939433838,
"grad_norm": 0.29825589060783386,
"learning_rate": 9.831663022917679e-05,
"loss": 0.0192,
"step": 12870
},
{
"epoch": 8.47926267281106,
"grad_norm": 0.2637762725353241,
"learning_rate": 9.831237327352407e-05,
"loss": 0.0156,
"step": 12880
},
{
"epoch": 8.48584595128374,
"grad_norm": 0.29409998655319214,
"learning_rate": 9.830811103450286e-05,
"loss": 0.0125,
"step": 12890
},
{
"epoch": 8.492429229756418,
"grad_norm": 0.23705434799194336,
"learning_rate": 9.830384351257924e-05,
"loss": 0.0146,
"step": 12900
},
{
"epoch": 8.499012508229098,
"grad_norm": 0.21183177828788757,
"learning_rate": 9.829957070821993e-05,
"loss": 0.0135,
"step": 12910
},
{
"epoch": 8.505595786701777,
"grad_norm": 0.31055688858032227,
"learning_rate": 9.829529262189218e-05,
"loss": 0.018,
"step": 12920
},
{
"epoch": 8.512179065174458,
"grad_norm": 0.42195186018943787,
"learning_rate": 9.829100925406385e-05,
"loss": 0.0164,
"step": 12930
},
{
"epoch": 8.518762343647136,
"grad_norm": 0.3602997064590454,
"learning_rate": 9.828672060520333e-05,
"loss": 0.0237,
"step": 12940
},
{
"epoch": 8.525345622119815,
"grad_norm": 0.3757852613925934,
"learning_rate": 9.828242667577966e-05,
"loss": 0.0141,
"step": 12950
},
{
"epoch": 8.531928900592495,
"grad_norm": 0.25511571764945984,
"learning_rate": 9.82781274662624e-05,
"loss": 0.0175,
"step": 12960
},
{
"epoch": 8.538512179065174,
"grad_norm": 0.20275302231311798,
"learning_rate": 9.82738229771217e-05,
"loss": 0.0117,
"step": 12970
},
{
"epoch": 8.545095457537855,
"grad_norm": 0.23094315826892853,
"learning_rate": 9.826951320882829e-05,
"loss": 0.016,
"step": 12980
},
{
"epoch": 8.551678736010533,
"grad_norm": 0.365413099527359,
"learning_rate": 9.826519816185351e-05,
"loss": 0.0153,
"step": 12990
},
{
"epoch": 8.558262014483212,
"grad_norm": 0.35667482018470764,
"learning_rate": 9.826087783666921e-05,
"loss": 0.0155,
"step": 13000
},
{
"epoch": 8.564845292955892,
"grad_norm": 0.2302827686071396,
"learning_rate": 9.825655223374787e-05,
"loss": 0.0158,
"step": 13010
},
{
"epoch": 8.571428571428571,
"grad_norm": 0.24258863925933838,
"learning_rate": 9.825222135356253e-05,
"loss": 0.015,
"step": 13020
},
{
"epoch": 8.578011849901252,
"grad_norm": 0.28371527791023254,
"learning_rate": 9.82478851965868e-05,
"loss": 0.0141,
"step": 13030
},
{
"epoch": 8.58459512837393,
"grad_norm": 0.30404961109161377,
"learning_rate": 9.82435437632949e-05,
"loss": 0.0232,
"step": 13040
},
{
"epoch": 8.591178406846609,
"grad_norm": 0.31755396723747253,
"learning_rate": 9.823919705416158e-05,
"loss": 0.0193,
"step": 13050
},
{
"epoch": 8.59776168531929,
"grad_norm": 0.327505886554718,
"learning_rate": 9.82348450696622e-05,
"loss": 0.0157,
"step": 13060
},
{
"epoch": 8.604344963791968,
"grad_norm": 0.285749614238739,
"learning_rate": 9.823048781027268e-05,
"loss": 0.016,
"step": 13070
},
{
"epoch": 8.610928242264649,
"grad_norm": 0.27768033742904663,
"learning_rate": 9.822612527646953e-05,
"loss": 0.0191,
"step": 13080
},
{
"epoch": 8.617511520737327,
"grad_norm": 0.2752527892589569,
"learning_rate": 9.822175746872984e-05,
"loss": 0.0154,
"step": 13090
},
{
"epoch": 8.624094799210006,
"grad_norm": 0.2068960964679718,
"learning_rate": 9.821738438753123e-05,
"loss": 0.0169,
"step": 13100
},
{
"epoch": 8.630678077682687,
"grad_norm": 0.3518577814102173,
"learning_rate": 9.821300603335196e-05,
"loss": 0.0183,
"step": 13110
},
{
"epoch": 8.637261356155365,
"grad_norm": 0.3151343762874603,
"learning_rate": 9.820862240667085e-05,
"loss": 0.0144,
"step": 13120
},
{
"epoch": 8.643844634628044,
"grad_norm": 0.25698354840278625,
"learning_rate": 9.820423350796726e-05,
"loss": 0.0192,
"step": 13130
},
{
"epoch": 8.650427913100724,
"grad_norm": 0.2843801975250244,
"learning_rate": 9.819983933772118e-05,
"loss": 0.0223,
"step": 13140
},
{
"epoch": 8.657011191573403,
"grad_norm": 0.294238418340683,
"learning_rate": 9.819543989641314e-05,
"loss": 0.0246,
"step": 13150
},
{
"epoch": 8.663594470046084,
"grad_norm": 0.33931365609169006,
"learning_rate": 9.819103518452423e-05,
"loss": 0.022,
"step": 13160
},
{
"epoch": 8.670177748518762,
"grad_norm": 0.3251313269138336,
"learning_rate": 9.818662520253618e-05,
"loss": 0.0178,
"step": 13170
},
{
"epoch": 8.676761026991441,
"grad_norm": 0.31433048844337463,
"learning_rate": 9.818220995093126e-05,
"loss": 0.0166,
"step": 13180
},
{
"epoch": 8.683344305464122,
"grad_norm": 0.39611032605171204,
"learning_rate": 9.817778943019228e-05,
"loss": 0.0153,
"step": 13190
},
{
"epoch": 8.6899275839368,
"grad_norm": 0.3499734401702881,
"learning_rate": 9.81733636408027e-05,
"loss": 0.0199,
"step": 13200
},
{
"epoch": 8.69651086240948,
"grad_norm": 0.306035578250885,
"learning_rate": 9.816893258324649e-05,
"loss": 0.0133,
"step": 13210
},
{
"epoch": 8.70309414088216,
"grad_norm": 0.28625723719596863,
"learning_rate": 9.816449625800823e-05,
"loss": 0.0185,
"step": 13220
},
{
"epoch": 8.709677419354838,
"grad_norm": 0.2791520655155182,
"learning_rate": 9.816005466557308e-05,
"loss": 0.0169,
"step": 13230
},
{
"epoch": 8.716260697827519,
"grad_norm": 0.33265793323516846,
"learning_rate": 9.815560780642674e-05,
"loss": 0.0123,
"step": 13240
},
{
"epoch": 8.722843976300197,
"grad_norm": 0.3330969512462616,
"learning_rate": 9.815115568105555e-05,
"loss": 0.0163,
"step": 13250
},
{
"epoch": 8.729427254772876,
"grad_norm": 0.35198885202407837,
"learning_rate": 9.814669828994638e-05,
"loss": 0.0216,
"step": 13260
},
{
"epoch": 8.736010533245556,
"grad_norm": 0.27274513244628906,
"learning_rate": 9.814223563358665e-05,
"loss": 0.0185,
"step": 13270
},
{
"epoch": 8.742593811718235,
"grad_norm": 0.2755146920681,
"learning_rate": 9.813776771246443e-05,
"loss": 0.0172,
"step": 13280
},
{
"epoch": 8.749177090190916,
"grad_norm": 0.35094380378723145,
"learning_rate": 9.813329452706829e-05,
"loss": 0.0128,
"step": 13290
},
{
"epoch": 8.755760368663594,
"grad_norm": 0.31531864404678345,
"learning_rate": 9.812881607788744e-05,
"loss": 0.0177,
"step": 13300
},
{
"epoch": 8.762343647136273,
"grad_norm": 0.37198206782341003,
"learning_rate": 9.812433236541163e-05,
"loss": 0.0166,
"step": 13310
},
{
"epoch": 8.768926925608953,
"grad_norm": 0.32415178418159485,
"learning_rate": 9.811984339013116e-05,
"loss": 0.018,
"step": 13320
},
{
"epoch": 8.775510204081632,
"grad_norm": 0.3791441023349762,
"learning_rate": 9.811534915253698e-05,
"loss": 0.0177,
"step": 13330
},
{
"epoch": 8.782093482554313,
"grad_norm": 0.30991920828819275,
"learning_rate": 9.811084965312056e-05,
"loss": 0.0167,
"step": 13340
},
{
"epoch": 8.788676761026991,
"grad_norm": 0.42311421036720276,
"learning_rate": 9.810634489237396e-05,
"loss": 0.0225,
"step": 13350
},
{
"epoch": 8.79526003949967,
"grad_norm": 0.2889334559440613,
"learning_rate": 9.81018348707898e-05,
"loss": 0.0165,
"step": 13360
},
{
"epoch": 8.80184331797235,
"grad_norm": 0.2725805938243866,
"learning_rate": 9.809731958886131e-05,
"loss": 0.0128,
"step": 13370
},
{
"epoch": 8.80842659644503,
"grad_norm": 0.19188492000102997,
"learning_rate": 9.809279904708224e-05,
"loss": 0.0185,
"step": 13380
},
{
"epoch": 8.81500987491771,
"grad_norm": 0.2741115689277649,
"learning_rate": 9.808827324594699e-05,
"loss": 0.0142,
"step": 13390
},
{
"epoch": 8.821593153390388,
"grad_norm": 0.30418282747268677,
"learning_rate": 9.808374218595046e-05,
"loss": 0.0168,
"step": 13400
},
{
"epoch": 8.828176431863067,
"grad_norm": 0.20805540680885315,
"learning_rate": 9.80792058675882e-05,
"loss": 0.0131,
"step": 13410
},
{
"epoch": 8.834759710335748,
"grad_norm": 0.3448200225830078,
"learning_rate": 9.807466429135627e-05,
"loss": 0.0172,
"step": 13420
},
{
"epoch": 8.841342988808426,
"grad_norm": 0.2902281582355499,
"learning_rate": 9.807011745775132e-05,
"loss": 0.0148,
"step": 13430
},
{
"epoch": 8.847926267281107,
"grad_norm": 0.3353148102760315,
"learning_rate": 9.806556536727061e-05,
"loss": 0.0154,
"step": 13440
},
{
"epoch": 8.854509545753785,
"grad_norm": 0.285465270280838,
"learning_rate": 9.806100802041193e-05,
"loss": 0.0137,
"step": 13450
},
{
"epoch": 8.861092824226464,
"grad_norm": 0.29432445764541626,
"learning_rate": 9.805644541767368e-05,
"loss": 0.0192,
"step": 13460
},
{
"epoch": 8.867676102699145,
"grad_norm": 0.3276880383491516,
"learning_rate": 9.805187755955478e-05,
"loss": 0.0225,
"step": 13470
},
{
"epoch": 8.874259381171823,
"grad_norm": 0.3457360565662384,
"learning_rate": 9.804730444655483e-05,
"loss": 0.0174,
"step": 13480
},
{
"epoch": 8.880842659644504,
"grad_norm": 0.15821167826652527,
"learning_rate": 9.804272607917388e-05,
"loss": 0.0166,
"step": 13490
},
{
"epoch": 8.887425938117183,
"grad_norm": 0.28339052200317383,
"learning_rate": 9.803814245791265e-05,
"loss": 0.0159,
"step": 13500
},
{
"epoch": 8.894009216589861,
"grad_norm": 0.2857043445110321,
"learning_rate": 9.803355358327239e-05,
"loss": 0.0132,
"step": 13510
},
{
"epoch": 8.900592495062542,
"grad_norm": 0.27127256989479065,
"learning_rate": 9.802895945575492e-05,
"loss": 0.0161,
"step": 13520
},
{
"epoch": 8.90717577353522,
"grad_norm": 0.3474126160144806,
"learning_rate": 9.802436007586266e-05,
"loss": 0.0135,
"step": 13530
},
{
"epoch": 8.9137590520079,
"grad_norm": 0.32982638478279114,
"learning_rate": 9.801975544409858e-05,
"loss": 0.0151,
"step": 13540
},
{
"epoch": 8.92034233048058,
"grad_norm": 0.2339026778936386,
"learning_rate": 9.801514556096625e-05,
"loss": 0.0114,
"step": 13550
},
{
"epoch": 8.926925608953258,
"grad_norm": 0.339264839887619,
"learning_rate": 9.801053042696977e-05,
"loss": 0.0168,
"step": 13560
},
{
"epoch": 8.933508887425939,
"grad_norm": 0.29597020149230957,
"learning_rate": 9.800591004261388e-05,
"loss": 0.018,
"step": 13570
},
{
"epoch": 8.940092165898617,
"grad_norm": 0.292415052652359,
"learning_rate": 9.800128440840385e-05,
"loss": 0.0145,
"step": 13580
},
{
"epoch": 8.946675444371296,
"grad_norm": 0.33653610944747925,
"learning_rate": 9.799665352484552e-05,
"loss": 0.019,
"step": 13590
},
{
"epoch": 8.953258722843977,
"grad_norm": 0.26187387108802795,
"learning_rate": 9.799201739244532e-05,
"loss": 0.0232,
"step": 13600
},
{
"epoch": 8.959842001316655,
"grad_norm": 0.33759915828704834,
"learning_rate": 9.798737601171025e-05,
"loss": 0.014,
"step": 13610
},
{
"epoch": 8.966425279789336,
"grad_norm": 0.25862258672714233,
"learning_rate": 9.79827293831479e-05,
"loss": 0.014,
"step": 13620
},
{
"epoch": 8.973008558262014,
"grad_norm": 0.2738065719604492,
"learning_rate": 9.797807750726638e-05,
"loss": 0.0183,
"step": 13630
},
{
"epoch": 8.979591836734693,
"grad_norm": 0.313561350107193,
"learning_rate": 9.797342038457446e-05,
"loss": 0.0119,
"step": 13640
},
{
"epoch": 8.986175115207374,
"grad_norm": 0.2978798449039459,
"learning_rate": 9.796875801558141e-05,
"loss": 0.0115,
"step": 13650
},
{
"epoch": 8.992758393680052,
"grad_norm": 0.34267014265060425,
"learning_rate": 9.79640904007971e-05,
"loss": 0.0161,
"step": 13660
},
{
"epoch": 8.999341672152733,
"grad_norm": 0.31244221329689026,
"learning_rate": 9.795941754073199e-05,
"loss": 0.0116,
"step": 13670
},
{
"epoch": 9.005924950625412,
"grad_norm": 0.25567924976348877,
"learning_rate": 9.795473943589705e-05,
"loss": 0.0191,
"step": 13680
},
{
"epoch": 9.01250822909809,
"grad_norm": 0.3351512551307678,
"learning_rate": 9.795005608680394e-05,
"loss": 0.0135,
"step": 13690
},
{
"epoch": 9.01909150757077,
"grad_norm": 0.3257712423801422,
"learning_rate": 9.794536749396477e-05,
"loss": 0.0122,
"step": 13700
},
{
"epoch": 9.02567478604345,
"grad_norm": 0.22828339040279388,
"learning_rate": 9.79406736578923e-05,
"loss": 0.0153,
"step": 13710
},
{
"epoch": 9.03225806451613,
"grad_norm": 0.2682214379310608,
"learning_rate": 9.793597457909984e-05,
"loss": 0.0221,
"step": 13720
},
{
"epoch": 9.038841342988809,
"grad_norm": 0.29810208082199097,
"learning_rate": 9.793127025810127e-05,
"loss": 0.0163,
"step": 13730
},
{
"epoch": 9.045424621461487,
"grad_norm": 0.32273998856544495,
"learning_rate": 9.792656069541104e-05,
"loss": 0.0215,
"step": 13740
},
{
"epoch": 9.052007899934168,
"grad_norm": 0.32357364892959595,
"learning_rate": 9.79218458915442e-05,
"loss": 0.0205,
"step": 13750
},
{
"epoch": 9.058591178406846,
"grad_norm": 0.2245616763830185,
"learning_rate": 9.791712584701634e-05,
"loss": 0.0217,
"step": 13760
},
{
"epoch": 9.065174456879525,
"grad_norm": 0.276113361120224,
"learning_rate": 9.791240056234364e-05,
"loss": 0.0187,
"step": 13770
},
{
"epoch": 9.071757735352206,
"grad_norm": 0.255197674036026,
"learning_rate": 9.790767003804283e-05,
"loss": 0.0153,
"step": 13780
},
{
"epoch": 9.078341013824884,
"grad_norm": 0.31894806027412415,
"learning_rate": 9.790293427463126e-05,
"loss": 0.0152,
"step": 13790
},
{
"epoch": 9.084924292297565,
"grad_norm": 0.3326372802257538,
"learning_rate": 9.789819327262684e-05,
"loss": 0.0171,
"step": 13800
},
{
"epoch": 9.091507570770244,
"grad_norm": 0.31314146518707275,
"learning_rate": 9.7893447032548e-05,
"loss": 0.0178,
"step": 13810
},
{
"epoch": 9.098090849242922,
"grad_norm": 0.32698312401771545,
"learning_rate": 9.78886955549138e-05,
"loss": 0.0129,
"step": 13820
},
{
"epoch": 9.104674127715603,
"grad_norm": 0.3800216317176819,
"learning_rate": 9.788393884024387e-05,
"loss": 0.0248,
"step": 13830
},
{
"epoch": 9.111257406188281,
"grad_norm": 0.29832887649536133,
"learning_rate": 9.787917688905836e-05,
"loss": 0.0205,
"step": 13840
},
{
"epoch": 9.117840684660962,
"grad_norm": 0.3725479543209076,
"learning_rate": 9.787440970187807e-05,
"loss": 0.016,
"step": 13850
},
{
"epoch": 9.12442396313364,
"grad_norm": 0.2770329713821411,
"learning_rate": 9.786963727922429e-05,
"loss": 0.0125,
"step": 13860
},
{
"epoch": 9.13100724160632,
"grad_norm": 0.25596705079078674,
"learning_rate": 9.786485962161897e-05,
"loss": 0.0143,
"step": 13870
},
{
"epoch": 9.137590520079,
"grad_norm": 0.3303760886192322,
"learning_rate": 9.786007672958455e-05,
"loss": 0.0136,
"step": 13880
},
{
"epoch": 9.144173798551678,
"grad_norm": 0.24910618364810944,
"learning_rate": 9.78552886036441e-05,
"loss": 0.0192,
"step": 13890
},
{
"epoch": 9.150757077024359,
"grad_norm": 0.39877766370773315,
"learning_rate": 9.785049524432124e-05,
"loss": 0.0182,
"step": 13900
},
{
"epoch": 9.157340355497038,
"grad_norm": 0.27651259303092957,
"learning_rate": 9.784569665214016e-05,
"loss": 0.0165,
"step": 13910
},
{
"epoch": 9.163923633969716,
"grad_norm": 0.3207607865333557,
"learning_rate": 9.784089282762563e-05,
"loss": 0.0176,
"step": 13920
},
{
"epoch": 9.170506912442397,
"grad_norm": 0.20456086099147797,
"learning_rate": 9.7836083771303e-05,
"loss": 0.0107,
"step": 13930
},
{
"epoch": 9.177090190915075,
"grad_norm": 0.2635287046432495,
"learning_rate": 9.783126948369817e-05,
"loss": 0.0203,
"step": 13940
},
{
"epoch": 9.183673469387756,
"grad_norm": 0.23705953359603882,
"learning_rate": 9.78264499653376e-05,
"loss": 0.013,
"step": 13950
},
{
"epoch": 9.190256747860435,
"grad_norm": 0.24071741104125977,
"learning_rate": 9.782162521674838e-05,
"loss": 0.0144,
"step": 13960
},
{
"epoch": 9.196840026333113,
"grad_norm": 0.26427075266838074,
"learning_rate": 9.781679523845812e-05,
"loss": 0.0157,
"step": 13970
},
{
"epoch": 9.203423304805794,
"grad_norm": 0.24588192999362946,
"learning_rate": 9.781196003099502e-05,
"loss": 0.0204,
"step": 13980
},
{
"epoch": 9.210006583278473,
"grad_norm": 0.2302236407995224,
"learning_rate": 9.780711959488786e-05,
"loss": 0.011,
"step": 13990
},
{
"epoch": 9.216589861751151,
"grad_norm": 0.3896937072277069,
"learning_rate": 9.780227393066599e-05,
"loss": 0.0174,
"step": 14000
},
{
"epoch": 9.223173140223832,
"grad_norm": 0.2744394838809967,
"learning_rate": 9.77974230388593e-05,
"loss": 0.0153,
"step": 14010
},
{
"epoch": 9.22975641869651,
"grad_norm": 0.42921507358551025,
"learning_rate": 9.779256691999829e-05,
"loss": 0.0176,
"step": 14020
},
{
"epoch": 9.236339697169191,
"grad_norm": 0.26810210943222046,
"learning_rate": 9.778770557461403e-05,
"loss": 0.0135,
"step": 14030
},
{
"epoch": 9.24292297564187,
"grad_norm": 0.2330348938703537,
"learning_rate": 9.778283900323812e-05,
"loss": 0.0123,
"step": 14040
},
{
"epoch": 9.249506254114548,
"grad_norm": 0.25289079546928406,
"learning_rate": 9.777796720640277e-05,
"loss": 0.0158,
"step": 14050
},
{
"epoch": 9.256089532587229,
"grad_norm": 0.31572824716567993,
"learning_rate": 9.777309018464078e-05,
"loss": 0.0163,
"step": 14060
},
{
"epoch": 9.262672811059907,
"grad_norm": 0.3886810541152954,
"learning_rate": 9.776820793848547e-05,
"loss": 0.0141,
"step": 14070
},
{
"epoch": 9.269256089532588,
"grad_norm": 0.33628222346305847,
"learning_rate": 9.776332046847075e-05,
"loss": 0.023,
"step": 14080
},
{
"epoch": 9.275839368005267,
"grad_norm": 0.32423245906829834,
"learning_rate": 9.775842777513111e-05,
"loss": 0.0202,
"step": 14090
},
{
"epoch": 9.282422646477945,
"grad_norm": 0.29128214716911316,
"learning_rate": 9.775352985900163e-05,
"loss": 0.0252,
"step": 14100
},
{
"epoch": 9.289005924950626,
"grad_norm": 0.2644447386264801,
"learning_rate": 9.774862672061791e-05,
"loss": 0.0133,
"step": 14110
},
{
"epoch": 9.295589203423305,
"grad_norm": 0.2934948205947876,
"learning_rate": 9.774371836051616e-05,
"loss": 0.0152,
"step": 14120
},
{
"epoch": 9.302172481895985,
"grad_norm": 0.3662622570991516,
"learning_rate": 9.773880477923315e-05,
"loss": 0.0199,
"step": 14130
},
{
"epoch": 9.308755760368664,
"grad_norm": 0.221084862947464,
"learning_rate": 9.773388597730623e-05,
"loss": 0.0132,
"step": 14140
},
{
"epoch": 9.315339038841342,
"grad_norm": 0.2095106989145279,
"learning_rate": 9.77289619552733e-05,
"loss": 0.014,
"step": 14150
},
{
"epoch": 9.321922317314023,
"grad_norm": 0.19932235777378082,
"learning_rate": 9.772403271367285e-05,
"loss": 0.0132,
"step": 14160
},
{
"epoch": 9.328505595786702,
"grad_norm": 0.3056185245513916,
"learning_rate": 9.771909825304396e-05,
"loss": 0.0156,
"step": 14170
},
{
"epoch": 9.33508887425938,
"grad_norm": 0.2711247503757477,
"learning_rate": 9.771415857392619e-05,
"loss": 0.0109,
"step": 14180
},
{
"epoch": 9.34167215273206,
"grad_norm": 0.2054968923330307,
"learning_rate": 9.770921367685978e-05,
"loss": 0.0149,
"step": 14190
},
{
"epoch": 9.34825543120474,
"grad_norm": 0.33008214831352234,
"learning_rate": 9.770426356238551e-05,
"loss": 0.0155,
"step": 14200
},
{
"epoch": 9.35483870967742,
"grad_norm": 0.25478431582450867,
"learning_rate": 9.769930823104469e-05,
"loss": 0.0152,
"step": 14210
},
{
"epoch": 9.361421988150099,
"grad_norm": 0.27259862422943115,
"learning_rate": 9.769434768337926e-05,
"loss": 0.0143,
"step": 14220
},
{
"epoch": 9.368005266622777,
"grad_norm": 0.3343975245952606,
"learning_rate": 9.768938191993164e-05,
"loss": 0.0157,
"step": 14230
},
{
"epoch": 9.374588545095458,
"grad_norm": 0.4523605704307556,
"learning_rate": 9.768441094124494e-05,
"loss": 0.0191,
"step": 14240
},
{
"epoch": 9.381171823568137,
"grad_norm": 0.28783687949180603,
"learning_rate": 9.767943474786275e-05,
"loss": 0.0121,
"step": 14250
},
{
"epoch": 9.387755102040817,
"grad_norm": 0.3093189597129822,
"learning_rate": 9.767445334032923e-05,
"loss": 0.0179,
"step": 14260
},
{
"epoch": 9.394338380513496,
"grad_norm": 0.1693592220544815,
"learning_rate": 9.766946671918919e-05,
"loss": 0.0163,
"step": 14270
},
{
"epoch": 9.400921658986174,
"grad_norm": 0.3516111373901367,
"learning_rate": 9.766447488498796e-05,
"loss": 0.0191,
"step": 14280
},
{
"epoch": 9.407504937458855,
"grad_norm": 0.3251212537288666,
"learning_rate": 9.765947783827139e-05,
"loss": 0.0216,
"step": 14290
},
{
"epoch": 9.414088215931534,
"grad_norm": 0.18838593363761902,
"learning_rate": 9.765447557958599e-05,
"loss": 0.0131,
"step": 14300
},
{
"epoch": 9.420671494404214,
"grad_norm": 0.19156122207641602,
"learning_rate": 9.764946810947879e-05,
"loss": 0.022,
"step": 14310
},
{
"epoch": 9.427254772876893,
"grad_norm": 0.3185754418373108,
"learning_rate": 9.764445542849738e-05,
"loss": 0.0143,
"step": 14320
},
{
"epoch": 9.433838051349571,
"grad_norm": 0.19829919934272766,
"learning_rate": 9.763943753718998e-05,
"loss": 0.015,
"step": 14330
},
{
"epoch": 9.440421329822252,
"grad_norm": 0.2635055184364319,
"learning_rate": 9.76344144361053e-05,
"loss": 0.014,
"step": 14340
},
{
"epoch": 9.44700460829493,
"grad_norm": 0.27124494314193726,
"learning_rate": 9.762938612579269e-05,
"loss": 0.0156,
"step": 14350
},
{
"epoch": 9.453587886767611,
"grad_norm": 0.23035362362861633,
"learning_rate": 9.762435260680202e-05,
"loss": 0.0161,
"step": 14360
},
{
"epoch": 9.46017116524029,
"grad_norm": 0.23884962499141693,
"learning_rate": 9.761931387968373e-05,
"loss": 0.0234,
"step": 14370
},
{
"epoch": 9.466754443712968,
"grad_norm": 0.39816921949386597,
"learning_rate": 9.76142699449889e-05,
"loss": 0.0207,
"step": 14380
},
{
"epoch": 9.473337722185649,
"grad_norm": 0.2983594238758087,
"learning_rate": 9.760922080326908e-05,
"loss": 0.0142,
"step": 14390
},
{
"epoch": 9.479921000658328,
"grad_norm": 0.20895807445049286,
"learning_rate": 9.760416645507644e-05,
"loss": 0.0191,
"step": 14400
},
{
"epoch": 9.486504279131008,
"grad_norm": 0.21971069276332855,
"learning_rate": 9.759910690096375e-05,
"loss": 0.0135,
"step": 14410
},
{
"epoch": 9.493087557603687,
"grad_norm": 0.2968548834323883,
"learning_rate": 9.759404214148429e-05,
"loss": 0.0143,
"step": 14420
},
{
"epoch": 9.499670836076366,
"grad_norm": 0.24540165066719055,
"learning_rate": 9.758897217719191e-05,
"loss": 0.019,
"step": 14430
},
{
"epoch": 9.506254114549046,
"grad_norm": 0.21374639868736267,
"learning_rate": 9.758389700864113e-05,
"loss": 0.021,
"step": 14440
},
{
"epoch": 9.512837393021725,
"grad_norm": 0.3622443974018097,
"learning_rate": 9.757881663638688e-05,
"loss": 0.016,
"step": 14450
},
{
"epoch": 9.519420671494403,
"grad_norm": 0.25969240069389343,
"learning_rate": 9.757373106098478e-05,
"loss": 0.0146,
"step": 14460
},
{
"epoch": 9.526003949967084,
"grad_norm": 0.25105011463165283,
"learning_rate": 9.756864028299097e-05,
"loss": 0.0142,
"step": 14470
},
{
"epoch": 9.532587228439763,
"grad_norm": 0.21822065114974976,
"learning_rate": 9.75635443029622e-05,
"loss": 0.0166,
"step": 14480
},
{
"epoch": 9.539170506912443,
"grad_norm": 0.26269999146461487,
"learning_rate": 9.755844312145572e-05,
"loss": 0.0135,
"step": 14490
},
{
"epoch": 9.545753785385122,
"grad_norm": 0.38388341665267944,
"learning_rate": 9.755333673902941e-05,
"loss": 0.0135,
"step": 14500
},
{
"epoch": 9.5523370638578,
"grad_norm": 0.2771194577217102,
"learning_rate": 9.75482251562417e-05,
"loss": 0.0149,
"step": 14510
},
{
"epoch": 9.558920342330481,
"grad_norm": 0.2810881435871124,
"learning_rate": 9.754310837365155e-05,
"loss": 0.0157,
"step": 14520
},
{
"epoch": 9.56550362080316,
"grad_norm": 0.27615103125572205,
"learning_rate": 9.753798639181856e-05,
"loss": 0.0156,
"step": 14530
},
{
"epoch": 9.57208689927584,
"grad_norm": 0.33256658911705017,
"learning_rate": 9.753285921130286e-05,
"loss": 0.0155,
"step": 14540
},
{
"epoch": 9.578670177748519,
"grad_norm": 0.27427178621292114,
"learning_rate": 9.752772683266512e-05,
"loss": 0.0159,
"step": 14550
},
{
"epoch": 9.585253456221198,
"grad_norm": 0.23231226205825806,
"learning_rate": 9.752258925646665e-05,
"loss": 0.0131,
"step": 14560
},
{
"epoch": 9.591836734693878,
"grad_norm": 0.28529050946235657,
"learning_rate": 9.751744648326926e-05,
"loss": 0.013,
"step": 14570
},
{
"epoch": 9.598420013166557,
"grad_norm": 0.21589602530002594,
"learning_rate": 9.751229851363536e-05,
"loss": 0.0166,
"step": 14580
},
{
"epoch": 9.605003291639237,
"grad_norm": 0.26866182684898376,
"learning_rate": 9.750714534812793e-05,
"loss": 0.0193,
"step": 14590
},
{
"epoch": 9.611586570111916,
"grad_norm": 0.1514309197664261,
"learning_rate": 9.750198698731053e-05,
"loss": 0.0136,
"step": 14600
},
{
"epoch": 9.618169848584595,
"grad_norm": 0.2843020558357239,
"learning_rate": 9.749682343174722e-05,
"loss": 0.0178,
"step": 14610
},
{
"epoch": 9.624753127057275,
"grad_norm": 0.23375630378723145,
"learning_rate": 9.749165468200272e-05,
"loss": 0.0175,
"step": 14620
},
{
"epoch": 9.631336405529954,
"grad_norm": 0.2827167809009552,
"learning_rate": 9.748648073864229e-05,
"loss": 0.0135,
"step": 14630
},
{
"epoch": 9.637919684002632,
"grad_norm": 0.22570891678333282,
"learning_rate": 9.748130160223168e-05,
"loss": 0.0147,
"step": 14640
},
{
"epoch": 9.644502962475313,
"grad_norm": 0.35171452164649963,
"learning_rate": 9.747611727333734e-05,
"loss": 0.0145,
"step": 14650
},
{
"epoch": 9.651086240947992,
"grad_norm": 0.3610263168811798,
"learning_rate": 9.74709277525262e-05,
"loss": 0.0197,
"step": 14660
},
{
"epoch": 9.657669519420672,
"grad_norm": 0.3679834306240082,
"learning_rate": 9.746573304036576e-05,
"loss": 0.0148,
"step": 14670
},
{
"epoch": 9.66425279789335,
"grad_norm": 0.23180590569972992,
"learning_rate": 9.746053313742412e-05,
"loss": 0.0165,
"step": 14680
},
{
"epoch": 9.67083607636603,
"grad_norm": 0.2764023244380951,
"learning_rate": 9.745532804426994e-05,
"loss": 0.0158,
"step": 14690
},
{
"epoch": 9.67741935483871,
"grad_norm": 0.23102205991744995,
"learning_rate": 9.745011776147242e-05,
"loss": 0.0126,
"step": 14700
},
{
"epoch": 9.684002633311389,
"grad_norm": 0.2220112383365631,
"learning_rate": 9.744490228960138e-05,
"loss": 0.0119,
"step": 14710
},
{
"epoch": 9.69058591178407,
"grad_norm": 0.20861835777759552,
"learning_rate": 9.743968162922713e-05,
"loss": 0.0146,
"step": 14720
},
{
"epoch": 9.697169190256748,
"grad_norm": 0.22803926467895508,
"learning_rate": 9.743445578092064e-05,
"loss": 0.0113,
"step": 14730
},
{
"epoch": 9.703752468729427,
"grad_norm": 0.22774170339107513,
"learning_rate": 9.742922474525338e-05,
"loss": 0.0158,
"step": 14740
},
{
"epoch": 9.710335747202107,
"grad_norm": 0.28892743587493896,
"learning_rate": 9.742398852279741e-05,
"loss": 0.0181,
"step": 14750
},
{
"epoch": 9.716919025674786,
"grad_norm": 0.36422398686408997,
"learning_rate": 9.741874711412535e-05,
"loss": 0.0127,
"step": 14760
},
{
"epoch": 9.723502304147466,
"grad_norm": 0.2788906693458557,
"learning_rate": 9.741350051981042e-05,
"loss": 0.0241,
"step": 14770
},
{
"epoch": 9.730085582620145,
"grad_norm": 0.2301618754863739,
"learning_rate": 9.740824874042633e-05,
"loss": 0.013,
"step": 14780
},
{
"epoch": 9.736668861092824,
"grad_norm": 0.23938624560832977,
"learning_rate": 9.740299177654746e-05,
"loss": 0.019,
"step": 14790
},
{
"epoch": 9.743252139565504,
"grad_norm": 0.24052244424819946,
"learning_rate": 9.739772962874867e-05,
"loss": 0.0119,
"step": 14800
},
{
"epoch": 9.749835418038183,
"grad_norm": 0.24570004642009735,
"learning_rate": 9.739246229760541e-05,
"loss": 0.0091,
"step": 14810
},
{
"epoch": 9.756418696510863,
"grad_norm": 0.29425832629203796,
"learning_rate": 9.738718978369376e-05,
"loss": 0.0141,
"step": 14820
},
{
"epoch": 9.763001974983542,
"grad_norm": 0.22018343210220337,
"learning_rate": 9.738191208759025e-05,
"loss": 0.0177,
"step": 14830
},
{
"epoch": 9.76958525345622,
"grad_norm": 0.25019919872283936,
"learning_rate": 9.73766292098721e-05,
"loss": 0.0169,
"step": 14840
},
{
"epoch": 9.776168531928901,
"grad_norm": 0.19468551874160767,
"learning_rate": 9.737134115111699e-05,
"loss": 0.013,
"step": 14850
},
{
"epoch": 9.78275181040158,
"grad_norm": 0.26408490538597107,
"learning_rate": 9.736604791190323e-05,
"loss": 0.0181,
"step": 14860
},
{
"epoch": 9.78933508887426,
"grad_norm": 0.2587328255176544,
"learning_rate": 9.73607494928097e-05,
"loss": 0.019,
"step": 14870
},
{
"epoch": 9.795918367346939,
"grad_norm": 0.23688548803329468,
"learning_rate": 9.735544589441581e-05,
"loss": 0.0159,
"step": 14880
},
{
"epoch": 9.802501645819618,
"grad_norm": 0.2512758672237396,
"learning_rate": 9.735013711730154e-05,
"loss": 0.016,
"step": 14890
},
{
"epoch": 9.809084924292298,
"grad_norm": 0.26726773381233215,
"learning_rate": 9.734482316204747e-05,
"loss": 0.0117,
"step": 14900
},
{
"epoch": 9.815668202764977,
"grad_norm": 0.24948666989803314,
"learning_rate": 9.733950402923473e-05,
"loss": 0.0106,
"step": 14910
},
{
"epoch": 9.822251481237656,
"grad_norm": 0.3272800147533417,
"learning_rate": 9.7334179719445e-05,
"loss": 0.0238,
"step": 14920
},
{
"epoch": 9.828834759710336,
"grad_norm": 0.27654412388801575,
"learning_rate": 9.732885023326053e-05,
"loss": 0.0166,
"step": 14930
},
{
"epoch": 9.835418038183015,
"grad_norm": 0.32585039734840393,
"learning_rate": 9.732351557126418e-05,
"loss": 0.0159,
"step": 14940
},
{
"epoch": 9.842001316655695,
"grad_norm": 0.35041555762290955,
"learning_rate": 9.731817573403929e-05,
"loss": 0.0205,
"step": 14950
},
{
"epoch": 9.848584595128374,
"grad_norm": 0.38426727056503296,
"learning_rate": 9.731283072216985e-05,
"loss": 0.0141,
"step": 14960
},
{
"epoch": 9.855167873601053,
"grad_norm": 0.29917630553245544,
"learning_rate": 9.730748053624039e-05,
"loss": 0.0207,
"step": 14970
},
{
"epoch": 9.861751152073733,
"grad_norm": 0.21876105666160583,
"learning_rate": 9.730212517683598e-05,
"loss": 0.0148,
"step": 14980
},
{
"epoch": 9.868334430546412,
"grad_norm": 0.2989797294139862,
"learning_rate": 9.729676464454228e-05,
"loss": 0.0148,
"step": 14990
},
{
"epoch": 9.874917709019092,
"grad_norm": 0.22588540613651276,
"learning_rate": 9.72913989399455e-05,
"loss": 0.015,
"step": 15000
},
{
"epoch": 9.881500987491771,
"grad_norm": 0.28132230043411255,
"learning_rate": 9.728602806363242e-05,
"loss": 0.0153,
"step": 15010
},
{
"epoch": 9.88808426596445,
"grad_norm": 0.3104924261569977,
"learning_rate": 9.728065201619043e-05,
"loss": 0.0131,
"step": 15020
},
{
"epoch": 9.89466754443713,
"grad_norm": 0.2443353831768036,
"learning_rate": 9.727527079820742e-05,
"loss": 0.0198,
"step": 15030
},
{
"epoch": 9.901250822909809,
"grad_norm": 0.3172476589679718,
"learning_rate": 9.726988441027186e-05,
"loss": 0.0147,
"step": 15040
},
{
"epoch": 9.907834101382488,
"grad_norm": 0.3207993507385254,
"learning_rate": 9.726449285297281e-05,
"loss": 0.0149,
"step": 15050
},
{
"epoch": 9.914417379855168,
"grad_norm": 0.3613393306732178,
"learning_rate": 9.72590961268999e-05,
"loss": 0.0208,
"step": 15060
},
{
"epoch": 9.921000658327847,
"grad_norm": 0.3080381155014038,
"learning_rate": 9.725369423264328e-05,
"loss": 0.0155,
"step": 15070
},
{
"epoch": 9.927583936800527,
"grad_norm": 0.24589434266090393,
"learning_rate": 9.72482871707937e-05,
"loss": 0.0126,
"step": 15080
},
{
"epoch": 9.934167215273206,
"grad_norm": 0.191184401512146,
"learning_rate": 9.724287494194247e-05,
"loss": 0.0138,
"step": 15090
},
{
"epoch": 9.940750493745885,
"grad_norm": 0.30196091532707214,
"learning_rate": 9.723745754668147e-05,
"loss": 0.0144,
"step": 15100
},
{
"epoch": 9.947333772218565,
"grad_norm": 0.22865305840969086,
"learning_rate": 9.723203498560313e-05,
"loss": 0.0125,
"step": 15110
},
{
"epoch": 9.953917050691244,
"grad_norm": 0.2847011983394623,
"learning_rate": 9.722660725930046e-05,
"loss": 0.0153,
"step": 15120
},
{
"epoch": 9.960500329163924,
"grad_norm": 0.22358070313930511,
"learning_rate": 9.722117436836702e-05,
"loss": 0.0106,
"step": 15130
},
{
"epoch": 9.967083607636603,
"grad_norm": 0.4344227910041809,
"learning_rate": 9.721573631339696e-05,
"loss": 0.0212,
"step": 15140
},
{
"epoch": 9.973666886109282,
"grad_norm": 0.4170410633087158,
"learning_rate": 9.721029309498494e-05,
"loss": 0.0156,
"step": 15150
},
{
"epoch": 9.980250164581962,
"grad_norm": 0.30752262473106384,
"learning_rate": 9.720484471372627e-05,
"loss": 0.0231,
"step": 15160
},
{
"epoch": 9.98683344305464,
"grad_norm": 0.2725664973258972,
"learning_rate": 9.719939117021673e-05,
"loss": 0.0166,
"step": 15170
},
{
"epoch": 9.993416721527321,
"grad_norm": 0.2802931070327759,
"learning_rate": 9.719393246505275e-05,
"loss": 0.0158,
"step": 15180
},
{
"epoch": 10.0,
"grad_norm": 0.37909621000289917,
"learning_rate": 9.718846859883128e-05,
"loss": 0.0176,
"step": 15190
},
{
"epoch": 10.006583278472679,
"grad_norm": 0.19113107025623322,
"learning_rate": 9.718299957214982e-05,
"loss": 0.018,
"step": 15200
},
{
"epoch": 10.01316655694536,
"grad_norm": 0.394198477268219,
"learning_rate": 9.717752538560646e-05,
"loss": 0.0144,
"step": 15210
},
{
"epoch": 10.019749835418038,
"grad_norm": 0.30720144510269165,
"learning_rate": 9.717204603979986e-05,
"loss": 0.0153,
"step": 15220
},
{
"epoch": 10.026333113890718,
"grad_norm": 0.27015867829322815,
"learning_rate": 9.716656153532922e-05,
"loss": 0.0148,
"step": 15230
},
{
"epoch": 10.032916392363397,
"grad_norm": 0.20855754613876343,
"learning_rate": 9.716107187279434e-05,
"loss": 0.0141,
"step": 15240
},
{
"epoch": 10.039499670836076,
"grad_norm": 0.29502955079078674,
"learning_rate": 9.715557705279555e-05,
"loss": 0.0169,
"step": 15250
},
{
"epoch": 10.046082949308756,
"grad_norm": 0.3268682360649109,
"learning_rate": 9.715007707593372e-05,
"loss": 0.0156,
"step": 15260
},
{
"epoch": 10.052666227781435,
"grad_norm": 0.27503931522369385,
"learning_rate": 9.714457194281036e-05,
"loss": 0.0187,
"step": 15270
},
{
"epoch": 10.059249506254115,
"grad_norm": 0.3133302628993988,
"learning_rate": 9.713906165402751e-05,
"loss": 0.0138,
"step": 15280
},
{
"epoch": 10.065832784726794,
"grad_norm": 0.2011304795742035,
"learning_rate": 9.713354621018774e-05,
"loss": 0.0183,
"step": 15290
},
{
"epoch": 10.072416063199473,
"grad_norm": 0.40640631318092346,
"learning_rate": 9.712802561189422e-05,
"loss": 0.0153,
"step": 15300
},
{
"epoch": 10.078999341672153,
"grad_norm": 0.2884555459022522,
"learning_rate": 9.712249985975069e-05,
"loss": 0.0154,
"step": 15310
},
{
"epoch": 10.085582620144832,
"grad_norm": 0.2679706811904907,
"learning_rate": 9.71169689543614e-05,
"loss": 0.0151,
"step": 15320
},
{
"epoch": 10.09216589861751,
"grad_norm": 0.33826032280921936,
"learning_rate": 9.711143289633123e-05,
"loss": 0.0173,
"step": 15330
},
{
"epoch": 10.098749177090191,
"grad_norm": 0.30218732357025146,
"learning_rate": 9.710589168626561e-05,
"loss": 0.0123,
"step": 15340
},
{
"epoch": 10.10533245556287,
"grad_norm": 0.20553290843963623,
"learning_rate": 9.710034532477048e-05,
"loss": 0.0147,
"step": 15350
},
{
"epoch": 10.11191573403555,
"grad_norm": 0.3158141076564789,
"learning_rate": 9.709479381245239e-05,
"loss": 0.0135,
"step": 15360
},
{
"epoch": 10.118499012508229,
"grad_norm": 0.22012987732887268,
"learning_rate": 9.708923714991847e-05,
"loss": 0.015,
"step": 15370
},
{
"epoch": 10.125082290980908,
"grad_norm": 0.3112947344779968,
"learning_rate": 9.708367533777638e-05,
"loss": 0.0122,
"step": 15380
},
{
"epoch": 10.131665569453588,
"grad_norm": 0.40745672583580017,
"learning_rate": 9.707810837663431e-05,
"loss": 0.0142,
"step": 15390
},
{
"epoch": 10.138248847926267,
"grad_norm": 0.24147891998291016,
"learning_rate": 9.707253626710113e-05,
"loss": 0.0156,
"step": 15400
},
{
"epoch": 10.144832126398947,
"grad_norm": 0.266966313123703,
"learning_rate": 9.706695900978613e-05,
"loss": 0.0147,
"step": 15410
},
{
"epoch": 10.151415404871626,
"grad_norm": 0.323513388633728,
"learning_rate": 9.706137660529926e-05,
"loss": 0.0174,
"step": 15420
},
{
"epoch": 10.157998683344305,
"grad_norm": 0.3009984493255615,
"learning_rate": 9.705578905425101e-05,
"loss": 0.0141,
"step": 15430
},
{
"epoch": 10.164581961816985,
"grad_norm": 0.2053128182888031,
"learning_rate": 9.705019635725241e-05,
"loss": 0.0134,
"step": 15440
},
{
"epoch": 10.171165240289664,
"grad_norm": 0.24102552235126495,
"learning_rate": 9.704459851491508e-05,
"loss": 0.0163,
"step": 15450
},
{
"epoch": 10.177748518762344,
"grad_norm": 0.17827284336090088,
"learning_rate": 9.703899552785118e-05,
"loss": 0.0163,
"step": 15460
},
{
"epoch": 10.184331797235023,
"grad_norm": 0.2356404811143875,
"learning_rate": 9.703338739667346e-05,
"loss": 0.0116,
"step": 15470
},
{
"epoch": 10.190915075707702,
"grad_norm": 0.2633216977119446,
"learning_rate": 9.70277741219952e-05,
"loss": 0.016,
"step": 15480
},
{
"epoch": 10.197498354180382,
"grad_norm": 0.3446008563041687,
"learning_rate": 9.702215570443027e-05,
"loss": 0.0118,
"step": 15490
},
{
"epoch": 10.204081632653061,
"grad_norm": 0.21019017696380615,
"learning_rate": 9.701653214459309e-05,
"loss": 0.0145,
"step": 15500
},
{
"epoch": 10.210664911125741,
"grad_norm": 0.21062102913856506,
"learning_rate": 9.701090344309865e-05,
"loss": 0.0116,
"step": 15510
},
{
"epoch": 10.21724818959842,
"grad_norm": 0.2719181478023529,
"learning_rate": 9.700526960056247e-05,
"loss": 0.0137,
"step": 15520
},
{
"epoch": 10.223831468071099,
"grad_norm": 0.31443876028060913,
"learning_rate": 9.699963061760068e-05,
"loss": 0.0091,
"step": 15530
},
{
"epoch": 10.23041474654378,
"grad_norm": 0.31492510437965393,
"learning_rate": 9.699398649482997e-05,
"loss": 0.0133,
"step": 15540
},
{
"epoch": 10.236998025016458,
"grad_norm": 0.3109484016895294,
"learning_rate": 9.698833723286753e-05,
"loss": 0.0172,
"step": 15550
},
{
"epoch": 10.243581303489137,
"grad_norm": 0.32065045833587646,
"learning_rate": 9.698268283233118e-05,
"loss": 0.016,
"step": 15560
},
{
"epoch": 10.250164581961817,
"grad_norm": 0.28147438168525696,
"learning_rate": 9.697702329383929e-05,
"loss": 0.0143,
"step": 15570
},
{
"epoch": 10.256747860434496,
"grad_norm": 0.26053377985954285,
"learning_rate": 9.697135861801074e-05,
"loss": 0.0121,
"step": 15580
},
{
"epoch": 10.263331138907176,
"grad_norm": 0.3274065852165222,
"learning_rate": 9.696568880546505e-05,
"loss": 0.0198,
"step": 15590
},
{
"epoch": 10.269914417379855,
"grad_norm": 0.2635926604270935,
"learning_rate": 9.696001385682223e-05,
"loss": 0.0109,
"step": 15600
},
{
"epoch": 10.276497695852534,
"grad_norm": 0.20655913650989532,
"learning_rate": 9.695433377270291e-05,
"loss": 0.0145,
"step": 15610
},
{
"epoch": 10.283080974325214,
"grad_norm": 0.27768421173095703,
"learning_rate": 9.694864855372824e-05,
"loss": 0.0179,
"step": 15620
},
{
"epoch": 10.289664252797893,
"grad_norm": 0.24878139793872833,
"learning_rate": 9.694295820051995e-05,
"loss": 0.0245,
"step": 15630
},
{
"epoch": 10.296247531270573,
"grad_norm": 0.30092543363571167,
"learning_rate": 9.693726271370032e-05,
"loss": 0.0148,
"step": 15640
},
{
"epoch": 10.302830809743252,
"grad_norm": 0.30754855275154114,
"learning_rate": 9.693156209389221e-05,
"loss": 0.0203,
"step": 15650
},
{
"epoch": 10.30941408821593,
"grad_norm": 0.2890348434448242,
"learning_rate": 9.692585634171905e-05,
"loss": 0.024,
"step": 15660
},
{
"epoch": 10.315997366688611,
"grad_norm": 0.2974121868610382,
"learning_rate": 9.692014545780476e-05,
"loss": 0.0137,
"step": 15670
},
{
"epoch": 10.32258064516129,
"grad_norm": 0.2702404856681824,
"learning_rate": 9.691442944277393e-05,
"loss": 0.0174,
"step": 15680
},
{
"epoch": 10.32916392363397,
"grad_norm": 0.30624863505363464,
"learning_rate": 9.690870829725162e-05,
"loss": 0.0154,
"step": 15690
},
{
"epoch": 10.33574720210665,
"grad_norm": 0.3077887296676636,
"learning_rate": 9.69029820218635e-05,
"loss": 0.0147,
"step": 15700
},
{
"epoch": 10.342330480579328,
"grad_norm": 0.3283807337284088,
"learning_rate": 9.689725061723579e-05,
"loss": 0.0144,
"step": 15710
},
{
"epoch": 10.348913759052008,
"grad_norm": 0.24190741777420044,
"learning_rate": 9.689151408399527e-05,
"loss": 0.017,
"step": 15720
},
{
"epoch": 10.355497037524687,
"grad_norm": 0.22236356139183044,
"learning_rate": 9.688577242276924e-05,
"loss": 0.0168,
"step": 15730
},
{
"epoch": 10.362080315997368,
"grad_norm": 0.1556847095489502,
"learning_rate": 9.688002563418566e-05,
"loss": 0.0151,
"step": 15740
},
{
"epoch": 10.368663594470046,
"grad_norm": 0.2426396906375885,
"learning_rate": 9.687427371887293e-05,
"loss": 0.018,
"step": 15750
},
{
"epoch": 10.375246872942725,
"grad_norm": 0.34557875990867615,
"learning_rate": 9.686851667746012e-05,
"loss": 0.016,
"step": 15760
},
{
"epoch": 10.381830151415405,
"grad_norm": 0.2825423777103424,
"learning_rate": 9.686275451057677e-05,
"loss": 0.0169,
"step": 15770
},
{
"epoch": 10.388413429888084,
"grad_norm": 0.30879515409469604,
"learning_rate": 9.685698721885308e-05,
"loss": 0.0148,
"step": 15780
},
{
"epoch": 10.394996708360763,
"grad_norm": 0.36233267188072205,
"learning_rate": 9.68512148029197e-05,
"loss": 0.0195,
"step": 15790
},
{
"epoch": 10.401579986833443,
"grad_norm": 0.2731311023235321,
"learning_rate": 9.684543726340791e-05,
"loss": 0.0126,
"step": 15800
},
{
"epoch": 10.408163265306122,
"grad_norm": 0.3801662027835846,
"learning_rate": 9.683965460094952e-05,
"loss": 0.0205,
"step": 15810
},
{
"epoch": 10.414746543778802,
"grad_norm": 0.23723062872886658,
"learning_rate": 9.683386681617694e-05,
"loss": 0.0159,
"step": 15820
},
{
"epoch": 10.421329822251481,
"grad_norm": 0.24990543723106384,
"learning_rate": 9.68280739097231e-05,
"loss": 0.0163,
"step": 15830
},
{
"epoch": 10.42791310072416,
"grad_norm": 0.18654191493988037,
"learning_rate": 9.682227588222148e-05,
"loss": 0.0142,
"step": 15840
},
{
"epoch": 10.43449637919684,
"grad_norm": 0.25609639286994934,
"learning_rate": 9.681647273430618e-05,
"loss": 0.0116,
"step": 15850
},
{
"epoch": 10.441079657669519,
"grad_norm": 0.31502842903137207,
"learning_rate": 9.681066446661182e-05,
"loss": 0.0193,
"step": 15860
},
{
"epoch": 10.4476629361422,
"grad_norm": 0.1982906609773636,
"learning_rate": 9.680485107977357e-05,
"loss": 0.0145,
"step": 15870
},
{
"epoch": 10.454246214614878,
"grad_norm": 0.3008141815662384,
"learning_rate": 9.679903257442716e-05,
"loss": 0.0148,
"step": 15880
},
{
"epoch": 10.460829493087557,
"grad_norm": 0.276407390832901,
"learning_rate": 9.679320895120891e-05,
"loss": 0.0198,
"step": 15890
},
{
"epoch": 10.467412771560237,
"grad_norm": 0.2045586258172989,
"learning_rate": 9.67873802107557e-05,
"loss": 0.0199,
"step": 15900
},
{
"epoch": 10.473996050032916,
"grad_norm": 0.33652809262275696,
"learning_rate": 9.67815463537049e-05,
"loss": 0.0197,
"step": 15910
},
{
"epoch": 10.480579328505597,
"grad_norm": 0.2981519401073456,
"learning_rate": 9.677570738069457e-05,
"loss": 0.0228,
"step": 15920
},
{
"epoch": 10.487162606978275,
"grad_norm": 0.24559056758880615,
"learning_rate": 9.676986329236318e-05,
"loss": 0.0158,
"step": 15930
},
{
"epoch": 10.493745885450954,
"grad_norm": 0.3628149926662445,
"learning_rate": 9.676401408934987e-05,
"loss": 0.0179,
"step": 15940
},
{
"epoch": 10.500329163923634,
"grad_norm": 0.319520503282547,
"learning_rate": 9.675815977229428e-05,
"loss": 0.0219,
"step": 15950
},
{
"epoch": 10.506912442396313,
"grad_norm": 0.28974470496177673,
"learning_rate": 9.675230034183664e-05,
"loss": 0.0262,
"step": 15960
},
{
"epoch": 10.513495720868992,
"grad_norm": 0.37616097927093506,
"learning_rate": 9.674643579861773e-05,
"loss": 0.0148,
"step": 15970
},
{
"epoch": 10.520078999341672,
"grad_norm": 0.2945089340209961,
"learning_rate": 9.674056614327886e-05,
"loss": 0.0215,
"step": 15980
},
{
"epoch": 10.526662277814351,
"grad_norm": 0.31004783511161804,
"learning_rate": 9.673469137646198e-05,
"loss": 0.0136,
"step": 15990
},
{
"epoch": 10.533245556287032,
"grad_norm": 0.24885942041873932,
"learning_rate": 9.67288114988095e-05,
"loss": 0.0186,
"step": 16000
},
{
"epoch": 10.53982883475971,
"grad_norm": 0.2841681241989136,
"learning_rate": 9.672292651096447e-05,
"loss": 0.02,
"step": 16010
},
{
"epoch": 10.546412113232389,
"grad_norm": 0.23327705264091492,
"learning_rate": 9.671703641357042e-05,
"loss": 0.0161,
"step": 16020
},
{
"epoch": 10.55299539170507,
"grad_norm": 0.2775796949863434,
"learning_rate": 9.67111412072715e-05,
"loss": 0.0176,
"step": 16030
},
{
"epoch": 10.559578670177748,
"grad_norm": 0.28706464171409607,
"learning_rate": 9.670524089271242e-05,
"loss": 0.0139,
"step": 16040
},
{
"epoch": 10.566161948650429,
"grad_norm": 0.2590261995792389,
"learning_rate": 9.669933547053842e-05,
"loss": 0.0181,
"step": 16050
},
{
"epoch": 10.572745227123107,
"grad_norm": 0.26913416385650635,
"learning_rate": 9.669342494139531e-05,
"loss": 0.0146,
"step": 16060
},
{
"epoch": 10.579328505595786,
"grad_norm": 0.2555517256259918,
"learning_rate": 9.668750930592943e-05,
"loss": 0.0146,
"step": 16070
},
{
"epoch": 10.585911784068466,
"grad_norm": 0.2189902365207672,
"learning_rate": 9.668158856478775e-05,
"loss": 0.0111,
"step": 16080
},
{
"epoch": 10.592495062541145,
"grad_norm": 0.22816002368927002,
"learning_rate": 9.66756627186177e-05,
"loss": 0.0132,
"step": 16090
},
{
"epoch": 10.599078341013826,
"grad_norm": 0.25368914008140564,
"learning_rate": 9.666973176806737e-05,
"loss": 0.0118,
"step": 16100
},
{
"epoch": 10.605661619486504,
"grad_norm": 0.27892035245895386,
"learning_rate": 9.666379571378534e-05,
"loss": 0.0154,
"step": 16110
},
{
"epoch": 10.612244897959183,
"grad_norm": 0.21458196640014648,
"learning_rate": 9.665785455642076e-05,
"loss": 0.0153,
"step": 16120
},
{
"epoch": 10.618828176431863,
"grad_norm": 0.23175175487995148,
"learning_rate": 9.665190829662337e-05,
"loss": 0.0174,
"step": 16130
},
{
"epoch": 10.625411454904542,
"grad_norm": 0.16731104254722595,
"learning_rate": 9.664595693504342e-05,
"loss": 0.0101,
"step": 16140
},
{
"epoch": 10.631994733377223,
"grad_norm": 0.19103018939495087,
"learning_rate": 9.664000047233175e-05,
"loss": 0.0139,
"step": 16150
},
{
"epoch": 10.638578011849901,
"grad_norm": 0.23936223983764648,
"learning_rate": 9.663403890913976e-05,
"loss": 0.0152,
"step": 16160
},
{
"epoch": 10.64516129032258,
"grad_norm": 0.2621452212333679,
"learning_rate": 9.662807224611938e-05,
"loss": 0.0134,
"step": 16170
},
{
"epoch": 10.65174456879526,
"grad_norm": 0.27436408400535583,
"learning_rate": 9.662210048392311e-05,
"loss": 0.0126,
"step": 16180
},
{
"epoch": 10.65832784726794,
"grad_norm": 0.19598782062530518,
"learning_rate": 9.661612362320405e-05,
"loss": 0.014,
"step": 16190
},
{
"epoch": 10.66491112574062,
"grad_norm": 0.2814145088195801,
"learning_rate": 9.661014166461579e-05,
"loss": 0.0124,
"step": 16200
},
{
"epoch": 10.671494404213298,
"grad_norm": 0.25364986062049866,
"learning_rate": 9.66041546088125e-05,
"loss": 0.0144,
"step": 16210
},
{
"epoch": 10.678077682685977,
"grad_norm": 0.3888259530067444,
"learning_rate": 9.659816245644895e-05,
"loss": 0.0156,
"step": 16220
},
{
"epoch": 10.684660961158658,
"grad_norm": 0.3312648832798004,
"learning_rate": 9.65921652081804e-05,
"loss": 0.0136,
"step": 16230
},
{
"epoch": 10.691244239631336,
"grad_norm": 0.290276437997818,
"learning_rate": 9.658616286466271e-05,
"loss": 0.016,
"step": 16240
},
{
"epoch": 10.697827518104015,
"grad_norm": 0.2101401835680008,
"learning_rate": 9.65801554265523e-05,
"loss": 0.0142,
"step": 16250
},
{
"epoch": 10.704410796576695,
"grad_norm": 0.14616473019123077,
"learning_rate": 9.657414289450612e-05,
"loss": 0.0115,
"step": 16260
},
{
"epoch": 10.710994075049374,
"grad_norm": 0.37345191836357117,
"learning_rate": 9.656812526918171e-05,
"loss": 0.0127,
"step": 16270
},
{
"epoch": 10.717577353522055,
"grad_norm": 0.24671819806098938,
"learning_rate": 9.656210255123712e-05,
"loss": 0.0188,
"step": 16280
},
{
"epoch": 10.724160631994733,
"grad_norm": 0.2507968246936798,
"learning_rate": 9.6556074741331e-05,
"loss": 0.0147,
"step": 16290
},
{
"epoch": 10.730743910467412,
"grad_norm": 0.32088175415992737,
"learning_rate": 9.655004184012256e-05,
"loss": 0.0142,
"step": 16300
},
{
"epoch": 10.737327188940093,
"grad_norm": 0.19999663531780243,
"learning_rate": 9.654400384827152e-05,
"loss": 0.0158,
"step": 16310
},
{
"epoch": 10.743910467412771,
"grad_norm": 0.24240942299365997,
"learning_rate": 9.653796076643818e-05,
"loss": 0.0129,
"step": 16320
},
{
"epoch": 10.750493745885452,
"grad_norm": 0.3254489302635193,
"learning_rate": 9.653191259528344e-05,
"loss": 0.0124,
"step": 16330
},
{
"epoch": 10.75707702435813,
"grad_norm": 0.3790977895259857,
"learning_rate": 9.65258593354687e-05,
"loss": 0.0162,
"step": 16340
},
{
"epoch": 10.763660302830809,
"grad_norm": 0.2869807481765747,
"learning_rate": 9.651980098765591e-05,
"loss": 0.0168,
"step": 16350
},
{
"epoch": 10.77024358130349,
"grad_norm": 0.190029114484787,
"learning_rate": 9.651373755250765e-05,
"loss": 0.015,
"step": 16360
},
{
"epoch": 10.776826859776168,
"grad_norm": 0.21167069673538208,
"learning_rate": 9.650766903068697e-05,
"loss": 0.0166,
"step": 16370
},
{
"epoch": 10.783410138248849,
"grad_norm": 0.19293388724327087,
"learning_rate": 9.650159542285753e-05,
"loss": 0.0133,
"step": 16380
},
{
"epoch": 10.789993416721527,
"grad_norm": 0.29362258315086365,
"learning_rate": 9.649551672968353e-05,
"loss": 0.0176,
"step": 16390
},
{
"epoch": 10.796576695194206,
"grad_norm": 0.3098928928375244,
"learning_rate": 9.648943295182973e-05,
"loss": 0.0124,
"step": 16400
},
{
"epoch": 10.803159973666887,
"grad_norm": 0.28814199566841125,
"learning_rate": 9.648334408996144e-05,
"loss": 0.0131,
"step": 16410
},
{
"epoch": 10.809743252139565,
"grad_norm": 0.23959976434707642,
"learning_rate": 9.647725014474452e-05,
"loss": 0.0179,
"step": 16420
},
{
"epoch": 10.816326530612244,
"grad_norm": 0.2196599245071411,
"learning_rate": 9.64711511168454e-05,
"loss": 0.0183,
"step": 16430
},
{
"epoch": 10.822909809084925,
"grad_norm": 0.16412965953350067,
"learning_rate": 9.646504700693108e-05,
"loss": 0.0116,
"step": 16440
},
{
"epoch": 10.829493087557603,
"grad_norm": 0.21654629707336426,
"learning_rate": 9.645893781566907e-05,
"loss": 0.0145,
"step": 16450
},
{
"epoch": 10.836076366030284,
"grad_norm": 0.2961553931236267,
"learning_rate": 9.645282354372744e-05,
"loss": 0.0169,
"step": 16460
},
{
"epoch": 10.842659644502962,
"grad_norm": 0.22558678686618805,
"learning_rate": 9.644670419177491e-05,
"loss": 0.0111,
"step": 16470
},
{
"epoch": 10.849242922975641,
"grad_norm": 0.2290339469909668,
"learning_rate": 9.644057976048062e-05,
"loss": 0.0142,
"step": 16480
},
{
"epoch": 10.855826201448322,
"grad_norm": 0.29076334834098816,
"learning_rate": 9.643445025051435e-05,
"loss": 0.0133,
"step": 16490
},
{
"epoch": 10.862409479921,
"grad_norm": 0.2640547454357147,
"learning_rate": 9.642831566254641e-05,
"loss": 0.0141,
"step": 16500
},
{
"epoch": 10.86899275839368,
"grad_norm": 0.2584684491157532,
"learning_rate": 9.642217599724769e-05,
"loss": 0.0134,
"step": 16510
},
{
"epoch": 10.87557603686636,
"grad_norm": 0.20886312425136566,
"learning_rate": 9.64160312552896e-05,
"loss": 0.013,
"step": 16520
},
{
"epoch": 10.882159315339038,
"grad_norm": 0.3242923617362976,
"learning_rate": 9.64098814373441e-05,
"loss": 0.0203,
"step": 16530
},
{
"epoch": 10.888742593811719,
"grad_norm": 0.3009207844734192,
"learning_rate": 9.640372654408374e-05,
"loss": 0.0239,
"step": 16540
},
{
"epoch": 10.895325872284397,
"grad_norm": 0.24736428260803223,
"learning_rate": 9.639756657618162e-05,
"loss": 0.0181,
"step": 16550
},
{
"epoch": 10.901909150757078,
"grad_norm": 0.202747642993927,
"learning_rate": 9.639140153431138e-05,
"loss": 0.0132,
"step": 16560
},
{
"epoch": 10.908492429229756,
"grad_norm": 0.2655058801174164,
"learning_rate": 9.638523141914721e-05,
"loss": 0.0179,
"step": 16570
},
{
"epoch": 10.915075707702435,
"grad_norm": 0.26304811239242554,
"learning_rate": 9.637905623136388e-05,
"loss": 0.0114,
"step": 16580
},
{
"epoch": 10.921658986175116,
"grad_norm": 0.305568665266037,
"learning_rate": 9.637287597163669e-05,
"loss": 0.0132,
"step": 16590
},
{
"epoch": 10.928242264647794,
"grad_norm": 0.20936600863933563,
"learning_rate": 9.63666906406415e-05,
"loss": 0.01,
"step": 16600
},
{
"epoch": 10.934825543120475,
"grad_norm": 0.30186283588409424,
"learning_rate": 9.636050023905473e-05,
"loss": 0.0146,
"step": 16610
},
{
"epoch": 10.941408821593154,
"grad_norm": 0.30084389448165894,
"learning_rate": 9.635430476755336e-05,
"loss": 0.0221,
"step": 16620
},
{
"epoch": 10.947992100065832,
"grad_norm": 0.2260257601737976,
"learning_rate": 9.63481042268149e-05,
"loss": 0.0172,
"step": 16630
},
{
"epoch": 10.954575378538513,
"grad_norm": 0.32619011402130127,
"learning_rate": 9.634189861751745e-05,
"loss": 0.0151,
"step": 16640
},
{
"epoch": 10.961158657011191,
"grad_norm": 0.327972412109375,
"learning_rate": 9.633568794033967e-05,
"loss": 0.015,
"step": 16650
},
{
"epoch": 10.967741935483872,
"grad_norm": 0.283176988363266,
"learning_rate": 9.63294721959607e-05,
"loss": 0.0181,
"step": 16660
},
{
"epoch": 10.97432521395655,
"grad_norm": 0.2560659646987915,
"learning_rate": 9.63232513850603e-05,
"loss": 0.014,
"step": 16670
},
{
"epoch": 10.98090849242923,
"grad_norm": 0.23773403465747833,
"learning_rate": 9.631702550831878e-05,
"loss": 0.0117,
"step": 16680
},
{
"epoch": 10.98749177090191,
"grad_norm": 0.2701393961906433,
"learning_rate": 9.631079456641698e-05,
"loss": 0.0166,
"step": 16690
},
{
"epoch": 10.994075049374588,
"grad_norm": 0.28061920404434204,
"learning_rate": 9.630455856003632e-05,
"loss": 0.0129,
"step": 16700
},
{
"epoch": 11.000658327847267,
"grad_norm": 0.26682597398757935,
"learning_rate": 9.629831748985876e-05,
"loss": 0.0126,
"step": 16710
},
{
"epoch": 11.007241606319948,
"grad_norm": 0.2745446264743805,
"learning_rate": 9.629207135656679e-05,
"loss": 0.0156,
"step": 16720
},
{
"epoch": 11.013824884792626,
"grad_norm": 0.16933688521385193,
"learning_rate": 9.628582016084353e-05,
"loss": 0.0106,
"step": 16730
},
{
"epoch": 11.020408163265307,
"grad_norm": 0.22354745864868164,
"learning_rate": 9.627956390337254e-05,
"loss": 0.0106,
"step": 16740
},
{
"epoch": 11.026991441737986,
"grad_norm": 0.28213855624198914,
"learning_rate": 9.627330258483802e-05,
"loss": 0.0133,
"step": 16750
},
{
"epoch": 11.033574720210664,
"grad_norm": 0.18230313062667847,
"learning_rate": 9.62670362059247e-05,
"loss": 0.0143,
"step": 16760
},
{
"epoch": 11.040157998683345,
"grad_norm": 0.22066256403923035,
"learning_rate": 9.626076476731786e-05,
"loss": 0.0128,
"step": 16770
},
{
"epoch": 11.046741277156023,
"grad_norm": 0.28549712896347046,
"learning_rate": 9.625448826970336e-05,
"loss": 0.0167,
"step": 16780
},
{
"epoch": 11.053324555628704,
"grad_norm": 0.32958680391311646,
"learning_rate": 9.624820671376755e-05,
"loss": 0.0126,
"step": 16790
},
{
"epoch": 11.059907834101383,
"grad_norm": 0.19280874729156494,
"learning_rate": 9.62419201001974e-05,
"loss": 0.0093,
"step": 16800
},
{
"epoch": 11.066491112574061,
"grad_norm": 0.2638837993144989,
"learning_rate": 9.623562842968037e-05,
"loss": 0.0123,
"step": 16810
},
{
"epoch": 11.073074391046742,
"grad_norm": 0.29600128531455994,
"learning_rate": 9.622933170290454e-05,
"loss": 0.0143,
"step": 16820
},
{
"epoch": 11.07965766951942,
"grad_norm": 0.23162941634655,
"learning_rate": 9.622302992055849e-05,
"loss": 0.0131,
"step": 16830
},
{
"epoch": 11.086240947992101,
"grad_norm": 0.20874431729316711,
"learning_rate": 9.62167230833314e-05,
"loss": 0.0139,
"step": 16840
},
{
"epoch": 11.09282422646478,
"grad_norm": 0.2711836099624634,
"learning_rate": 9.621041119191295e-05,
"loss": 0.0133,
"step": 16850
},
{
"epoch": 11.099407504937458,
"grad_norm": 0.2752048969268799,
"learning_rate": 9.620409424699342e-05,
"loss": 0.0147,
"step": 16860
},
{
"epoch": 11.105990783410139,
"grad_norm": 0.3085440993309021,
"learning_rate": 9.619777224926359e-05,
"loss": 0.0123,
"step": 16870
},
{
"epoch": 11.112574061882817,
"grad_norm": 0.2839735448360443,
"learning_rate": 9.619144519941485e-05,
"loss": 0.0139,
"step": 16880
},
{
"epoch": 11.119157340355496,
"grad_norm": 0.2748444676399231,
"learning_rate": 9.618511309813912e-05,
"loss": 0.0119,
"step": 16890
},
{
"epoch": 11.125740618828177,
"grad_norm": 0.3296380341053009,
"learning_rate": 9.617877594612886e-05,
"loss": 0.0137,
"step": 16900
},
{
"epoch": 11.132323897300855,
"grad_norm": 0.20475225150585175,
"learning_rate": 9.617243374407707e-05,
"loss": 0.0103,
"step": 16910
},
{
"epoch": 11.138907175773536,
"grad_norm": 0.20466870069503784,
"learning_rate": 9.616608649267736e-05,
"loss": 0.0108,
"step": 16920
},
{
"epoch": 11.145490454246215,
"grad_norm": 0.2670227289199829,
"learning_rate": 9.615973419262385e-05,
"loss": 0.0136,
"step": 16930
},
{
"epoch": 11.152073732718893,
"grad_norm": 0.2923719882965088,
"learning_rate": 9.615337684461119e-05,
"loss": 0.0113,
"step": 16940
},
{
"epoch": 11.158657011191574,
"grad_norm": 0.23016898334026337,
"learning_rate": 9.614701444933465e-05,
"loss": 0.0129,
"step": 16950
},
{
"epoch": 11.165240289664252,
"grad_norm": 0.33124321699142456,
"learning_rate": 9.614064700748997e-05,
"loss": 0.0239,
"step": 16960
},
{
"epoch": 11.171823568136933,
"grad_norm": 0.21006736159324646,
"learning_rate": 9.613427451977352e-05,
"loss": 0.0114,
"step": 16970
},
{
"epoch": 11.178406846609612,
"grad_norm": 0.29312625527381897,
"learning_rate": 9.612789698688216e-05,
"loss": 0.0194,
"step": 16980
},
{
"epoch": 11.18499012508229,
"grad_norm": 0.236184760928154,
"learning_rate": 9.612151440951334e-05,
"loss": 0.0112,
"step": 16990
},
{
"epoch": 11.19157340355497,
"grad_norm": 0.2833174169063568,
"learning_rate": 9.611512678836506e-05,
"loss": 0.0132,
"step": 17000
},
{
"epoch": 11.19815668202765,
"grad_norm": 0.19580461084842682,
"learning_rate": 9.610873412413584e-05,
"loss": 0.0156,
"step": 17010
},
{
"epoch": 11.20473996050033,
"grad_norm": 0.1716928333044052,
"learning_rate": 9.610233641752476e-05,
"loss": 0.0156,
"step": 17020
},
{
"epoch": 11.211323238973009,
"grad_norm": 0.2890176475048065,
"learning_rate": 9.609593366923151e-05,
"loss": 0.0131,
"step": 17030
},
{
"epoch": 11.217906517445687,
"grad_norm": 0.2618769705295563,
"learning_rate": 9.608952587995625e-05,
"loss": 0.0146,
"step": 17040
},
{
"epoch": 11.224489795918368,
"grad_norm": 0.18812201917171478,
"learning_rate": 9.608311305039972e-05,
"loss": 0.0116,
"step": 17050
},
{
"epoch": 11.231073074391047,
"grad_norm": 0.26689669489860535,
"learning_rate": 9.607669518126326e-05,
"loss": 0.0122,
"step": 17060
},
{
"epoch": 11.237656352863727,
"grad_norm": 0.35730162262916565,
"learning_rate": 9.607027227324866e-05,
"loss": 0.0112,
"step": 17070
},
{
"epoch": 11.244239631336406,
"grad_norm": 0.2858322858810425,
"learning_rate": 9.606384432705837e-05,
"loss": 0.0135,
"step": 17080
},
{
"epoch": 11.250822909809084,
"grad_norm": 0.302531361579895,
"learning_rate": 9.60574113433953e-05,
"loss": 0.0178,
"step": 17090
},
{
"epoch": 11.257406188281765,
"grad_norm": 0.2280137985944748,
"learning_rate": 9.6050973322963e-05,
"loss": 0.0144,
"step": 17100
},
{
"epoch": 11.263989466754444,
"grad_norm": 0.2035662978887558,
"learning_rate": 9.604453026646547e-05,
"loss": 0.0125,
"step": 17110
},
{
"epoch": 11.270572745227122,
"grad_norm": 0.24414938688278198,
"learning_rate": 9.603808217460735e-05,
"loss": 0.0119,
"step": 17120
},
{
"epoch": 11.277156023699803,
"grad_norm": 0.26986372470855713,
"learning_rate": 9.603162904809377e-05,
"loss": 0.0161,
"step": 17130
},
{
"epoch": 11.283739302172481,
"grad_norm": 0.30425772070884705,
"learning_rate": 9.602517088763045e-05,
"loss": 0.0174,
"step": 17140
},
{
"epoch": 11.290322580645162,
"grad_norm": 0.24080686271190643,
"learning_rate": 9.601870769392365e-05,
"loss": 0.0132,
"step": 17150
},
{
"epoch": 11.29690585911784,
"grad_norm": 0.26470744609832764,
"learning_rate": 9.601223946768017e-05,
"loss": 0.0148,
"step": 17160
},
{
"epoch": 11.30348913759052,
"grad_norm": 0.19979465007781982,
"learning_rate": 9.600576620960734e-05,
"loss": 0.0092,
"step": 17170
},
{
"epoch": 11.3100724160632,
"grad_norm": 0.23830321431159973,
"learning_rate": 9.599928792041308e-05,
"loss": 0.0152,
"step": 17180
},
{
"epoch": 11.316655694535878,
"grad_norm": 0.2547787129878998,
"learning_rate": 9.599280460080587e-05,
"loss": 0.0161,
"step": 17190
},
{
"epoch": 11.323238973008559,
"grad_norm": 0.45135146379470825,
"learning_rate": 9.59863162514947e-05,
"loss": 0.016,
"step": 17200
},
{
"epoch": 11.329822251481238,
"grad_norm": 0.287458211183548,
"learning_rate": 9.597982287318911e-05,
"loss": 0.0135,
"step": 17210
},
{
"epoch": 11.336405529953916,
"grad_norm": 0.3474227786064148,
"learning_rate": 9.597332446659923e-05,
"loss": 0.0159,
"step": 17220
},
{
"epoch": 11.342988808426597,
"grad_norm": 0.34872138500213623,
"learning_rate": 9.59668210324357e-05,
"loss": 0.0151,
"step": 17230
},
{
"epoch": 11.349572086899276,
"grad_norm": 0.1969517171382904,
"learning_rate": 9.596031257140974e-05,
"loss": 0.0159,
"step": 17240
},
{
"epoch": 11.356155365371956,
"grad_norm": 0.17637105286121368,
"learning_rate": 9.59537990842331e-05,
"loss": 0.0125,
"step": 17250
},
{
"epoch": 11.362738643844635,
"grad_norm": 0.31508952379226685,
"learning_rate": 9.594728057161806e-05,
"loss": 0.0144,
"step": 17260
},
{
"epoch": 11.369321922317313,
"grad_norm": 0.23208802938461304,
"learning_rate": 9.594075703427752e-05,
"loss": 0.0144,
"step": 17270
},
{
"epoch": 11.375905200789994,
"grad_norm": 0.26345616579055786,
"learning_rate": 9.593422847292486e-05,
"loss": 0.0116,
"step": 17280
},
{
"epoch": 11.382488479262673,
"grad_norm": 0.3005332052707672,
"learning_rate": 9.592769488827402e-05,
"loss": 0.0149,
"step": 17290
},
{
"epoch": 11.389071757735351,
"grad_norm": 0.29459723830223083,
"learning_rate": 9.592115628103952e-05,
"loss": 0.0157,
"step": 17300
},
{
"epoch": 11.395655036208032,
"grad_norm": 0.19516821205615997,
"learning_rate": 9.591461265193643e-05,
"loss": 0.0121,
"step": 17310
},
{
"epoch": 11.40223831468071,
"grad_norm": 0.3573923110961914,
"learning_rate": 9.590806400168032e-05,
"loss": 0.0144,
"step": 17320
},
{
"epoch": 11.408821593153391,
"grad_norm": 0.2713916003704071,
"learning_rate": 9.590151033098735e-05,
"loss": 0.0122,
"step": 17330
},
{
"epoch": 11.41540487162607,
"grad_norm": 0.28751304745674133,
"learning_rate": 9.589495164057423e-05,
"loss": 0.0162,
"step": 17340
},
{
"epoch": 11.421988150098748,
"grad_norm": 0.33724692463874817,
"learning_rate": 9.58883879311582e-05,
"loss": 0.0154,
"step": 17350
},
{
"epoch": 11.428571428571429,
"grad_norm": 0.24834242463111877,
"learning_rate": 9.588181920345705e-05,
"loss": 0.0203,
"step": 17360
},
{
"epoch": 11.435154707044108,
"grad_norm": 0.19150114059448242,
"learning_rate": 9.587524545818913e-05,
"loss": 0.012,
"step": 17370
},
{
"epoch": 11.441737985516788,
"grad_norm": 0.29528844356536865,
"learning_rate": 9.586866669607335e-05,
"loss": 0.0142,
"step": 17380
},
{
"epoch": 11.448321263989467,
"grad_norm": 0.2387654185295105,
"learning_rate": 9.586208291782915e-05,
"loss": 0.0174,
"step": 17390
},
{
"epoch": 11.454904542462145,
"grad_norm": 0.27042609453201294,
"learning_rate": 9.58554941241765e-05,
"loss": 0.0119,
"step": 17400
},
{
"epoch": 11.461487820934826,
"grad_norm": 0.2882551848888397,
"learning_rate": 9.584890031583596e-05,
"loss": 0.0123,
"step": 17410
},
{
"epoch": 11.468071099407505,
"grad_norm": 0.23587967455387115,
"learning_rate": 9.584230149352861e-05,
"loss": 0.0113,
"step": 17420
},
{
"epoch": 11.474654377880185,
"grad_norm": 0.2878916263580322,
"learning_rate": 9.58356976579761e-05,
"loss": 0.0162,
"step": 17430
},
{
"epoch": 11.481237656352864,
"grad_norm": 0.3046250343322754,
"learning_rate": 9.58290888099006e-05,
"loss": 0.0139,
"step": 17440
},
{
"epoch": 11.487820934825542,
"grad_norm": 0.2953943610191345,
"learning_rate": 9.582247495002486e-05,
"loss": 0.0161,
"step": 17450
},
{
"epoch": 11.494404213298223,
"grad_norm": 0.26791849732398987,
"learning_rate": 9.581585607907214e-05,
"loss": 0.017,
"step": 17460
},
{
"epoch": 11.500987491770902,
"grad_norm": 0.20201347768306732,
"learning_rate": 9.580923219776628e-05,
"loss": 0.0102,
"step": 17470
},
{
"epoch": 11.507570770243582,
"grad_norm": 0.24374482035636902,
"learning_rate": 9.580260330683167e-05,
"loss": 0.0148,
"step": 17480
},
{
"epoch": 11.51415404871626,
"grad_norm": 0.22874702513217926,
"learning_rate": 9.579596940699322e-05,
"loss": 0.016,
"step": 17490
},
{
"epoch": 11.52073732718894,
"grad_norm": 0.26501190662384033,
"learning_rate": 9.578933049897643e-05,
"loss": 0.0123,
"step": 17500
},
{
"epoch": 11.52732060566162,
"grad_norm": 0.2637917995452881,
"learning_rate": 9.578268658350728e-05,
"loss": 0.0136,
"step": 17510
},
{
"epoch": 11.533903884134299,
"grad_norm": 0.32776206731796265,
"learning_rate": 9.577603766131235e-05,
"loss": 0.0181,
"step": 17520
},
{
"epoch": 11.54048716260698,
"grad_norm": 0.28864943981170654,
"learning_rate": 9.576938373311878e-05,
"loss": 0.0132,
"step": 17530
},
{
"epoch": 11.547070441079658,
"grad_norm": 0.31169456243515015,
"learning_rate": 9.576272479965421e-05,
"loss": 0.0144,
"step": 17540
},
{
"epoch": 11.553653719552337,
"grad_norm": 0.26172152161598206,
"learning_rate": 9.575606086164687e-05,
"loss": 0.0199,
"step": 17550
},
{
"epoch": 11.560236998025017,
"grad_norm": 0.26040390133857727,
"learning_rate": 9.57493919198255e-05,
"loss": 0.014,
"step": 17560
},
{
"epoch": 11.566820276497696,
"grad_norm": 0.3550359904766083,
"learning_rate": 9.57427179749194e-05,
"loss": 0.0228,
"step": 17570
},
{
"epoch": 11.573403554970374,
"grad_norm": 0.19901423156261444,
"learning_rate": 9.573603902765846e-05,
"loss": 0.0111,
"step": 17580
},
{
"epoch": 11.579986833443055,
"grad_norm": 0.2301362156867981,
"learning_rate": 9.572935507877304e-05,
"loss": 0.0135,
"step": 17590
},
{
"epoch": 11.586570111915734,
"grad_norm": 0.2321770191192627,
"learning_rate": 9.57226661289941e-05,
"loss": 0.0164,
"step": 17600
},
{
"epoch": 11.593153390388414,
"grad_norm": 0.2747436761856079,
"learning_rate": 9.571597217905315e-05,
"loss": 0.021,
"step": 17610
},
{
"epoch": 11.599736668861093,
"grad_norm": 0.20071610808372498,
"learning_rate": 9.57092732296822e-05,
"loss": 0.0125,
"step": 17620
},
{
"epoch": 11.606319947333771,
"grad_norm": 0.1949387639760971,
"learning_rate": 9.570256928161385e-05,
"loss": 0.0116,
"step": 17630
},
{
"epoch": 11.612903225806452,
"grad_norm": 0.3634372353553772,
"learning_rate": 9.569586033558126e-05,
"loss": 0.0193,
"step": 17640
},
{
"epoch": 11.61948650427913,
"grad_norm": 0.21258898079395294,
"learning_rate": 9.568914639231807e-05,
"loss": 0.0134,
"step": 17650
},
{
"epoch": 11.626069782751811,
"grad_norm": 0.36896803975105286,
"learning_rate": 9.568242745255852e-05,
"loss": 0.0159,
"step": 17660
},
{
"epoch": 11.63265306122449,
"grad_norm": 0.2278522551059723,
"learning_rate": 9.567570351703739e-05,
"loss": 0.0112,
"step": 17670
},
{
"epoch": 11.639236339697169,
"grad_norm": 0.27720415592193604,
"learning_rate": 9.566897458649001e-05,
"loss": 0.0129,
"step": 17680
},
{
"epoch": 11.645819618169849,
"grad_norm": 0.2750234603881836,
"learning_rate": 9.566224066165221e-05,
"loss": 0.0129,
"step": 17690
},
{
"epoch": 11.652402896642528,
"grad_norm": 0.20785076916217804,
"learning_rate": 9.565550174326043e-05,
"loss": 0.0166,
"step": 17700
},
{
"epoch": 11.658986175115208,
"grad_norm": 0.3343009650707245,
"learning_rate": 9.564875783205162e-05,
"loss": 0.0171,
"step": 17710
},
{
"epoch": 11.665569453587887,
"grad_norm": 0.2655935287475586,
"learning_rate": 9.564200892876328e-05,
"loss": 0.0145,
"step": 17720
},
{
"epoch": 11.672152732060566,
"grad_norm": 0.2510881721973419,
"learning_rate": 9.563525503413348e-05,
"loss": 0.0155,
"step": 17730
},
{
"epoch": 11.678736010533246,
"grad_norm": 0.19270378351211548,
"learning_rate": 9.562849614890079e-05,
"loss": 0.0108,
"step": 17740
},
{
"epoch": 11.685319289005925,
"grad_norm": 0.2604980170726776,
"learning_rate": 9.562173227380436e-05,
"loss": 0.0126,
"step": 17750
},
{
"epoch": 11.691902567478603,
"grad_norm": 0.266309529542923,
"learning_rate": 9.561496340958389e-05,
"loss": 0.0166,
"step": 17760
},
{
"epoch": 11.698485845951284,
"grad_norm": 0.25196966528892517,
"learning_rate": 9.560818955697959e-05,
"loss": 0.0138,
"step": 17770
},
{
"epoch": 11.705069124423963,
"grad_norm": 0.261741578578949,
"learning_rate": 9.560141071673228e-05,
"loss": 0.0119,
"step": 17780
},
{
"epoch": 11.711652402896643,
"grad_norm": 0.30264192819595337,
"learning_rate": 9.559462688958323e-05,
"loss": 0.0116,
"step": 17790
},
{
"epoch": 11.718235681369322,
"grad_norm": 0.30297014117240906,
"learning_rate": 9.558783807627434e-05,
"loss": 0.0232,
"step": 17800
},
{
"epoch": 11.724818959842,
"grad_norm": 0.22254282236099243,
"learning_rate": 9.558104427754801e-05,
"loss": 0.0127,
"step": 17810
},
{
"epoch": 11.731402238314681,
"grad_norm": 0.43164294958114624,
"learning_rate": 9.557424549414722e-05,
"loss": 0.0151,
"step": 17820
},
{
"epoch": 11.73798551678736,
"grad_norm": 0.33549776673316956,
"learning_rate": 9.556744172681546e-05,
"loss": 0.0129,
"step": 17830
},
{
"epoch": 11.74456879526004,
"grad_norm": 0.29119569063186646,
"learning_rate": 9.556063297629677e-05,
"loss": 0.015,
"step": 17840
},
{
"epoch": 11.751152073732719,
"grad_norm": 0.32678577303886414,
"learning_rate": 9.555381924333578e-05,
"loss": 0.0143,
"step": 17850
},
{
"epoch": 11.757735352205398,
"grad_norm": 0.2586158215999603,
"learning_rate": 9.554700052867758e-05,
"loss": 0.012,
"step": 17860
},
{
"epoch": 11.764318630678078,
"grad_norm": 0.38514357805252075,
"learning_rate": 9.554017683306789e-05,
"loss": 0.015,
"step": 17870
},
{
"epoch": 11.770901909150757,
"grad_norm": 0.2856191396713257,
"learning_rate": 9.553334815725294e-05,
"loss": 0.015,
"step": 17880
},
{
"epoch": 11.777485187623437,
"grad_norm": 0.3445255756378174,
"learning_rate": 9.552651450197949e-05,
"loss": 0.0189,
"step": 17890
},
{
"epoch": 11.784068466096116,
"grad_norm": 0.37124982476234436,
"learning_rate": 9.551967586799486e-05,
"loss": 0.0156,
"step": 17900
},
{
"epoch": 11.790651744568795,
"grad_norm": 0.24827559292316437,
"learning_rate": 9.551283225604692e-05,
"loss": 0.0152,
"step": 17910
},
{
"epoch": 11.797235023041475,
"grad_norm": 0.24968737363815308,
"learning_rate": 9.550598366688406e-05,
"loss": 0.0151,
"step": 17920
},
{
"epoch": 11.803818301514154,
"grad_norm": 0.20630638301372528,
"learning_rate": 9.549913010125526e-05,
"loss": 0.0102,
"step": 17930
},
{
"epoch": 11.810401579986834,
"grad_norm": 0.2887851297855377,
"learning_rate": 9.549227155990999e-05,
"loss": 0.0115,
"step": 17940
},
{
"epoch": 11.816984858459513,
"grad_norm": 0.21263433992862701,
"learning_rate": 9.548540804359828e-05,
"loss": 0.0109,
"step": 17950
},
{
"epoch": 11.823568136932192,
"grad_norm": 0.19671645760536194,
"learning_rate": 9.547853955307077e-05,
"loss": 0.0194,
"step": 17960
},
{
"epoch": 11.830151415404872,
"grad_norm": 0.40197381377220154,
"learning_rate": 9.547166608907853e-05,
"loss": 0.0164,
"step": 17970
},
{
"epoch": 11.83673469387755,
"grad_norm": 0.1970282793045044,
"learning_rate": 9.546478765237326e-05,
"loss": 0.0117,
"step": 17980
},
{
"epoch": 11.843317972350231,
"grad_norm": 0.28184401988983154,
"learning_rate": 9.545790424370715e-05,
"loss": 0.0177,
"step": 17990
},
{
"epoch": 11.84990125082291,
"grad_norm": 0.33765512704849243,
"learning_rate": 9.5451015863833e-05,
"loss": 0.0157,
"step": 18000
},
{
"epoch": 11.856484529295589,
"grad_norm": 0.2885270118713379,
"learning_rate": 9.544412251350408e-05,
"loss": 0.0126,
"step": 18010
},
{
"epoch": 11.86306780776827,
"grad_norm": 0.23564064502716064,
"learning_rate": 9.543722419347422e-05,
"loss": 0.0134,
"step": 18020
},
{
"epoch": 11.869651086240948,
"grad_norm": 0.23535889387130737,
"learning_rate": 9.543032090449788e-05,
"loss": 0.0102,
"step": 18030
},
{
"epoch": 11.876234364713627,
"grad_norm": 0.2706483006477356,
"learning_rate": 9.542341264732992e-05,
"loss": 0.0125,
"step": 18040
},
{
"epoch": 11.882817643186307,
"grad_norm": 0.2801242172718048,
"learning_rate": 9.541649942272585e-05,
"loss": 0.0151,
"step": 18050
},
{
"epoch": 11.889400921658986,
"grad_norm": 0.2163863331079483,
"learning_rate": 9.54095812314417e-05,
"loss": 0.009,
"step": 18060
},
{
"epoch": 11.895984200131666,
"grad_norm": 0.2012210488319397,
"learning_rate": 9.540265807423401e-05,
"loss": 0.0117,
"step": 18070
},
{
"epoch": 11.902567478604345,
"grad_norm": 0.2149057537317276,
"learning_rate": 9.53957299518599e-05,
"loss": 0.0119,
"step": 18080
},
{
"epoch": 11.909150757077024,
"grad_norm": 0.3948190212249756,
"learning_rate": 9.5388796865077e-05,
"loss": 0.0123,
"step": 18090
},
{
"epoch": 11.915734035549704,
"grad_norm": 0.24839039146900177,
"learning_rate": 9.538185881464353e-05,
"loss": 0.0145,
"step": 18100
},
{
"epoch": 11.922317314022383,
"grad_norm": 0.21487465500831604,
"learning_rate": 9.537491580131821e-05,
"loss": 0.0192,
"step": 18110
},
{
"epoch": 11.928900592495063,
"grad_norm": 0.20688077807426453,
"learning_rate": 9.53679678258603e-05,
"loss": 0.0088,
"step": 18120
},
{
"epoch": 11.935483870967742,
"grad_norm": 0.1747399866580963,
"learning_rate": 9.536101488902966e-05,
"loss": 0.0111,
"step": 18130
},
{
"epoch": 11.94206714944042,
"grad_norm": 0.27299630641937256,
"learning_rate": 9.535405699158663e-05,
"loss": 0.014,
"step": 18140
},
{
"epoch": 11.948650427913101,
"grad_norm": 0.27545249462127686,
"learning_rate": 9.53470941342921e-05,
"loss": 0.017,
"step": 18150
},
{
"epoch": 11.95523370638578,
"grad_norm": 0.20343594253063202,
"learning_rate": 9.534012631790756e-05,
"loss": 0.0111,
"step": 18160
},
{
"epoch": 11.96181698485846,
"grad_norm": 0.31189754605293274,
"learning_rate": 9.533315354319494e-05,
"loss": 0.0119,
"step": 18170
},
{
"epoch": 11.968400263331139,
"grad_norm": 0.19460438191890717,
"learning_rate": 9.532617581091682e-05,
"loss": 0.0145,
"step": 18180
},
{
"epoch": 11.974983541803818,
"grad_norm": 0.29732516407966614,
"learning_rate": 9.531919312183629e-05,
"loss": 0.0141,
"step": 18190
},
{
"epoch": 11.981566820276498,
"grad_norm": 0.20291005074977875,
"learning_rate": 9.531220547671688e-05,
"loss": 0.015,
"step": 18200
},
{
"epoch": 11.988150098749177,
"grad_norm": 0.2360871136188507,
"learning_rate": 9.530521287632285e-05,
"loss": 0.0139,
"step": 18210
},
{
"epoch": 11.994733377221856,
"grad_norm": 0.2085309624671936,
"learning_rate": 9.529821532141884e-05,
"loss": 0.0163,
"step": 18220
},
{
"epoch": 12.001316655694536,
"grad_norm": 0.3084196150302887,
"learning_rate": 9.52912128127701e-05,
"loss": 0.0173,
"step": 18230
},
{
"epoch": 12.007899934167215,
"grad_norm": 0.24843260645866394,
"learning_rate": 9.528420535114244e-05,
"loss": 0.0129,
"step": 18240
},
{
"epoch": 12.014483212639895,
"grad_norm": 0.1993218958377838,
"learning_rate": 9.527719293730215e-05,
"loss": 0.0108,
"step": 18250
},
{
"epoch": 12.021066491112574,
"grad_norm": 0.2609328627586365,
"learning_rate": 9.527017557201611e-05,
"loss": 0.0105,
"step": 18260
},
{
"epoch": 12.027649769585253,
"grad_norm": 0.2242310792207718,
"learning_rate": 9.526315325605176e-05,
"loss": 0.0133,
"step": 18270
},
{
"epoch": 12.034233048057933,
"grad_norm": 0.20115557312965393,
"learning_rate": 9.525612599017699e-05,
"loss": 0.0128,
"step": 18280
},
{
"epoch": 12.040816326530612,
"grad_norm": 0.27549102902412415,
"learning_rate": 9.524909377516033e-05,
"loss": 0.0148,
"step": 18290
},
{
"epoch": 12.047399605003292,
"grad_norm": 0.3658803701400757,
"learning_rate": 9.524205661177081e-05,
"loss": 0.0168,
"step": 18300
},
{
"epoch": 12.053982883475971,
"grad_norm": 0.4345685839653015,
"learning_rate": 9.523501450077801e-05,
"loss": 0.0139,
"step": 18310
},
{
"epoch": 12.06056616194865,
"grad_norm": 0.20041225850582123,
"learning_rate": 9.522796744295202e-05,
"loss": 0.0129,
"step": 18320
},
{
"epoch": 12.06714944042133,
"grad_norm": 0.1929173320531845,
"learning_rate": 9.522091543906352e-05,
"loss": 0.0126,
"step": 18330
},
{
"epoch": 12.073732718894009,
"grad_norm": 0.31811589002609253,
"learning_rate": 9.521385848988369e-05,
"loss": 0.0184,
"step": 18340
},
{
"epoch": 12.08031599736669,
"grad_norm": 0.17661380767822266,
"learning_rate": 9.520679659618428e-05,
"loss": 0.0163,
"step": 18350
},
{
"epoch": 12.086899275839368,
"grad_norm": 0.37751442193984985,
"learning_rate": 9.519972975873754e-05,
"loss": 0.015,
"step": 18360
},
{
"epoch": 12.093482554312047,
"grad_norm": 0.26706910133361816,
"learning_rate": 9.519265797831633e-05,
"loss": 0.0124,
"step": 18370
},
{
"epoch": 12.100065832784727,
"grad_norm": 0.25929689407348633,
"learning_rate": 9.518558125569399e-05,
"loss": 0.0138,
"step": 18380
},
{
"epoch": 12.106649111257406,
"grad_norm": 0.4082357585430145,
"learning_rate": 9.517849959164442e-05,
"loss": 0.0165,
"step": 18390
},
{
"epoch": 12.113232389730086,
"grad_norm": 0.17698632180690765,
"learning_rate": 9.517141298694205e-05,
"loss": 0.0133,
"step": 18400
},
{
"epoch": 12.119815668202765,
"grad_norm": 0.3541454076766968,
"learning_rate": 9.516432144236188e-05,
"loss": 0.0156,
"step": 18410
},
{
"epoch": 12.126398946675444,
"grad_norm": 0.3601248264312744,
"learning_rate": 9.515722495867941e-05,
"loss": 0.0135,
"step": 18420
},
{
"epoch": 12.132982225148124,
"grad_norm": 0.3534928262233734,
"learning_rate": 9.515012353667072e-05,
"loss": 0.0178,
"step": 18430
},
{
"epoch": 12.139565503620803,
"grad_norm": 0.2444927841424942,
"learning_rate": 9.51430171771124e-05,
"loss": 0.0194,
"step": 18440
},
{
"epoch": 12.146148782093482,
"grad_norm": 0.25982049107551575,
"learning_rate": 9.513590588078159e-05,
"loss": 0.0155,
"step": 18450
},
{
"epoch": 12.152732060566162,
"grad_norm": 0.2432614415884018,
"learning_rate": 9.512878964845597e-05,
"loss": 0.0135,
"step": 18460
},
{
"epoch": 12.15931533903884,
"grad_norm": 0.18943820893764496,
"learning_rate": 9.512166848091377e-05,
"loss": 0.0149,
"step": 18470
},
{
"epoch": 12.165898617511521,
"grad_norm": 0.18879930675029755,
"learning_rate": 9.511454237893376e-05,
"loss": 0.0103,
"step": 18480
},
{
"epoch": 12.1724818959842,
"grad_norm": 0.2340516448020935,
"learning_rate": 9.51074113432952e-05,
"loss": 0.0182,
"step": 18490
},
{
"epoch": 12.179065174456879,
"grad_norm": 0.28328725695610046,
"learning_rate": 9.510027537477797e-05,
"loss": 0.012,
"step": 18500
},
{
"epoch": 12.18564845292956,
"grad_norm": 0.24688591063022614,
"learning_rate": 9.509313447416242e-05,
"loss": 0.0137,
"step": 18510
},
{
"epoch": 12.192231731402238,
"grad_norm": 0.32870709896087646,
"learning_rate": 9.508598864222949e-05,
"loss": 0.0122,
"step": 18520
},
{
"epoch": 12.198815009874918,
"grad_norm": 0.27400925755500793,
"learning_rate": 9.507883787976062e-05,
"loss": 0.0165,
"step": 18530
},
{
"epoch": 12.205398288347597,
"grad_norm": 0.32363277673721313,
"learning_rate": 9.507168218753781e-05,
"loss": 0.0141,
"step": 18540
},
{
"epoch": 12.211981566820276,
"grad_norm": 0.18854600191116333,
"learning_rate": 9.506452156634362e-05,
"loss": 0.0098,
"step": 18550
},
{
"epoch": 12.218564845292956,
"grad_norm": 0.25760045647621155,
"learning_rate": 9.505735601696109e-05,
"loss": 0.0131,
"step": 18560
},
{
"epoch": 12.225148123765635,
"grad_norm": 0.20355257391929626,
"learning_rate": 9.505018554017385e-05,
"loss": 0.0119,
"step": 18570
},
{
"epoch": 12.231731402238315,
"grad_norm": 0.21837636828422546,
"learning_rate": 9.504301013676604e-05,
"loss": 0.0111,
"step": 18580
},
{
"epoch": 12.238314680710994,
"grad_norm": 0.2999340295791626,
"learning_rate": 9.503582980752238e-05,
"loss": 0.019,
"step": 18590
},
{
"epoch": 12.244897959183673,
"grad_norm": 0.2768423557281494,
"learning_rate": 9.502864455322809e-05,
"loss": 0.0131,
"step": 18600
},
{
"epoch": 12.251481237656353,
"grad_norm": 0.31495314836502075,
"learning_rate": 9.502145437466891e-05,
"loss": 0.0115,
"step": 18610
},
{
"epoch": 12.258064516129032,
"grad_norm": 0.2467675507068634,
"learning_rate": 9.501425927263116e-05,
"loss": 0.0172,
"step": 18620
},
{
"epoch": 12.264647794601713,
"grad_norm": 0.18325938284397125,
"learning_rate": 9.500705924790172e-05,
"loss": 0.0123,
"step": 18630
},
{
"epoch": 12.271231073074391,
"grad_norm": 0.2049708366394043,
"learning_rate": 9.499985430126794e-05,
"loss": 0.0162,
"step": 18640
},
{
"epoch": 12.27781435154707,
"grad_norm": 0.19388315081596375,
"learning_rate": 9.499264443351775e-05,
"loss": 0.0116,
"step": 18650
},
{
"epoch": 12.28439763001975,
"grad_norm": 0.37319058179855347,
"learning_rate": 9.498542964543961e-05,
"loss": 0.019,
"step": 18660
},
{
"epoch": 12.290980908492429,
"grad_norm": 0.3051353394985199,
"learning_rate": 9.497820993782252e-05,
"loss": 0.0145,
"step": 18670
},
{
"epoch": 12.297564186965108,
"grad_norm": 0.2269616425037384,
"learning_rate": 9.497098531145601e-05,
"loss": 0.0131,
"step": 18680
},
{
"epoch": 12.304147465437788,
"grad_norm": 0.29061055183410645,
"learning_rate": 9.496375576713017e-05,
"loss": 0.013,
"step": 18690
},
{
"epoch": 12.310730743910467,
"grad_norm": 0.203207865357399,
"learning_rate": 9.49565213056356e-05,
"loss": 0.0097,
"step": 18700
},
{
"epoch": 12.317314022383147,
"grad_norm": 0.252937912940979,
"learning_rate": 9.494928192776342e-05,
"loss": 0.015,
"step": 18710
},
{
"epoch": 12.323897300855826,
"grad_norm": 0.2651543915271759,
"learning_rate": 9.494203763430538e-05,
"loss": 0.0162,
"step": 18720
},
{
"epoch": 12.330480579328505,
"grad_norm": 0.2031412422657013,
"learning_rate": 9.493478842605366e-05,
"loss": 0.0173,
"step": 18730
},
{
"epoch": 12.337063857801185,
"grad_norm": 0.2593730688095093,
"learning_rate": 9.492753430380105e-05,
"loss": 0.0165,
"step": 18740
},
{
"epoch": 12.343647136273864,
"grad_norm": 0.2717714309692383,
"learning_rate": 9.492027526834083e-05,
"loss": 0.0141,
"step": 18750
},
{
"epoch": 12.350230414746544,
"grad_norm": 0.22555111348628998,
"learning_rate": 9.491301132046684e-05,
"loss": 0.0176,
"step": 18760
},
{
"epoch": 12.356813693219223,
"grad_norm": 0.34241417050361633,
"learning_rate": 9.490574246097345e-05,
"loss": 0.0147,
"step": 18770
},
{
"epoch": 12.363396971691902,
"grad_norm": 0.3548526167869568,
"learning_rate": 9.48984686906556e-05,
"loss": 0.0167,
"step": 18780
},
{
"epoch": 12.369980250164582,
"grad_norm": 0.2203076183795929,
"learning_rate": 9.489119001030871e-05,
"loss": 0.0108,
"step": 18790
},
{
"epoch": 12.376563528637261,
"grad_norm": 0.32223716378211975,
"learning_rate": 9.488390642072878e-05,
"loss": 0.0127,
"step": 18800
},
{
"epoch": 12.383146807109942,
"grad_norm": 0.26266780495643616,
"learning_rate": 9.48766179227123e-05,
"loss": 0.0121,
"step": 18810
},
{
"epoch": 12.38973008558262,
"grad_norm": 0.31776121258735657,
"learning_rate": 9.486932451705636e-05,
"loss": 0.0126,
"step": 18820
},
{
"epoch": 12.396313364055299,
"grad_norm": 0.28183192014694214,
"learning_rate": 9.486202620455857e-05,
"loss": 0.0169,
"step": 18830
},
{
"epoch": 12.40289664252798,
"grad_norm": 0.250258207321167,
"learning_rate": 9.485472298601704e-05,
"loss": 0.0124,
"step": 18840
},
{
"epoch": 12.409479921000658,
"grad_norm": 0.23605410754680634,
"learning_rate": 9.484741486223043e-05,
"loss": 0.0124,
"step": 18850
},
{
"epoch": 12.416063199473339,
"grad_norm": 0.3391025960445404,
"learning_rate": 9.484010183399797e-05,
"loss": 0.0135,
"step": 18860
},
{
"epoch": 12.422646477946017,
"grad_norm": 0.2920762598514557,
"learning_rate": 9.483278390211938e-05,
"loss": 0.0099,
"step": 18870
},
{
"epoch": 12.429229756418696,
"grad_norm": 0.2580193877220154,
"learning_rate": 9.482546106739496e-05,
"loss": 0.0252,
"step": 18880
},
{
"epoch": 12.435813034891376,
"grad_norm": 0.26369646191596985,
"learning_rate": 9.48181333306255e-05,
"loss": 0.0156,
"step": 18890
},
{
"epoch": 12.442396313364055,
"grad_norm": 0.20668303966522217,
"learning_rate": 9.481080069261237e-05,
"loss": 0.0121,
"step": 18900
},
{
"epoch": 12.448979591836734,
"grad_norm": 0.23545192182064056,
"learning_rate": 9.480346315415745e-05,
"loss": 0.0163,
"step": 18910
},
{
"epoch": 12.455562870309414,
"grad_norm": 0.33573266863822937,
"learning_rate": 9.479612071606314e-05,
"loss": 0.0167,
"step": 18920
},
{
"epoch": 12.462146148782093,
"grad_norm": 0.3269185721874237,
"learning_rate": 9.478877337913244e-05,
"loss": 0.0133,
"step": 18930
},
{
"epoch": 12.468729427254774,
"grad_norm": 0.3243507444858551,
"learning_rate": 9.478142114416881e-05,
"loss": 0.0161,
"step": 18940
},
{
"epoch": 12.475312705727452,
"grad_norm": 0.32024872303009033,
"learning_rate": 9.47740640119763e-05,
"loss": 0.0144,
"step": 18950
},
{
"epoch": 12.481895984200131,
"grad_norm": 0.28228938579559326,
"learning_rate": 9.476670198335947e-05,
"loss": 0.0143,
"step": 18960
},
{
"epoch": 12.488479262672811,
"grad_norm": 0.21601736545562744,
"learning_rate": 9.47593350591234e-05,
"loss": 0.0137,
"step": 18970
},
{
"epoch": 12.49506254114549,
"grad_norm": 0.2254035770893097,
"learning_rate": 9.475196324007376e-05,
"loss": 0.0169,
"step": 18980
},
{
"epoch": 12.50164581961817,
"grad_norm": 0.20739617943763733,
"learning_rate": 9.474458652701669e-05,
"loss": 0.0125,
"step": 18990
},
{
"epoch": 12.50822909809085,
"grad_norm": 0.3237232565879822,
"learning_rate": 9.473720492075892e-05,
"loss": 0.0216,
"step": 19000
},
{
"epoch": 12.514812376563528,
"grad_norm": 0.2874460518360138,
"learning_rate": 9.472981842210768e-05,
"loss": 0.0161,
"step": 19010
},
{
"epoch": 12.521395655036208,
"grad_norm": 0.2618703246116638,
"learning_rate": 9.472242703187074e-05,
"loss": 0.0115,
"step": 19020
},
{
"epoch": 12.527978933508887,
"grad_norm": 0.36395758390426636,
"learning_rate": 9.471503075085643e-05,
"loss": 0.0191,
"step": 19030
},
{
"epoch": 12.534562211981568,
"grad_norm": 0.26025524735450745,
"learning_rate": 9.470762957987359e-05,
"loss": 0.0147,
"step": 19040
},
{
"epoch": 12.541145490454246,
"grad_norm": 0.2700994908809662,
"learning_rate": 9.470022351973158e-05,
"loss": 0.0145,
"step": 19050
},
{
"epoch": 12.547728768926925,
"grad_norm": 0.32361045479774475,
"learning_rate": 9.469281257124034e-05,
"loss": 0.0157,
"step": 19060
},
{
"epoch": 12.554312047399605,
"grad_norm": 0.3192708492279053,
"learning_rate": 9.46853967352103e-05,
"loss": 0.0211,
"step": 19070
},
{
"epoch": 12.560895325872284,
"grad_norm": 0.42311349511146545,
"learning_rate": 9.467797601245246e-05,
"loss": 0.0132,
"step": 19080
},
{
"epoch": 12.567478604344963,
"grad_norm": 0.23498934507369995,
"learning_rate": 9.467055040377834e-05,
"loss": 0.0164,
"step": 19090
},
{
"epoch": 12.574061882817643,
"grad_norm": 0.21013031899929047,
"learning_rate": 9.466311990999999e-05,
"loss": 0.0166,
"step": 19100
},
{
"epoch": 12.580645161290322,
"grad_norm": 0.3690164089202881,
"learning_rate": 9.465568453193e-05,
"loss": 0.0136,
"step": 19110
},
{
"epoch": 12.587228439763003,
"grad_norm": 0.3338952362537384,
"learning_rate": 9.464824427038148e-05,
"loss": 0.0185,
"step": 19120
},
{
"epoch": 12.593811718235681,
"grad_norm": 0.41204050183296204,
"learning_rate": 9.46407991261681e-05,
"loss": 0.0116,
"step": 19130
},
{
"epoch": 12.60039499670836,
"grad_norm": 0.35593321919441223,
"learning_rate": 9.463334910010404e-05,
"loss": 0.0197,
"step": 19140
},
{
"epoch": 12.60697827518104,
"grad_norm": 0.32464295625686646,
"learning_rate": 9.462589419300403e-05,
"loss": 0.0156,
"step": 19150
},
{
"epoch": 12.61356155365372,
"grad_norm": 0.3284878134727478,
"learning_rate": 9.461843440568333e-05,
"loss": 0.0177,
"step": 19160
},
{
"epoch": 12.6201448321264,
"grad_norm": 0.3329091966152191,
"learning_rate": 9.461096973895773e-05,
"loss": 0.0139,
"step": 19170
},
{
"epoch": 12.626728110599078,
"grad_norm": 0.20790034532546997,
"learning_rate": 9.460350019364355e-05,
"loss": 0.009,
"step": 19180
},
{
"epoch": 12.633311389071757,
"grad_norm": 0.22701913118362427,
"learning_rate": 9.459602577055764e-05,
"loss": 0.0132,
"step": 19190
},
{
"epoch": 12.639894667544437,
"grad_norm": 0.3165176510810852,
"learning_rate": 9.45885464705174e-05,
"loss": 0.0166,
"step": 19200
},
{
"epoch": 12.646477946017116,
"grad_norm": 0.4785653352737427,
"learning_rate": 9.458106229434076e-05,
"loss": 0.019,
"step": 19210
},
{
"epoch": 12.653061224489797,
"grad_norm": 0.4412171542644501,
"learning_rate": 9.457357324284617e-05,
"loss": 0.0182,
"step": 19220
},
{
"epoch": 12.659644502962475,
"grad_norm": 0.33783483505249023,
"learning_rate": 9.456607931685262e-05,
"loss": 0.0182,
"step": 19230
},
{
"epoch": 12.666227781435154,
"grad_norm": 0.2214769423007965,
"learning_rate": 9.455858051717965e-05,
"loss": 0.0165,
"step": 19240
},
{
"epoch": 12.672811059907835,
"grad_norm": 0.26670995354652405,
"learning_rate": 9.45510768446473e-05,
"loss": 0.0203,
"step": 19250
},
{
"epoch": 12.679394338380513,
"grad_norm": 0.2486993819475174,
"learning_rate": 9.454356830007618e-05,
"loss": 0.0133,
"step": 19260
},
{
"epoch": 12.685977616853194,
"grad_norm": 0.25949645042419434,
"learning_rate": 9.45360548842874e-05,
"loss": 0.0131,
"step": 19270
},
{
"epoch": 12.692560895325872,
"grad_norm": 0.23444333672523499,
"learning_rate": 9.452853659810261e-05,
"loss": 0.011,
"step": 19280
},
{
"epoch": 12.699144173798551,
"grad_norm": 0.23792310059070587,
"learning_rate": 9.452101344234401e-05,
"loss": 0.0153,
"step": 19290
},
{
"epoch": 12.705727452271232,
"grad_norm": 0.3448595702648163,
"learning_rate": 9.451348541783431e-05,
"loss": 0.0166,
"step": 19300
},
{
"epoch": 12.71231073074391,
"grad_norm": 0.20378480851650238,
"learning_rate": 9.450595252539678e-05,
"loss": 0.0102,
"step": 19310
},
{
"epoch": 12.71889400921659,
"grad_norm": 0.24386438727378845,
"learning_rate": 9.449841476585518e-05,
"loss": 0.0139,
"step": 19320
},
{
"epoch": 12.72547728768927,
"grad_norm": 0.15812814235687256,
"learning_rate": 9.449087214003384e-05,
"loss": 0.0122,
"step": 19330
},
{
"epoch": 12.732060566161948,
"grad_norm": 0.3400018811225891,
"learning_rate": 9.448332464875765e-05,
"loss": 0.0181,
"step": 19340
},
{
"epoch": 12.738643844634629,
"grad_norm": 0.24390046298503876,
"learning_rate": 9.447577229285192e-05,
"loss": 0.0134,
"step": 19350
},
{
"epoch": 12.745227123107307,
"grad_norm": 0.24113021790981293,
"learning_rate": 9.446821507314261e-05,
"loss": 0.0156,
"step": 19360
},
{
"epoch": 12.751810401579986,
"grad_norm": 0.21532553434371948,
"learning_rate": 9.446065299045617e-05,
"loss": 0.0105,
"step": 19370
},
{
"epoch": 12.758393680052666,
"grad_norm": 0.21309369802474976,
"learning_rate": 9.445308604561955e-05,
"loss": 0.0112,
"step": 19380
},
{
"epoch": 12.764976958525345,
"grad_norm": 0.28644633293151855,
"learning_rate": 9.444551423946028e-05,
"loss": 0.0132,
"step": 19390
},
{
"epoch": 12.771560236998026,
"grad_norm": 0.18560084700584412,
"learning_rate": 9.443793757280638e-05,
"loss": 0.0101,
"step": 19400
},
{
"epoch": 12.778143515470704,
"grad_norm": 0.2123103141784668,
"learning_rate": 9.443035604648646e-05,
"loss": 0.0131,
"step": 19410
},
{
"epoch": 12.784726793943383,
"grad_norm": 0.23565056920051575,
"learning_rate": 9.44227696613296e-05,
"loss": 0.0172,
"step": 19420
},
{
"epoch": 12.791310072416064,
"grad_norm": 0.2035631537437439,
"learning_rate": 9.441517841816542e-05,
"loss": 0.0163,
"step": 19430
},
{
"epoch": 12.797893350888742,
"grad_norm": 0.2873001992702484,
"learning_rate": 9.440758231782413e-05,
"loss": 0.014,
"step": 19440
},
{
"epoch": 12.804476629361423,
"grad_norm": 0.33354443311691284,
"learning_rate": 9.439998136113639e-05,
"loss": 0.0148,
"step": 19450
},
{
"epoch": 12.811059907834101,
"grad_norm": 0.2871869206428528,
"learning_rate": 9.439237554893344e-05,
"loss": 0.0181,
"step": 19460
},
{
"epoch": 12.81764318630678,
"grad_norm": 0.22437499463558197,
"learning_rate": 9.438476488204705e-05,
"loss": 0.0156,
"step": 19470
},
{
"epoch": 12.82422646477946,
"grad_norm": 0.203880175948143,
"learning_rate": 9.43771493613095e-05,
"loss": 0.0117,
"step": 19480
},
{
"epoch": 12.83080974325214,
"grad_norm": 0.27781352400779724,
"learning_rate": 9.436952898755362e-05,
"loss": 0.0136,
"step": 19490
},
{
"epoch": 12.83739302172482,
"grad_norm": 0.20180515944957733,
"learning_rate": 9.436190376161276e-05,
"loss": 0.0112,
"step": 19500
},
{
"epoch": 12.843976300197498,
"grad_norm": 0.29745423793792725,
"learning_rate": 9.43542736843208e-05,
"loss": 0.0121,
"step": 19510
},
{
"epoch": 12.850559578670177,
"grad_norm": 0.21660368144512177,
"learning_rate": 9.434663875651216e-05,
"loss": 0.0137,
"step": 19520
},
{
"epoch": 12.857142857142858,
"grad_norm": 0.18247519433498383,
"learning_rate": 9.433899897902177e-05,
"loss": 0.0146,
"step": 19530
},
{
"epoch": 12.863726135615536,
"grad_norm": 0.23599767684936523,
"learning_rate": 9.433135435268511e-05,
"loss": 0.012,
"step": 19540
},
{
"epoch": 12.870309414088215,
"grad_norm": 0.1869116723537445,
"learning_rate": 9.432370487833819e-05,
"loss": 0.0136,
"step": 19550
},
{
"epoch": 12.876892692560896,
"grad_norm": 0.1903548389673233,
"learning_rate": 9.431605055681756e-05,
"loss": 0.01,
"step": 19560
},
{
"epoch": 12.883475971033574,
"grad_norm": 0.23555710911750793,
"learning_rate": 9.430839138896026e-05,
"loss": 0.0106,
"step": 19570
},
{
"epoch": 12.890059249506255,
"grad_norm": 0.17329998314380646,
"learning_rate": 9.43007273756039e-05,
"loss": 0.0198,
"step": 19580
},
{
"epoch": 12.896642527978933,
"grad_norm": 0.17231988906860352,
"learning_rate": 9.429305851758658e-05,
"loss": 0.0093,
"step": 19590
},
{
"epoch": 12.903225806451612,
"grad_norm": 0.2414647340774536,
"learning_rate": 9.428538481574699e-05,
"loss": 0.0121,
"step": 19600
},
{
"epoch": 12.909809084924293,
"grad_norm": 0.3078465163707733,
"learning_rate": 9.42777062709243e-05,
"loss": 0.0159,
"step": 19610
},
{
"epoch": 12.916392363396971,
"grad_norm": 0.3000302314758301,
"learning_rate": 9.427002288395821e-05,
"loss": 0.0139,
"step": 19620
},
{
"epoch": 12.922975641869652,
"grad_norm": 0.23122508823871613,
"learning_rate": 9.426233465568898e-05,
"loss": 0.0097,
"step": 19630
},
{
"epoch": 12.92955892034233,
"grad_norm": 0.31386280059814453,
"learning_rate": 9.42546415869574e-05,
"loss": 0.0124,
"step": 19640
},
{
"epoch": 12.93614219881501,
"grad_norm": 0.2583659291267395,
"learning_rate": 9.424694367860473e-05,
"loss": 0.0209,
"step": 19650
},
{
"epoch": 12.94272547728769,
"grad_norm": 0.2314862757921219,
"learning_rate": 9.423924093147284e-05,
"loss": 0.0123,
"step": 19660
},
{
"epoch": 12.949308755760368,
"grad_norm": 0.21181035041809082,
"learning_rate": 9.423153334640407e-05,
"loss": 0.0132,
"step": 19670
},
{
"epoch": 12.955892034233049,
"grad_norm": 0.3480297327041626,
"learning_rate": 9.42238209242413e-05,
"loss": 0.0176,
"step": 19680
},
{
"epoch": 12.962475312705728,
"grad_norm": 0.25001290440559387,
"learning_rate": 9.421610366582798e-05,
"loss": 0.0145,
"step": 19690
},
{
"epoch": 12.969058591178406,
"grad_norm": 0.3914773166179657,
"learning_rate": 9.420838157200803e-05,
"loss": 0.0227,
"step": 19700
},
{
"epoch": 12.975641869651087,
"grad_norm": 0.2896583676338196,
"learning_rate": 9.420065464362594e-05,
"loss": 0.0167,
"step": 19710
},
{
"epoch": 12.982225148123765,
"grad_norm": 0.2761892080307007,
"learning_rate": 9.419292288152673e-05,
"loss": 0.0153,
"step": 19720
},
{
"epoch": 12.988808426596446,
"grad_norm": 0.3162051737308502,
"learning_rate": 9.418518628655588e-05,
"loss": 0.0113,
"step": 19730
},
{
"epoch": 12.995391705069125,
"grad_norm": 0.29129651188850403,
"learning_rate": 9.417744485955951e-05,
"loss": 0.0138,
"step": 19740
},
{
"epoch": 13.001974983541803,
"grad_norm": 0.21762390434741974,
"learning_rate": 9.41696986013842e-05,
"loss": 0.0181,
"step": 19750
},
{
"epoch": 13.008558262014484,
"grad_norm": 0.22698360681533813,
"learning_rate": 9.416194751287705e-05,
"loss": 0.0133,
"step": 19760
},
{
"epoch": 13.015141540487162,
"grad_norm": 0.31235796213150024,
"learning_rate": 9.415419159488572e-05,
"loss": 0.0154,
"step": 19770
},
{
"epoch": 13.021724818959841,
"grad_norm": 0.4129425287246704,
"learning_rate": 9.414643084825837e-05,
"loss": 0.0209,
"step": 19780
},
{
"epoch": 13.028308097432522,
"grad_norm": 0.2958117425441742,
"learning_rate": 9.413866527384372e-05,
"loss": 0.0135,
"step": 19790
},
{
"epoch": 13.0348913759052,
"grad_norm": 0.16440927982330322,
"learning_rate": 9.4130894872491e-05,
"loss": 0.0161,
"step": 19800
},
{
"epoch": 13.04147465437788,
"grad_norm": 0.2403261810541153,
"learning_rate": 9.412311964504998e-05,
"loss": 0.0112,
"step": 19810
},
{
"epoch": 13.04805793285056,
"grad_norm": 0.2115643173456192,
"learning_rate": 9.411533959237091e-05,
"loss": 0.0185,
"step": 19820
},
{
"epoch": 13.054641211323238,
"grad_norm": 0.16743060946464539,
"learning_rate": 9.410755471530464e-05,
"loss": 0.0211,
"step": 19830
},
{
"epoch": 13.061224489795919,
"grad_norm": 0.2285161018371582,
"learning_rate": 9.40997650147025e-05,
"loss": 0.0123,
"step": 19840
},
{
"epoch": 13.067807768268597,
"grad_norm": 0.21295054256916046,
"learning_rate": 9.409197049141637e-05,
"loss": 0.0093,
"step": 19850
},
{
"epoch": 13.074391046741278,
"grad_norm": 0.18060113489627838,
"learning_rate": 9.408417114629863e-05,
"loss": 0.0104,
"step": 19860
},
{
"epoch": 13.080974325213957,
"grad_norm": 0.25025269389152527,
"learning_rate": 9.40763669802022e-05,
"loss": 0.018,
"step": 19870
},
{
"epoch": 13.087557603686635,
"grad_norm": 0.22388964891433716,
"learning_rate": 9.406855799398056e-05,
"loss": 0.0162,
"step": 19880
},
{
"epoch": 13.094140882159316,
"grad_norm": 0.21283680200576782,
"learning_rate": 9.406074418848767e-05,
"loss": 0.0162,
"step": 19890
},
{
"epoch": 13.100724160631994,
"grad_norm": 0.31367626786231995,
"learning_rate": 9.405292556457805e-05,
"loss": 0.0175,
"step": 19900
},
{
"epoch": 13.107307439104675,
"grad_norm": 0.32700100541114807,
"learning_rate": 9.404510212310671e-05,
"loss": 0.0171,
"step": 19910
},
{
"epoch": 13.113890717577354,
"grad_norm": 0.3267519474029541,
"learning_rate": 9.403727386492924e-05,
"loss": 0.0146,
"step": 19920
},
{
"epoch": 13.120473996050032,
"grad_norm": 0.2790376543998718,
"learning_rate": 9.40294407909017e-05,
"loss": 0.0137,
"step": 19930
},
{
"epoch": 13.127057274522713,
"grad_norm": 0.3063926100730896,
"learning_rate": 9.40216029018807e-05,
"loss": 0.0164,
"step": 19940
},
{
"epoch": 13.133640552995391,
"grad_norm": 0.2736910581588745,
"learning_rate": 9.401376019872338e-05,
"loss": 0.0117,
"step": 19950
},
{
"epoch": 13.140223831468072,
"grad_norm": 0.30961859226226807,
"learning_rate": 9.400591268228746e-05,
"loss": 0.0184,
"step": 19960
},
{
"epoch": 13.14680710994075,
"grad_norm": 0.2561533749103546,
"learning_rate": 9.399806035343106e-05,
"loss": 0.0185,
"step": 19970
},
{
"epoch": 13.15339038841343,
"grad_norm": 0.2734316289424896,
"learning_rate": 9.399020321301294e-05,
"loss": 0.0134,
"step": 19980
},
{
"epoch": 13.15997366688611,
"grad_norm": 0.2370346337556839,
"learning_rate": 9.398234126189234e-05,
"loss": 0.0173,
"step": 19990
},
{
"epoch": 13.166556945358789,
"grad_norm": 0.300430029630661,
"learning_rate": 9.397447450092902e-05,
"loss": 0.015,
"step": 20000
}
],
"logging_steps": 10,
"max_steps": 100000,
"num_input_tokens_seen": 0,
"num_train_epochs": 66,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}