zephyr-7b-sft-full / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 2890,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 4.816294846566506,
"learning_rate": 1.7301038062283738e-08,
"loss": 0.8531,
"step": 1
},
{
"epoch": 0.01,
"grad_norm": 4.872596535971759,
"learning_rate": 8.65051903114187e-08,
"loss": 0.8583,
"step": 5
},
{
"epoch": 0.02,
"grad_norm": 3.6310650985999184,
"learning_rate": 1.730103806228374e-07,
"loss": 0.8626,
"step": 10
},
{
"epoch": 0.03,
"grad_norm": 3.744415952772262,
"learning_rate": 2.5951557093425607e-07,
"loss": 0.8594,
"step": 15
},
{
"epoch": 0.03,
"grad_norm": 4.189549391166134,
"learning_rate": 3.460207612456748e-07,
"loss": 0.8542,
"step": 20
},
{
"epoch": 0.04,
"grad_norm": 2.388097158161717,
"learning_rate": 4.325259515570934e-07,
"loss": 0.8489,
"step": 25
},
{
"epoch": 0.05,
"grad_norm": 2.131273138920553,
"learning_rate": 5.190311418685121e-07,
"loss": 0.8319,
"step": 30
},
{
"epoch": 0.06,
"grad_norm": 1.9741009575137782,
"learning_rate": 6.055363321799308e-07,
"loss": 0.8334,
"step": 35
},
{
"epoch": 0.07,
"grad_norm": 1.9300494756762427,
"learning_rate": 6.920415224913496e-07,
"loss": 0.8206,
"step": 40
},
{
"epoch": 0.08,
"grad_norm": 1.7903640743849025,
"learning_rate": 7.785467128027683e-07,
"loss": 0.7872,
"step": 45
},
{
"epoch": 0.09,
"grad_norm": 1.9336160245419374,
"learning_rate": 8.650519031141868e-07,
"loss": 0.8006,
"step": 50
},
{
"epoch": 0.1,
"grad_norm": 1.8895155835527617,
"learning_rate": 9.515570934256056e-07,
"loss": 0.8079,
"step": 55
},
{
"epoch": 0.1,
"grad_norm": 1.802498228874264,
"learning_rate": 1.0380622837370243e-06,
"loss": 0.803,
"step": 60
},
{
"epoch": 0.11,
"grad_norm": 1.777164779599201,
"learning_rate": 1.124567474048443e-06,
"loss": 0.8124,
"step": 65
},
{
"epoch": 0.12,
"grad_norm": 1.8054173138128906,
"learning_rate": 1.2110726643598616e-06,
"loss": 0.8183,
"step": 70
},
{
"epoch": 0.13,
"grad_norm": 1.9350493231536463,
"learning_rate": 1.2975778546712802e-06,
"loss": 0.8029,
"step": 75
},
{
"epoch": 0.14,
"grad_norm": 1.8950537177996267,
"learning_rate": 1.3840830449826992e-06,
"loss": 0.8138,
"step": 80
},
{
"epoch": 0.15,
"grad_norm": 1.8019972698305375,
"learning_rate": 1.4705882352941177e-06,
"loss": 0.8036,
"step": 85
},
{
"epoch": 0.16,
"grad_norm": 1.9102546088528127,
"learning_rate": 1.5570934256055365e-06,
"loss": 0.7893,
"step": 90
},
{
"epoch": 0.16,
"grad_norm": 1.8718974603674414,
"learning_rate": 1.643598615916955e-06,
"loss": 0.7821,
"step": 95
},
{
"epoch": 0.17,
"grad_norm": 1.8905357930834985,
"learning_rate": 1.7301038062283736e-06,
"loss": 0.7811,
"step": 100
},
{
"epoch": 0.18,
"grad_norm": 1.8446962030261018,
"learning_rate": 1.8166089965397926e-06,
"loss": 0.7687,
"step": 105
},
{
"epoch": 0.19,
"grad_norm": 1.9762667024263982,
"learning_rate": 1.9031141868512112e-06,
"loss": 0.7876,
"step": 110
},
{
"epoch": 0.2,
"grad_norm": 1.9200753281543292,
"learning_rate": 1.9896193771626298e-06,
"loss": 0.7856,
"step": 115
},
{
"epoch": 0.21,
"grad_norm": 1.9558456094506125,
"learning_rate": 2.0761245674740485e-06,
"loss": 0.7532,
"step": 120
},
{
"epoch": 0.22,
"grad_norm": 1.9761173872923619,
"learning_rate": 2.1626297577854673e-06,
"loss": 0.7732,
"step": 125
},
{
"epoch": 0.22,
"grad_norm": 1.9910728274837823,
"learning_rate": 2.249134948096886e-06,
"loss": 0.7694,
"step": 130
},
{
"epoch": 0.23,
"grad_norm": 2.1510352769592007,
"learning_rate": 2.335640138408305e-06,
"loss": 0.7612,
"step": 135
},
{
"epoch": 0.24,
"grad_norm": 2.0884221130022373,
"learning_rate": 2.4221453287197232e-06,
"loss": 0.7667,
"step": 140
},
{
"epoch": 0.25,
"grad_norm": 2.0730526163013345,
"learning_rate": 2.508650519031142e-06,
"loss": 0.7734,
"step": 145
},
{
"epoch": 0.26,
"grad_norm": 2.071966255411125,
"learning_rate": 2.5951557093425604e-06,
"loss": 0.7558,
"step": 150
},
{
"epoch": 0.27,
"grad_norm": 2.140324284318863,
"learning_rate": 2.6816608996539796e-06,
"loss": 0.7453,
"step": 155
},
{
"epoch": 0.28,
"grad_norm": 2.0794979181710866,
"learning_rate": 2.7681660899653983e-06,
"loss": 0.7413,
"step": 160
},
{
"epoch": 0.29,
"grad_norm": 2.079407078921554,
"learning_rate": 2.8546712802768167e-06,
"loss": 0.7331,
"step": 165
},
{
"epoch": 0.29,
"grad_norm": 2.0051529944042445,
"learning_rate": 2.9411764705882355e-06,
"loss": 0.751,
"step": 170
},
{
"epoch": 0.3,
"grad_norm": 2.075289040593278,
"learning_rate": 3.027681660899654e-06,
"loss": 0.7369,
"step": 175
},
{
"epoch": 0.31,
"grad_norm": 2.009911706722013,
"learning_rate": 3.114186851211073e-06,
"loss": 0.7239,
"step": 180
},
{
"epoch": 0.32,
"grad_norm": 2.114683663985779,
"learning_rate": 3.200692041522492e-06,
"loss": 0.7219,
"step": 185
},
{
"epoch": 0.33,
"grad_norm": 2.1266461925060343,
"learning_rate": 3.28719723183391e-06,
"loss": 0.7163,
"step": 190
},
{
"epoch": 0.34,
"grad_norm": 2.0699467756030048,
"learning_rate": 3.373702422145329e-06,
"loss": 0.7246,
"step": 195
},
{
"epoch": 0.35,
"grad_norm": 1.9178177043552207,
"learning_rate": 3.4602076124567473e-06,
"loss": 0.715,
"step": 200
},
{
"epoch": 0.35,
"grad_norm": 1.9401101711848872,
"learning_rate": 3.5467128027681665e-06,
"loss": 0.6888,
"step": 205
},
{
"epoch": 0.36,
"grad_norm": 1.7637994756169328,
"learning_rate": 3.6332179930795853e-06,
"loss": 0.7095,
"step": 210
},
{
"epoch": 0.37,
"grad_norm": 1.8879618486444918,
"learning_rate": 3.7197231833910036e-06,
"loss": 0.7159,
"step": 215
},
{
"epoch": 0.38,
"grad_norm": 1.7360031299470728,
"learning_rate": 3.8062283737024224e-06,
"loss": 0.7111,
"step": 220
},
{
"epoch": 0.39,
"grad_norm": 1.6156910333813757,
"learning_rate": 3.892733564013842e-06,
"loss": 0.7136,
"step": 225
},
{
"epoch": 0.4,
"grad_norm": 1.5128961988226293,
"learning_rate": 3.9792387543252595e-06,
"loss": 0.7199,
"step": 230
},
{
"epoch": 0.41,
"grad_norm": 1.5314262584578413,
"learning_rate": 4.065743944636678e-06,
"loss": 0.7051,
"step": 235
},
{
"epoch": 0.42,
"grad_norm": 1.7503445297432856,
"learning_rate": 4.152249134948097e-06,
"loss": 0.6951,
"step": 240
},
{
"epoch": 0.42,
"grad_norm": 1.727205989077596,
"learning_rate": 4.238754325259516e-06,
"loss": 0.6967,
"step": 245
},
{
"epoch": 0.43,
"grad_norm": 1.512173137191571,
"learning_rate": 4.325259515570935e-06,
"loss": 0.6951,
"step": 250
},
{
"epoch": 0.44,
"grad_norm": 1.5906205947939107,
"learning_rate": 4.411764705882353e-06,
"loss": 0.7177,
"step": 255
},
{
"epoch": 0.45,
"grad_norm": 1.4011742158056826,
"learning_rate": 4.498269896193772e-06,
"loss": 0.6955,
"step": 260
},
{
"epoch": 0.46,
"grad_norm": 1.3418261511629705,
"learning_rate": 4.58477508650519e-06,
"loss": 0.6845,
"step": 265
},
{
"epoch": 0.47,
"grad_norm": 1.5187095864301963,
"learning_rate": 4.67128027681661e-06,
"loss": 0.7068,
"step": 270
},
{
"epoch": 0.48,
"grad_norm": 1.62902883041799,
"learning_rate": 4.7577854671280285e-06,
"loss": 0.6937,
"step": 275
},
{
"epoch": 0.48,
"grad_norm": 1.3634212608530258,
"learning_rate": 4.8442906574394464e-06,
"loss": 0.6858,
"step": 280
},
{
"epoch": 0.49,
"grad_norm": 1.5038430309774804,
"learning_rate": 4.930795847750865e-06,
"loss": 0.6988,
"step": 285
},
{
"epoch": 0.5,
"grad_norm": 1.3823728989860273,
"learning_rate": 4.999998176402445e-06,
"loss": 0.6895,
"step": 290
},
{
"epoch": 0.51,
"grad_norm": 1.7851921485205167,
"learning_rate": 4.999934350767369e-06,
"loss": 0.6972,
"step": 295
},
{
"epoch": 0.52,
"grad_norm": 1.5012843187747695,
"learning_rate": 4.99977934791494e-06,
"loss": 0.6909,
"step": 300
},
{
"epoch": 0.53,
"grad_norm": 1.4846464217080884,
"learning_rate": 4.9995331734983985e-06,
"loss": 0.6784,
"step": 305
},
{
"epoch": 0.54,
"grad_norm": 1.4416671840566515,
"learning_rate": 4.999195836496179e-06,
"loss": 0.6885,
"step": 310
},
{
"epoch": 0.54,
"grad_norm": 1.3968335334596063,
"learning_rate": 4.998767349211587e-06,
"loss": 0.6632,
"step": 315
},
{
"epoch": 0.55,
"grad_norm": 1.4727540225441376,
"learning_rate": 4.9982477272723404e-06,
"loss": 0.6997,
"step": 320
},
{
"epoch": 0.56,
"grad_norm": 2.0150510241724295,
"learning_rate": 4.997636989630012e-06,
"loss": 0.681,
"step": 325
},
{
"epoch": 0.57,
"grad_norm": 1.5786095690969104,
"learning_rate": 4.996935158559329e-06,
"loss": 0.7052,
"step": 330
},
{
"epoch": 0.58,
"grad_norm": 1.4930115902628418,
"learning_rate": 4.9961422596573674e-06,
"loss": 0.683,
"step": 335
},
{
"epoch": 0.59,
"grad_norm": 1.417488194165555,
"learning_rate": 4.995258321842611e-06,
"loss": 0.6724,
"step": 340
},
{
"epoch": 0.6,
"grad_norm": 1.5814802052424026,
"learning_rate": 4.994283377353902e-06,
"loss": 0.708,
"step": 345
},
{
"epoch": 0.61,
"grad_norm": 1.626327112384558,
"learning_rate": 4.993217461749266e-06,
"loss": 0.6769,
"step": 350
},
{
"epoch": 0.61,
"grad_norm": 1.4561720499145596,
"learning_rate": 4.992060613904611e-06,
"loss": 0.6827,
"step": 355
},
{
"epoch": 0.62,
"grad_norm": 1.4209579264748098,
"learning_rate": 4.990812876012311e-06,
"loss": 0.6843,
"step": 360
},
{
"epoch": 0.63,
"grad_norm": 1.3963649914734755,
"learning_rate": 4.989474293579669e-06,
"loss": 0.678,
"step": 365
},
{
"epoch": 0.64,
"grad_norm": 1.5340840894447103,
"learning_rate": 4.988044915427257e-06,
"loss": 0.6753,
"step": 370
},
{
"epoch": 0.65,
"grad_norm": 1.4322002446736155,
"learning_rate": 4.986524793687131e-06,
"loss": 0.6913,
"step": 375
},
{
"epoch": 0.66,
"grad_norm": 1.494759555714755,
"learning_rate": 4.984913983800936e-06,
"loss": 0.6601,
"step": 380
},
{
"epoch": 0.67,
"grad_norm": 1.5286688918221005,
"learning_rate": 4.983212544517878e-06,
"loss": 0.6708,
"step": 385
},
{
"epoch": 0.67,
"grad_norm": 1.52282481083145,
"learning_rate": 4.9814205378925895e-06,
"loss": 0.6582,
"step": 390
},
{
"epoch": 0.68,
"grad_norm": 1.6753513410463947,
"learning_rate": 4.979538029282856e-06,
"loss": 0.679,
"step": 395
},
{
"epoch": 0.69,
"grad_norm": 1.5212718767367754,
"learning_rate": 4.977565087347239e-06,
"loss": 0.667,
"step": 400
},
{
"epoch": 0.7,
"grad_norm": 1.4524081871963455,
"learning_rate": 4.975501784042572e-06,
"loss": 0.6707,
"step": 405
},
{
"epoch": 0.71,
"grad_norm": 1.7921858867602187,
"learning_rate": 4.973348194621329e-06,
"loss": 0.6741,
"step": 410
},
{
"epoch": 0.72,
"grad_norm": 1.5845517579748196,
"learning_rate": 4.9711043976288935e-06,
"loss": 0.6791,
"step": 415
},
{
"epoch": 0.73,
"grad_norm": 1.4343033765555364,
"learning_rate": 4.968770474900677e-06,
"loss": 0.6607,
"step": 420
},
{
"epoch": 0.74,
"grad_norm": 1.5295122616615218,
"learning_rate": 4.966346511559149e-06,
"loss": 0.6624,
"step": 425
},
{
"epoch": 0.74,
"grad_norm": 1.4896437285857649,
"learning_rate": 4.963832596010723e-06,
"loss": 0.6677,
"step": 430
},
{
"epoch": 0.75,
"grad_norm": 1.5044244825090314,
"learning_rate": 4.961228819942538e-06,
"loss": 0.6722,
"step": 435
},
{
"epoch": 0.76,
"grad_norm": 1.4992891216972632,
"learning_rate": 4.958535278319109e-06,
"loss": 0.6529,
"step": 440
},
{
"epoch": 0.77,
"grad_norm": 1.5676366345308723,
"learning_rate": 4.955752069378867e-06,
"loss": 0.6682,
"step": 445
},
{
"epoch": 0.78,
"grad_norm": 1.566761422297736,
"learning_rate": 4.952879294630577e-06,
"loss": 0.6674,
"step": 450
},
{
"epoch": 0.79,
"grad_norm": 1.593607408287518,
"learning_rate": 4.949917058849635e-06,
"loss": 0.6685,
"step": 455
},
{
"epoch": 0.8,
"grad_norm": 1.4655841734364505,
"learning_rate": 4.946865470074241e-06,
"loss": 0.6706,
"step": 460
},
{
"epoch": 0.8,
"grad_norm": 1.4680095746651978,
"learning_rate": 4.9437246396014695e-06,
"loss": 0.6503,
"step": 465
},
{
"epoch": 0.81,
"grad_norm": 1.6414158311618077,
"learning_rate": 4.940494681983201e-06,
"loss": 0.6483,
"step": 470
},
{
"epoch": 0.82,
"grad_norm": 1.647070417024019,
"learning_rate": 4.9371757150219475e-06,
"loss": 0.6708,
"step": 475
},
{
"epoch": 0.83,
"grad_norm": 1.5786850858034276,
"learning_rate": 4.933767859766557e-06,
"loss": 0.6406,
"step": 480
},
{
"epoch": 0.84,
"grad_norm": 2.3967395390307664,
"learning_rate": 4.930271240507797e-06,
"loss": 0.6469,
"step": 485
},
{
"epoch": 0.85,
"grad_norm": 1.863770908509767,
"learning_rate": 4.926685984773822e-06,
"loss": 0.6721,
"step": 490
},
{
"epoch": 0.86,
"grad_norm": 1.4091042690875146,
"learning_rate": 4.923012223325522e-06,
"loss": 0.6418,
"step": 495
},
{
"epoch": 0.87,
"grad_norm": 1.4211964673604418,
"learning_rate": 4.919250090151754e-06,
"loss": 0.6384,
"step": 500
},
{
"epoch": 0.87,
"grad_norm": 1.5350233939025446,
"learning_rate": 4.915399722464455e-06,
"loss": 0.6365,
"step": 505
},
{
"epoch": 0.88,
"grad_norm": 1.5608101804229912,
"learning_rate": 4.911461260693639e-06,
"loss": 0.6349,
"step": 510
},
{
"epoch": 0.89,
"grad_norm": 1.471894754845902,
"learning_rate": 4.907434848482268e-06,
"loss": 0.6324,
"step": 515
},
{
"epoch": 0.9,
"grad_norm": 1.5341990166130748,
"learning_rate": 4.903320632681026e-06,
"loss": 0.6434,
"step": 520
},
{
"epoch": 0.91,
"grad_norm": 1.4680803495559631,
"learning_rate": 4.899118763342952e-06,
"loss": 0.6466,
"step": 525
},
{
"epoch": 0.92,
"grad_norm": 1.786833703301667,
"learning_rate": 4.89482939371797e-06,
"loss": 0.6599,
"step": 530
},
{
"epoch": 0.93,
"grad_norm": 1.5749569225927729,
"learning_rate": 4.890452680247305e-06,
"loss": 0.641,
"step": 535
},
{
"epoch": 0.93,
"grad_norm": 1.3756860207273784,
"learning_rate": 4.885988782557771e-06,
"loss": 0.6372,
"step": 540
},
{
"epoch": 0.94,
"grad_norm": 1.6093553203322744,
"learning_rate": 4.88143786345595e-06,
"loss": 0.6053,
"step": 545
},
{
"epoch": 0.95,
"grad_norm": 1.417201052972172,
"learning_rate": 4.876800088922257e-06,
"loss": 0.6288,
"step": 550
},
{
"epoch": 0.96,
"grad_norm": 1.512172943891037,
"learning_rate": 4.872075628104884e-06,
"loss": 0.6352,
"step": 555
},
{
"epoch": 0.97,
"grad_norm": 1.6791678410000617,
"learning_rate": 4.867264653313633e-06,
"loss": 0.6473,
"step": 560
},
{
"epoch": 0.98,
"grad_norm": 1.4591202016380542,
"learning_rate": 4.8623673400136295e-06,
"loss": 0.6364,
"step": 565
},
{
"epoch": 0.99,
"grad_norm": 1.5298614351963449,
"learning_rate": 4.8573838668189225e-06,
"loss": 0.6295,
"step": 570
},
{
"epoch": 0.99,
"grad_norm": 1.5276546684651868,
"learning_rate": 4.852314415485973e-06,
"loss": 0.6331,
"step": 575
},
{
"epoch": 1.0,
"eval_loss": 0.622621476650238,
"eval_runtime": 330.1558,
"eval_samples_per_second": 22.78,
"eval_steps_per_second": 0.357,
"step": 578
},
{
"epoch": 1.0,
"grad_norm": 2.564141000897205,
"learning_rate": 4.847159170907022e-06,
"loss": 0.5882,
"step": 580
},
{
"epoch": 1.01,
"grad_norm": 1.702572539352643,
"learning_rate": 4.841918321103349e-06,
"loss": 0.5574,
"step": 585
},
{
"epoch": 1.02,
"grad_norm": 1.8542783824172295,
"learning_rate": 4.836592057218416e-06,
"loss": 0.5321,
"step": 590
},
{
"epoch": 1.03,
"grad_norm": 1.6168277685945358,
"learning_rate": 4.83118057351089e-06,
"loss": 0.5301,
"step": 595
},
{
"epoch": 1.04,
"grad_norm": 1.7040979617839176,
"learning_rate": 4.825684067347565e-06,
"loss": 0.5369,
"step": 600
},
{
"epoch": 1.05,
"grad_norm": 1.6826187176546037,
"learning_rate": 4.82010273919616e-06,
"loss": 0.521,
"step": 605
},
{
"epoch": 1.06,
"grad_norm": 1.7537542015921341,
"learning_rate": 4.8144367926180084e-06,
"loss": 0.5362,
"step": 610
},
{
"epoch": 1.06,
"grad_norm": 1.5596851176953166,
"learning_rate": 4.8086864342606345e-06,
"loss": 0.5421,
"step": 615
},
{
"epoch": 1.07,
"grad_norm": 1.612414749994373,
"learning_rate": 4.802851873850214e-06,
"loss": 0.5542,
"step": 620
},
{
"epoch": 1.08,
"grad_norm": 1.6043297699907004,
"learning_rate": 4.796933324183928e-06,
"loss": 0.526,
"step": 625
},
{
"epoch": 1.09,
"grad_norm": 1.5705483698698184,
"learning_rate": 4.790931001122202e-06,
"loss": 0.5267,
"step": 630
},
{
"epoch": 1.1,
"grad_norm": 1.6281592397386129,
"learning_rate": 4.784845123580829e-06,
"loss": 0.5367,
"step": 635
},
{
"epoch": 1.11,
"grad_norm": 1.5966059820228211,
"learning_rate": 4.77867591352299e-06,
"loss": 0.535,
"step": 640
},
{
"epoch": 1.12,
"grad_norm": 1.8510817844041834,
"learning_rate": 4.7724235959511566e-06,
"loss": 0.5495,
"step": 645
},
{
"epoch": 1.12,
"grad_norm": 1.8997498768876209,
"learning_rate": 4.766088398898884e-06,
"loss": 0.5403,
"step": 650
},
{
"epoch": 1.13,
"grad_norm": 1.7160489659947078,
"learning_rate": 4.759670553422494e-06,
"loss": 0.5347,
"step": 655
},
{
"epoch": 1.14,
"grad_norm": 1.7971855635226874,
"learning_rate": 4.7531702935926505e-06,
"loss": 0.537,
"step": 660
},
{
"epoch": 1.15,
"grad_norm": 1.7064772479008743,
"learning_rate": 4.74658785648582e-06,
"loss": 0.5579,
"step": 665
},
{
"epoch": 1.16,
"grad_norm": 1.4680823102221994,
"learning_rate": 4.7399234821756285e-06,
"loss": 0.5276,
"step": 670
},
{
"epoch": 1.17,
"grad_norm": 2.0480120228699814,
"learning_rate": 4.733177413724098e-06,
"loss": 0.5417,
"step": 675
},
{
"epoch": 1.18,
"grad_norm": 4.470377211022557,
"learning_rate": 4.726349897172791e-06,
"loss": 0.5292,
"step": 680
},
{
"epoch": 1.19,
"grad_norm": 1.7455236825499205,
"learning_rate": 4.71944118153383e-06,
"loss": 0.5486,
"step": 685
},
{
"epoch": 1.19,
"grad_norm": 1.6229800058296617,
"learning_rate": 4.7124515187808186e-06,
"loss": 0.5403,
"step": 690
},
{
"epoch": 1.2,
"grad_norm": 1.6120491331538838,
"learning_rate": 4.705381163839652e-06,
"loss": 0.5212,
"step": 695
},
{
"epoch": 1.21,
"grad_norm": 1.5707477382476385,
"learning_rate": 4.698230374579216e-06,
"loss": 0.513,
"step": 700
},
{
"epoch": 1.22,
"grad_norm": 1.8038956520870928,
"learning_rate": 4.690999411801987e-06,
"loss": 0.5505,
"step": 705
},
{
"epoch": 1.23,
"grad_norm": 1.4977964665899992,
"learning_rate": 4.683688539234515e-06,
"loss": 0.5447,
"step": 710
},
{
"epoch": 1.24,
"grad_norm": 1.64524294757822,
"learning_rate": 4.676298023517811e-06,
"loss": 0.5252,
"step": 715
},
{
"epoch": 1.25,
"grad_norm": 1.8439638298190486,
"learning_rate": 4.668828134197615e-06,
"loss": 0.5288,
"step": 720
},
{
"epoch": 1.25,
"grad_norm": 1.6551302109627803,
"learning_rate": 4.66127914371457e-06,
"loss": 0.5042,
"step": 725
},
{
"epoch": 1.26,
"grad_norm": 1.8432213805384519,
"learning_rate": 4.653651327394284e-06,
"loss": 0.5075,
"step": 730
},
{
"epoch": 1.27,
"grad_norm": 1.7991942501765485,
"learning_rate": 4.64594496343729e-06,
"loss": 0.5234,
"step": 735
},
{
"epoch": 1.28,
"grad_norm": 1.560979934022783,
"learning_rate": 4.638160332908896e-06,
"loss": 0.5076,
"step": 740
},
{
"epoch": 1.29,
"grad_norm": 1.5466509824875485,
"learning_rate": 4.630297719728939e-06,
"loss": 0.5204,
"step": 745
},
{
"epoch": 1.3,
"grad_norm": 1.7134330421496313,
"learning_rate": 4.622357410661425e-06,
"loss": 0.5136,
"step": 750
},
{
"epoch": 1.31,
"grad_norm": 1.9274332369383436,
"learning_rate": 4.6143396953040735e-06,
"loss": 0.5121,
"step": 755
},
{
"epoch": 1.31,
"grad_norm": 1.4937551003793812,
"learning_rate": 4.606244866077752e-06,
"loss": 0.527,
"step": 760
},
{
"epoch": 1.32,
"grad_norm": 2.658551374926485,
"learning_rate": 4.598073218215817e-06,
"loss": 0.5458,
"step": 765
},
{
"epoch": 1.33,
"grad_norm": 2.224040870079962,
"learning_rate": 4.5898250497533395e-06,
"loss": 0.5436,
"step": 770
},
{
"epoch": 1.34,
"grad_norm": 2.7319048521917635,
"learning_rate": 4.581500661516237e-06,
"loss": 0.5313,
"step": 775
},
{
"epoch": 1.35,
"grad_norm": 1.6998409225129496,
"learning_rate": 4.573100357110306e-06,
"loss": 0.5173,
"step": 780
},
{
"epoch": 1.36,
"grad_norm": 1.5581752222534802,
"learning_rate": 4.564624442910144e-06,
"loss": 0.5301,
"step": 785
},
{
"epoch": 1.37,
"grad_norm": 1.6962356595619175,
"learning_rate": 4.556073228047977e-06,
"loss": 0.5313,
"step": 790
},
{
"epoch": 1.38,
"grad_norm": 1.8219089406617814,
"learning_rate": 4.547447024402385e-06,
"loss": 0.5335,
"step": 795
},
{
"epoch": 1.38,
"grad_norm": 1.62352160937354,
"learning_rate": 4.5387461465869275e-06,
"loss": 0.5459,
"step": 800
},
{
"epoch": 1.39,
"grad_norm": 1.5257648113589735,
"learning_rate": 4.529970911938669e-06,
"loss": 0.5111,
"step": 805
},
{
"epoch": 1.4,
"grad_norm": 2.5973864011536416,
"learning_rate": 4.521121640506605e-06,
"loss": 0.5352,
"step": 810
},
{
"epoch": 1.41,
"grad_norm": 2.1347665405267384,
"learning_rate": 4.512198655039988e-06,
"loss": 0.5208,
"step": 815
},
{
"epoch": 1.42,
"grad_norm": 1.8537528730656059,
"learning_rate": 4.503202280976557e-06,
"loss": 0.5064,
"step": 820
},
{
"epoch": 1.43,
"grad_norm": 2.1504491301566784,
"learning_rate": 4.494132846430672e-06,
"loss": 0.5186,
"step": 825
},
{
"epoch": 1.44,
"grad_norm": 14.695605966854037,
"learning_rate": 4.4849906821813406e-06,
"loss": 0.5349,
"step": 830
},
{
"epoch": 1.44,
"grad_norm": 2.063212121669853,
"learning_rate": 4.475776121660156e-06,
"loss": 0.531,
"step": 835
},
{
"epoch": 1.45,
"grad_norm": 1.834832161110646,
"learning_rate": 4.466489500939138e-06,
"loss": 0.5143,
"step": 840
},
{
"epoch": 1.46,
"grad_norm": 1.7633276487477196,
"learning_rate": 4.457131158718478e-06,
"loss": 0.5331,
"step": 845
},
{
"epoch": 1.47,
"grad_norm": 1.809450693070927,
"learning_rate": 4.447701436314176e-06,
"loss": 0.5283,
"step": 850
},
{
"epoch": 1.48,
"grad_norm": 1.6601361692100707,
"learning_rate": 4.438200677645607e-06,
"loss": 0.5371,
"step": 855
},
{
"epoch": 1.49,
"grad_norm": 1.6494075536487005,
"learning_rate": 4.428629229222962e-06,
"loss": 0.5334,
"step": 860
},
{
"epoch": 1.5,
"grad_norm": 1.5995037205122475,
"learning_rate": 4.418987440134624e-06,
"loss": 0.5171,
"step": 865
},
{
"epoch": 1.51,
"grad_norm": 1.6352796020571934,
"learning_rate": 4.4092756620344246e-06,
"loss": 0.5148,
"step": 870
},
{
"epoch": 1.51,
"grad_norm": 7.045247439882991,
"learning_rate": 4.3994942491288275e-06,
"loss": 1.7556,
"step": 875
},
{
"epoch": 1.52,
"grad_norm": 6.128928116038949,
"learning_rate": 4.389643558164005e-06,
"loss": 0.5763,
"step": 880
},
{
"epoch": 1.53,
"grad_norm": 36.49282493029366,
"learning_rate": 4.379723948412828e-06,
"loss": 0.6,
"step": 885
},
{
"epoch": 1.54,
"grad_norm": 9.579002986726392,
"learning_rate": 4.369735781661763e-06,
"loss": 0.6819,
"step": 890
},
{
"epoch": 1.55,
"grad_norm": 12.266502746290532,
"learning_rate": 4.3596794221976755e-06,
"loss": 0.6053,
"step": 895
},
{
"epoch": 1.56,
"grad_norm": 3.9630821498228874,
"learning_rate": 4.349555236794546e-06,
"loss": 0.6229,
"step": 900
},
{
"epoch": 1.57,
"grad_norm": 3.4771323398284566,
"learning_rate": 4.339363594700093e-06,
"loss": 0.585,
"step": 905
},
{
"epoch": 1.57,
"grad_norm": 2.201457058274829,
"learning_rate": 4.329104867622302e-06,
"loss": 0.5611,
"step": 910
},
{
"epoch": 1.58,
"grad_norm": 1.9654853072206555,
"learning_rate": 4.3187794297158765e-06,
"loss": 0.5648,
"step": 915
},
{
"epoch": 1.59,
"grad_norm": 4.227758915564248,
"learning_rate": 4.308387657568582e-06,
"loss": 0.5481,
"step": 920
},
{
"epoch": 1.6,
"grad_norm": 1.7148546445696602,
"learning_rate": 4.297929930187519e-06,
"loss": 0.548,
"step": 925
},
{
"epoch": 1.61,
"grad_norm": 1.7282534502834466,
"learning_rate": 4.287406628985295e-06,
"loss": 0.5669,
"step": 930
},
{
"epoch": 1.62,
"grad_norm": 1.5604162135037978,
"learning_rate": 4.276818137766118e-06,
"loss": 0.5357,
"step": 935
},
{
"epoch": 1.63,
"grad_norm": 1.7385091646846929,
"learning_rate": 4.266164842711797e-06,
"loss": 0.5371,
"step": 940
},
{
"epoch": 1.63,
"grad_norm": 1.8757343048736053,
"learning_rate": 4.255447132367651e-06,
"loss": 0.5356,
"step": 945
},
{
"epoch": 1.64,
"grad_norm": 1.5821779705858339,
"learning_rate": 4.2446653976283485e-06,
"loss": 0.534,
"step": 950
},
{
"epoch": 1.65,
"grad_norm": 1.7181301166792315,
"learning_rate": 4.233820031723644e-06,
"loss": 0.5192,
"step": 955
},
{
"epoch": 1.66,
"grad_norm": 1.7620628413002903,
"learning_rate": 4.22291143020404e-06,
"loss": 0.527,
"step": 960
},
{
"epoch": 1.67,
"grad_norm": 1.588909360094358,
"learning_rate": 4.211939990926355e-06,
"loss": 0.5205,
"step": 965
},
{
"epoch": 1.68,
"grad_norm": 1.6838208492860955,
"learning_rate": 4.200906114039219e-06,
"loss": 0.5298,
"step": 970
},
{
"epoch": 1.69,
"grad_norm": 1.9081461502343906,
"learning_rate": 4.1898102019684765e-06,
"loss": 0.502,
"step": 975
},
{
"epoch": 1.7,
"grad_norm": 1.4961424837742658,
"learning_rate": 4.1786526594025065e-06,
"loss": 0.5372,
"step": 980
},
{
"epoch": 1.7,
"grad_norm": 1.6252683024976837,
"learning_rate": 4.167433893277473e-06,
"loss": 0.5269,
"step": 985
},
{
"epoch": 1.71,
"grad_norm": 1.5490581530552676,
"learning_rate": 4.156154312762467e-06,
"loss": 0.5285,
"step": 990
},
{
"epoch": 1.72,
"grad_norm": 1.8061470924061633,
"learning_rate": 4.1448143292446e-06,
"loss": 0.5503,
"step": 995
},
{
"epoch": 1.73,
"grad_norm": 1.5936238911397005,
"learning_rate": 4.133414356313988e-06,
"loss": 0.535,
"step": 1000
},
{
"epoch": 1.74,
"grad_norm": 1.5524395421750514,
"learning_rate": 4.121954809748675e-06,
"loss": 0.5278,
"step": 1005
},
{
"epoch": 1.75,
"grad_norm": 1.5740976269214233,
"learning_rate": 4.110436107499462e-06,
"loss": 0.5197,
"step": 1010
},
{
"epoch": 1.76,
"grad_norm": 1.5214013327972284,
"learning_rate": 4.098858669674667e-06,
"loss": 0.5202,
"step": 1015
},
{
"epoch": 1.76,
"grad_norm": 1.5237741216662357,
"learning_rate": 4.087222918524807e-06,
"loss": 0.5088,
"step": 1020
},
{
"epoch": 1.77,
"grad_norm": 1.6738078012815194,
"learning_rate": 4.07552927842719e-06,
"loss": 0.5166,
"step": 1025
},
{
"epoch": 1.78,
"grad_norm": 1.5517613536326162,
"learning_rate": 4.06377817587044e-06,
"loss": 0.4918,
"step": 1030
},
{
"epoch": 1.79,
"grad_norm": 1.431747331311476,
"learning_rate": 4.051970039438945e-06,
"loss": 0.5133,
"step": 1035
},
{
"epoch": 1.8,
"grad_norm": 1.4767269458087147,
"learning_rate": 4.0401052997972235e-06,
"loss": 0.5033,
"step": 1040
},
{
"epoch": 1.81,
"grad_norm": 1.5180384694427076,
"learning_rate": 4.028184389674216e-06,
"loss": 0.5058,
"step": 1045
},
{
"epoch": 1.82,
"grad_norm": 1.5889385254960862,
"learning_rate": 4.016207743847505e-06,
"loss": 0.515,
"step": 1050
},
{
"epoch": 1.83,
"grad_norm": 1.5532809579850755,
"learning_rate": 4.004175799127458e-06,
"loss": 0.5252,
"step": 1055
},
{
"epoch": 1.83,
"grad_norm": 1.5669745733483815,
"learning_rate": 3.992088994341292e-06,
"loss": 0.5237,
"step": 1060
},
{
"epoch": 1.84,
"grad_norm": 1.7971179594367128,
"learning_rate": 3.979947770317077e-06,
"loss": 0.5401,
"step": 1065
},
{
"epoch": 1.85,
"grad_norm": 1.6564666981215765,
"learning_rate": 3.967752569867645e-06,
"loss": 0.5315,
"step": 1070
},
{
"epoch": 1.86,
"grad_norm": 1.596740218503028,
"learning_rate": 3.955503837774458e-06,
"loss": 0.5124,
"step": 1075
},
{
"epoch": 1.87,
"grad_norm": 1.5508137957824288,
"learning_rate": 3.943202020771367e-06,
"loss": 0.5219,
"step": 1080
},
{
"epoch": 1.88,
"grad_norm": 1.7093498845192874,
"learning_rate": 3.9308475675283326e-06,
"loss": 0.5265,
"step": 1085
},
{
"epoch": 1.89,
"grad_norm": 1.5364211220041548,
"learning_rate": 3.9184409286350535e-06,
"loss": 0.5249,
"step": 1090
},
{
"epoch": 1.89,
"grad_norm": 1.588131611670702,
"learning_rate": 3.905982556584537e-06,
"loss": 0.5132,
"step": 1095
},
{
"epoch": 1.9,
"grad_norm": 1.4898401463528494,
"learning_rate": 3.893472905756593e-06,
"loss": 0.5241,
"step": 1100
},
{
"epoch": 1.91,
"grad_norm": 1.655966866153354,
"learning_rate": 3.880912432401265e-06,
"loss": 0.5098,
"step": 1105
},
{
"epoch": 1.92,
"grad_norm": 1.4977265046553758,
"learning_rate": 3.868301594622185e-06,
"loss": 0.5189,
"step": 1110
},
{
"epoch": 1.93,
"grad_norm": 1.45173341143097,
"learning_rate": 3.855640852359869e-06,
"loss": 0.4974,
"step": 1115
},
{
"epoch": 1.94,
"grad_norm": 1.8226392308673234,
"learning_rate": 3.842930667374945e-06,
"loss": 0.5122,
"step": 1120
},
{
"epoch": 1.95,
"grad_norm": 1.479895371145679,
"learning_rate": 3.830171503231302e-06,
"loss": 0.5077,
"step": 1125
},
{
"epoch": 1.96,
"grad_norm": 1.8170301741742978,
"learning_rate": 3.817363825279195e-06,
"loss": 0.5174,
"step": 1130
},
{
"epoch": 1.96,
"grad_norm": 1.6039721848461628,
"learning_rate": 3.8045081006382668e-06,
"loss": 0.5145,
"step": 1135
},
{
"epoch": 1.97,
"grad_norm": 2.198046435353487,
"learning_rate": 3.7916047981805058e-06,
"loss": 0.507,
"step": 1140
},
{
"epoch": 1.98,
"grad_norm": 2.538669249099628,
"learning_rate": 3.778654388513159e-06,
"loss": 0.4926,
"step": 1145
},
{
"epoch": 1.99,
"grad_norm": 1.490519700011853,
"learning_rate": 3.765657343961553e-06,
"loss": 0.5091,
"step": 1150
},
{
"epoch": 2.0,
"grad_norm": 1.609088164054738,
"learning_rate": 3.7526141385518794e-06,
"loss": 0.51,
"step": 1155
},
{
"epoch": 2.0,
"eval_loss": 0.5023414492607117,
"eval_runtime": 329.8071,
"eval_samples_per_second": 22.804,
"eval_steps_per_second": 0.358,
"step": 1156
},
{
"epoch": 2.01,
"grad_norm": 2.3682882817547433,
"learning_rate": 3.739525247993901e-06,
"loss": 0.4345,
"step": 1160
},
{
"epoch": 2.02,
"grad_norm": 2.198186338081821,
"learning_rate": 3.7263911496636003e-06,
"loss": 0.4221,
"step": 1165
},
{
"epoch": 2.02,
"grad_norm": 1.9622396068630974,
"learning_rate": 3.71321232258577e-06,
"loss": 0.4166,
"step": 1170
},
{
"epoch": 2.03,
"grad_norm": 1.9405841953564362,
"learning_rate": 3.6999892474165477e-06,
"loss": 0.4124,
"step": 1175
},
{
"epoch": 2.04,
"grad_norm": 1.6911138242049408,
"learning_rate": 3.6867224064258754e-06,
"loss": 0.3999,
"step": 1180
},
{
"epoch": 2.05,
"grad_norm": 1.805794848826172,
"learning_rate": 3.673412283479916e-06,
"loss": 0.3923,
"step": 1185
},
{
"epoch": 2.06,
"grad_norm": 1.7227193868132855,
"learning_rate": 3.660059364023409e-06,
"loss": 0.3922,
"step": 1190
},
{
"epoch": 2.07,
"grad_norm": 1.5633624388497611,
"learning_rate": 3.6466641350619564e-06,
"loss": 0.4081,
"step": 1195
},
{
"epoch": 2.08,
"grad_norm": 1.6049499642389522,
"learning_rate": 3.6332270851442697e-06,
"loss": 0.3988,
"step": 1200
},
{
"epoch": 2.08,
"grad_norm": 1.6302545860912692,
"learning_rate": 3.619748704344346e-06,
"loss": 0.4153,
"step": 1205
},
{
"epoch": 2.09,
"grad_norm": 1.893580997382411,
"learning_rate": 3.6062294842435958e-06,
"loss": 0.4118,
"step": 1210
},
{
"epoch": 2.1,
"grad_norm": 1.591008467389523,
"learning_rate": 3.5926699179129156e-06,
"loss": 0.3967,
"step": 1215
},
{
"epoch": 2.11,
"grad_norm": 1.8812505523353902,
"learning_rate": 3.579070499894703e-06,
"loss": 0.4105,
"step": 1220
},
{
"epoch": 2.12,
"grad_norm": 1.7312982189755863,
"learning_rate": 3.5654317261848182e-06,
"loss": 0.4088,
"step": 1225
},
{
"epoch": 2.13,
"grad_norm": 1.715794291601741,
"learning_rate": 3.5517540942144985e-06,
"loss": 0.3837,
"step": 1230
},
{
"epoch": 2.14,
"grad_norm": 1.571536156806173,
"learning_rate": 3.538038102832213e-06,
"loss": 0.4047,
"step": 1235
},
{
"epoch": 2.15,
"grad_norm": 1.6926403850601441,
"learning_rate": 3.5242842522854687e-06,
"loss": 0.4097,
"step": 1240
},
{
"epoch": 2.15,
"grad_norm": 1.5865520678177705,
"learning_rate": 3.5104930442025665e-06,
"loss": 0.3988,
"step": 1245
},
{
"epoch": 2.16,
"grad_norm": 1.6304875271885666,
"learning_rate": 3.4966649815743055e-06,
"loss": 0.4002,
"step": 1250
},
{
"epoch": 2.17,
"grad_norm": 1.5709135265237464,
"learning_rate": 3.4828005687356382e-06,
"loss": 0.4115,
"step": 1255
},
{
"epoch": 2.18,
"grad_norm": 1.6424713964605273,
"learning_rate": 3.4689003113472754e-06,
"loss": 0.3978,
"step": 1260
},
{
"epoch": 2.19,
"grad_norm": 1.8859757363715237,
"learning_rate": 3.454964716377247e-06,
"loss": 0.4203,
"step": 1265
},
{
"epoch": 2.2,
"grad_norm": 1.637179971274298,
"learning_rate": 3.440994292082408e-06,
"loss": 0.3908,
"step": 1270
},
{
"epoch": 2.21,
"grad_norm": 25.023011080924313,
"learning_rate": 3.4269895479899023e-06,
"loss": 0.4038,
"step": 1275
},
{
"epoch": 2.21,
"grad_norm": 2.830979911281441,
"learning_rate": 3.412950994878582e-06,
"loss": 0.4047,
"step": 1280
},
{
"epoch": 2.22,
"grad_norm": 1.8263955076269602,
"learning_rate": 3.3988791447603763e-06,
"loss": 0.3963,
"step": 1285
},
{
"epoch": 2.23,
"grad_norm": 1.6584308792186484,
"learning_rate": 3.3847745108616157e-06,
"loss": 0.4049,
"step": 1290
},
{
"epoch": 2.24,
"grad_norm": 1.868836216780196,
"learning_rate": 3.3706376076043183e-06,
"loss": 0.4123,
"step": 1295
},
{
"epoch": 2.25,
"grad_norm": 1.704922866865318,
"learning_rate": 3.356468950587424e-06,
"loss": 0.3919,
"step": 1300
},
{
"epoch": 2.26,
"grad_norm": 1.8511972094459495,
"learning_rate": 3.3422690565679895e-06,
"loss": 0.4118,
"step": 1305
},
{
"epoch": 2.27,
"grad_norm": 1.5826425092029799,
"learning_rate": 3.3280384434423447e-06,
"loss": 0.4072,
"step": 1310
},
{
"epoch": 2.28,
"grad_norm": 2.8458291903299555,
"learning_rate": 3.3137776302272017e-06,
"loss": 0.4044,
"step": 1315
},
{
"epoch": 2.28,
"grad_norm": 2.696905442165675,
"learning_rate": 3.2994871370407242e-06,
"loss": 0.405,
"step": 1320
},
{
"epoch": 2.29,
"grad_norm": 1.8719365939252683,
"learning_rate": 3.2851674850835615e-06,
"loss": 0.4091,
"step": 1325
},
{
"epoch": 2.3,
"grad_norm": 3.602956750004364,
"learning_rate": 3.2708191966198344e-06,
"loss": 0.3845,
"step": 1330
},
{
"epoch": 2.31,
"grad_norm": 1.7107089177149095,
"learning_rate": 3.2564427949580907e-06,
"loss": 0.4128,
"step": 1335
},
{
"epoch": 2.32,
"grad_norm": 1.782125003384775,
"learning_rate": 3.2420388044322204e-06,
"loss": 0.3957,
"step": 1340
},
{
"epoch": 2.33,
"grad_norm": 1.7157313501617861,
"learning_rate": 3.2276077503823276e-06,
"loss": 0.4164,
"step": 1345
},
{
"epoch": 2.34,
"grad_norm": 1.6189210159528598,
"learning_rate": 3.213150159135575e-06,
"loss": 0.4017,
"step": 1350
},
{
"epoch": 2.34,
"grad_norm": 1.7965605988243851,
"learning_rate": 3.198666557986984e-06,
"loss": 0.3995,
"step": 1355
},
{
"epoch": 2.35,
"grad_norm": 1.6750382286037944,
"learning_rate": 3.184157475180208e-06,
"loss": 0.4065,
"step": 1360
},
{
"epoch": 2.36,
"grad_norm": 1.7466143412529116,
"learning_rate": 3.169623439888259e-06,
"loss": 0.3913,
"step": 1365
},
{
"epoch": 2.37,
"grad_norm": 1.7094098689820845,
"learning_rate": 3.1550649821942165e-06,
"loss": 0.4055,
"step": 1370
},
{
"epoch": 2.38,
"grad_norm": 1.6546779771359428,
"learning_rate": 3.1404826330718884e-06,
"loss": 0.4072,
"step": 1375
},
{
"epoch": 2.39,
"grad_norm": 1.5822130245289208,
"learning_rate": 3.1258769243664457e-06,
"loss": 0.396,
"step": 1380
},
{
"epoch": 2.4,
"grad_norm": 1.6175930524108444,
"learning_rate": 3.1112483887750285e-06,
"loss": 0.4009,
"step": 1385
},
{
"epoch": 2.4,
"grad_norm": 1.6572189285469252,
"learning_rate": 3.0965975598273147e-06,
"loss": 0.4135,
"step": 1390
},
{
"epoch": 2.41,
"grad_norm": 1.7351755497284653,
"learning_rate": 3.0819249718660625e-06,
"loss": 0.4077,
"step": 1395
},
{
"epoch": 2.42,
"grad_norm": 1.722331718347925,
"learning_rate": 3.0672311600276205e-06,
"loss": 0.3961,
"step": 1400
},
{
"epoch": 2.43,
"grad_norm": 2.8787903703341704,
"learning_rate": 3.052516660222412e-06,
"loss": 0.4013,
"step": 1405
},
{
"epoch": 2.44,
"grad_norm": 1.7609799996300464,
"learning_rate": 3.0377820091153894e-06,
"loss": 0.4182,
"step": 1410
},
{
"epoch": 2.45,
"grad_norm": 1.6624761502383696,
"learning_rate": 3.0230277441064576e-06,
"loss": 0.4174,
"step": 1415
},
{
"epoch": 2.46,
"grad_norm": 1.6422107924968583,
"learning_rate": 3.0082544033108792e-06,
"loss": 0.3968,
"step": 1420
},
{
"epoch": 2.47,
"grad_norm": 1.6979969793176557,
"learning_rate": 2.993462525539646e-06,
"loss": 0.3893,
"step": 1425
},
{
"epoch": 2.47,
"grad_norm": 2.0089020705022027,
"learning_rate": 2.9786526502798245e-06,
"loss": 0.401,
"step": 1430
},
{
"epoch": 2.48,
"grad_norm": 2.4328077084567035,
"learning_rate": 2.9638253176748868e-06,
"loss": 0.4041,
"step": 1435
},
{
"epoch": 2.49,
"grad_norm": 2.1658392118895096,
"learning_rate": 2.948981068505004e-06,
"loss": 0.4152,
"step": 1440
},
{
"epoch": 2.5,
"grad_norm": 1.9075735219009673,
"learning_rate": 2.9341204441673267e-06,
"loss": 0.4019,
"step": 1445
},
{
"epoch": 2.51,
"grad_norm": 79.58152669185955,
"learning_rate": 2.9192439866562363e-06,
"loss": 0.3986,
"step": 1450
},
{
"epoch": 2.52,
"grad_norm": 1.9783510669105098,
"learning_rate": 2.904352238543582e-06,
"loss": 0.3977,
"step": 1455
},
{
"epoch": 2.53,
"grad_norm": 2.0408912753861452,
"learning_rate": 2.889445742958886e-06,
"loss": 0.395,
"step": 1460
},
{
"epoch": 2.53,
"grad_norm": 2.1287347922745563,
"learning_rate": 2.8745250435695433e-06,
"loss": 0.3944,
"step": 1465
},
{
"epoch": 2.54,
"grad_norm": 1.9220277314654948,
"learning_rate": 2.859590684560982e-06,
"loss": 0.4066,
"step": 1470
},
{
"epoch": 2.55,
"grad_norm": 1.7440202984118327,
"learning_rate": 2.844643210616824e-06,
"loss": 0.3933,
"step": 1475
},
{
"epoch": 2.56,
"grad_norm": 4.515419055182269,
"learning_rate": 2.8296831668990195e-06,
"loss": 0.408,
"step": 1480
},
{
"epoch": 2.57,
"grad_norm": 1.804786297673,
"learning_rate": 2.8147110990279576e-06,
"loss": 0.3895,
"step": 1485
},
{
"epoch": 2.58,
"grad_norm": 1.8010185425321945,
"learning_rate": 2.7997275530625715e-06,
"loss": 0.4007,
"step": 1490
},
{
"epoch": 2.59,
"grad_norm": 1.7762627094164511,
"learning_rate": 2.7847330754804257e-06,
"loss": 0.4047,
"step": 1495
},
{
"epoch": 2.6,
"grad_norm": 1.713415455379404,
"learning_rate": 2.7697282131577752e-06,
"loss": 0.3981,
"step": 1500
},
{
"epoch": 2.6,
"grad_norm": 1.8140754605201832,
"learning_rate": 2.754713513349627e-06,
"loss": 0.3976,
"step": 1505
},
{
"epoch": 2.61,
"grad_norm": 1.925171255479977,
"learning_rate": 2.7396895236697833e-06,
"loss": 0.3839,
"step": 1510
},
{
"epoch": 2.62,
"grad_norm": 1.6916338723474953,
"learning_rate": 2.724656792070861e-06,
"loss": 0.4009,
"step": 1515
},
{
"epoch": 2.63,
"grad_norm": 1.7468445915903061,
"learning_rate": 2.7096158668243105e-06,
"loss": 0.3954,
"step": 1520
},
{
"epoch": 2.64,
"grad_norm": 1.7964976583564236,
"learning_rate": 2.6945672965004236e-06,
"loss": 0.3943,
"step": 1525
},
{
"epoch": 2.65,
"grad_norm": 1.6933144469510995,
"learning_rate": 2.679511629948319e-06,
"loss": 0.3958,
"step": 1530
},
{
"epoch": 2.66,
"grad_norm": 1.7617290180955545,
"learning_rate": 2.664449416275928e-06,
"loss": 0.3926,
"step": 1535
},
{
"epoch": 2.66,
"grad_norm": 1.6597635128518158,
"learning_rate": 2.64938120482997e-06,
"loss": 0.4052,
"step": 1540
},
{
"epoch": 2.67,
"grad_norm": 2.1579541798227213,
"learning_rate": 2.6343075451759098e-06,
"loss": 0.3962,
"step": 1545
},
{
"epoch": 2.68,
"grad_norm": 1.6871485325523872,
"learning_rate": 2.619228987077923e-06,
"loss": 0.3975,
"step": 1550
},
{
"epoch": 2.69,
"grad_norm": 2.0167255639712436,
"learning_rate": 2.604146080478839e-06,
"loss": 0.3794,
"step": 1555
},
{
"epoch": 2.7,
"grad_norm": 1.8181334973476126,
"learning_rate": 2.5890593754800826e-06,
"loss": 0.4061,
"step": 1560
},
{
"epoch": 2.71,
"grad_norm": 1.753744913485219,
"learning_rate": 2.573969422321617e-06,
"loss": 0.386,
"step": 1565
},
{
"epoch": 2.72,
"grad_norm": 1.7053212636407111,
"learning_rate": 2.558876771361869e-06,
"loss": 0.3835,
"step": 1570
},
{
"epoch": 2.72,
"grad_norm": 2.0262024426335903,
"learning_rate": 2.5437819730576608e-06,
"loss": 0.3925,
"step": 1575
},
{
"epoch": 2.73,
"grad_norm": 1.7593848195979234,
"learning_rate": 2.528685577944131e-06,
"loss": 0.3856,
"step": 1580
},
{
"epoch": 2.74,
"grad_norm": 1.7817581606319834,
"learning_rate": 2.5135881366146585e-06,
"loss": 0.379,
"step": 1585
},
{
"epoch": 2.75,
"grad_norm": 1.9582822708483771,
"learning_rate": 2.498490199700778e-06,
"loss": 0.4028,
"step": 1590
},
{
"epoch": 2.76,
"grad_norm": 1.737025758915202,
"learning_rate": 2.4833923178520984e-06,
"loss": 0.3918,
"step": 1595
},
{
"epoch": 2.77,
"grad_norm": 1.8457649877934572,
"learning_rate": 2.468295041716223e-06,
"loss": 0.3869,
"step": 1600
},
{
"epoch": 2.78,
"grad_norm": 1.7058419645908771,
"learning_rate": 2.4531989219186626e-06,
"loss": 0.3969,
"step": 1605
},
{
"epoch": 2.79,
"grad_norm": 1.7424467938884878,
"learning_rate": 2.4381045090427525e-06,
"loss": 0.3944,
"step": 1610
},
{
"epoch": 2.79,
"grad_norm": 1.7229435032812541,
"learning_rate": 2.4230123536095746e-06,
"loss": 0.392,
"step": 1615
},
{
"epoch": 2.8,
"grad_norm": 1.6940195802109876,
"learning_rate": 2.4079230060578787e-06,
"loss": 0.3921,
"step": 1620
},
{
"epoch": 2.81,
"grad_norm": 1.926203653659556,
"learning_rate": 2.3928370167240037e-06,
"loss": 0.3772,
"step": 1625
},
{
"epoch": 2.82,
"grad_norm": 2.3907546286805443,
"learning_rate": 2.3777549358218105e-06,
"loss": 0.3829,
"step": 1630
},
{
"epoch": 2.83,
"grad_norm": 1.7061727789752485,
"learning_rate": 2.3626773134226114e-06,
"loss": 0.3821,
"step": 1635
},
{
"epoch": 2.84,
"grad_norm": 4.31283876155316,
"learning_rate": 2.3476046994351085e-06,
"loss": 0.3885,
"step": 1640
},
{
"epoch": 2.85,
"grad_norm": 1.815971909695708,
"learning_rate": 2.332537643585337e-06,
"loss": 0.4065,
"step": 1645
},
{
"epoch": 2.85,
"grad_norm": 1.8273993097485424,
"learning_rate": 2.3174766953966178e-06,
"loss": 0.403,
"step": 1650
},
{
"epoch": 2.86,
"grad_norm": 2.3245389527585347,
"learning_rate": 2.302422404169516e-06,
"loss": 0.3813,
"step": 1655
},
{
"epoch": 2.87,
"grad_norm": 2.5008928168129305,
"learning_rate": 2.2873753189618016e-06,
"loss": 0.3958,
"step": 1660
},
{
"epoch": 2.88,
"grad_norm": 1.7982648722278374,
"learning_rate": 2.272335988568429e-06,
"loss": 0.3939,
"step": 1665
},
{
"epoch": 2.89,
"grad_norm": 1.7113353706744565,
"learning_rate": 2.257304961501521e-06,
"loss": 0.3831,
"step": 1670
},
{
"epoch": 2.9,
"grad_norm": 1.8571633665118874,
"learning_rate": 2.242282785970361e-06,
"loss": 0.4036,
"step": 1675
},
{
"epoch": 2.91,
"grad_norm": 1.6973636101504117,
"learning_rate": 2.2272700098614022e-06,
"loss": 0.3897,
"step": 1680
},
{
"epoch": 2.92,
"grad_norm": 1.8121544488710108,
"learning_rate": 2.2122671807182847e-06,
"loss": 0.3997,
"step": 1685
},
{
"epoch": 2.92,
"grad_norm": 1.7018502297212512,
"learning_rate": 2.1972748457218608e-06,
"loss": 0.3907,
"step": 1690
},
{
"epoch": 2.93,
"grad_norm": 1.5654175282292644,
"learning_rate": 2.182293551670245e-06,
"loss": 0.3849,
"step": 1695
},
{
"epoch": 2.94,
"grad_norm": 1.8131129781015132,
"learning_rate": 2.167323844958867e-06,
"loss": 0.3991,
"step": 1700
},
{
"epoch": 2.95,
"grad_norm": 1.6556951661263792,
"learning_rate": 2.152366271560543e-06,
"loss": 0.3876,
"step": 1705
},
{
"epoch": 2.96,
"grad_norm": 1.6927349202503557,
"learning_rate": 2.1374213770055692e-06,
"loss": 0.3942,
"step": 1710
},
{
"epoch": 2.97,
"grad_norm": 1.8331415491875973,
"learning_rate": 2.122489706361818e-06,
"loss": 0.3791,
"step": 1715
},
{
"epoch": 2.98,
"grad_norm": 1.8546912165375895,
"learning_rate": 2.107571804214861e-06,
"loss": 0.3863,
"step": 1720
},
{
"epoch": 2.98,
"grad_norm": 1.8701558336845174,
"learning_rate": 2.0926682146481094e-06,
"loss": 0.3918,
"step": 1725
},
{
"epoch": 2.99,
"grad_norm": 1.6438729463581823,
"learning_rate": 2.077779481222967e-06,
"loss": 0.4058,
"step": 1730
},
{
"epoch": 3.0,
"eval_loss": 0.41721072793006897,
"eval_runtime": 329.8932,
"eval_samples_per_second": 22.798,
"eval_steps_per_second": 0.358,
"step": 1734
},
{
"epoch": 3.0,
"grad_norm": 3.362012279132523,
"learning_rate": 2.0629061469590046e-06,
"loss": 0.3616,
"step": 1735
},
{
"epoch": 3.01,
"grad_norm": 2.405517474268548,
"learning_rate": 2.048048754314162e-06,
"loss": 0.3281,
"step": 1740
},
{
"epoch": 3.02,
"grad_norm": 2.252602528540252,
"learning_rate": 2.0332078451649557e-06,
"loss": 0.3167,
"step": 1745
},
{
"epoch": 3.03,
"grad_norm": 1.8994332344961664,
"learning_rate": 2.0183839607867177e-06,
"loss": 0.3061,
"step": 1750
},
{
"epoch": 3.04,
"grad_norm": 2.0025910547043138,
"learning_rate": 2.003577641833859e-06,
"loss": 0.3205,
"step": 1755
},
{
"epoch": 3.04,
"grad_norm": 1.8748618784934366,
"learning_rate": 1.988789428320141e-06,
"loss": 0.3101,
"step": 1760
},
{
"epoch": 3.05,
"grad_norm": 1.8182888367049812,
"learning_rate": 1.9740198595989935e-06,
"loss": 0.3071,
"step": 1765
},
{
"epoch": 3.06,
"grad_norm": 1.9091898687921862,
"learning_rate": 1.959269474343832e-06,
"loss": 0.308,
"step": 1770
},
{
"epoch": 3.07,
"grad_norm": 1.7973778535481801,
"learning_rate": 1.9445388105284157e-06,
"loss": 0.3031,
"step": 1775
},
{
"epoch": 3.08,
"grad_norm": 1.7575417892395457,
"learning_rate": 1.9298284054072278e-06,
"loss": 0.3063,
"step": 1780
},
{
"epoch": 3.09,
"grad_norm": 1.8133263463584137,
"learning_rate": 1.9151387954958792e-06,
"loss": 0.3008,
"step": 1785
},
{
"epoch": 3.1,
"grad_norm": 1.892423366282607,
"learning_rate": 1.9004705165515403e-06,
"loss": 0.33,
"step": 1790
},
{
"epoch": 3.11,
"grad_norm": 3.4152436955589596,
"learning_rate": 1.885824103553404e-06,
"loss": 0.2967,
"step": 1795
},
{
"epoch": 3.11,
"grad_norm": 1.766725631679472,
"learning_rate": 1.8712000906831684e-06,
"loss": 0.3134,
"step": 1800
},
{
"epoch": 3.12,
"grad_norm": 1.7559925086788295,
"learning_rate": 1.8565990113055615e-06,
"loss": 0.3117,
"step": 1805
},
{
"epoch": 3.13,
"grad_norm": 1.7744634591819772,
"learning_rate": 1.8420213979488805e-06,
"loss": 0.3188,
"step": 1810
},
{
"epoch": 3.14,
"grad_norm": 1.7585191953453494,
"learning_rate": 1.8274677822855775e-06,
"loss": 0.3052,
"step": 1815
},
{
"epoch": 3.15,
"grad_norm": 1.8690590027859153,
"learning_rate": 1.81293869511286e-06,
"loss": 0.3113,
"step": 1820
},
{
"epoch": 3.16,
"grad_norm": 1.8668392799746427,
"learning_rate": 1.7984346663333415e-06,
"loss": 0.3115,
"step": 1825
},
{
"epoch": 3.17,
"grad_norm": 1.8555764810481752,
"learning_rate": 1.783956224935705e-06,
"loss": 0.3198,
"step": 1830
},
{
"epoch": 3.17,
"grad_norm": 1.7812617913871858,
"learning_rate": 1.7695038989754179e-06,
"loss": 0.3145,
"step": 1835
},
{
"epoch": 3.18,
"grad_norm": 1.7431068806527559,
"learning_rate": 1.7550782155554664e-06,
"loss": 0.2926,
"step": 1840
},
{
"epoch": 3.19,
"grad_norm": 1.8768744877192187,
"learning_rate": 1.7406797008071351e-06,
"loss": 0.3113,
"step": 1845
},
{
"epoch": 3.2,
"grad_norm": 1.8667603957089496,
"learning_rate": 1.7263088798708206e-06,
"loss": 0.3179,
"step": 1850
},
{
"epoch": 3.21,
"grad_norm": 2.1299587342389072,
"learning_rate": 1.7119662768768694e-06,
"loss": 0.3171,
"step": 1855
},
{
"epoch": 3.22,
"grad_norm": 1.7486090043188862,
"learning_rate": 1.6976524149264707e-06,
"loss": 0.3364,
"step": 1860
},
{
"epoch": 3.23,
"grad_norm": 2.728922445411206,
"learning_rate": 1.6833678160725746e-06,
"loss": 0.3104,
"step": 1865
},
{
"epoch": 3.24,
"grad_norm": 1.662819449057055,
"learning_rate": 1.6691130013008514e-06,
"loss": 0.3128,
"step": 1870
},
{
"epoch": 3.24,
"grad_norm": 1.944417319707403,
"learning_rate": 1.654888490510691e-06,
"loss": 0.3136,
"step": 1875
},
{
"epoch": 3.25,
"grad_norm": 2.543538356158386,
"learning_rate": 1.6406948024962438e-06,
"loss": 0.3139,
"step": 1880
},
{
"epoch": 3.26,
"grad_norm": 1.8386914789973714,
"learning_rate": 1.6265324549274924e-06,
"loss": 0.3164,
"step": 1885
},
{
"epoch": 3.27,
"grad_norm": 1.8542366087207915,
"learning_rate": 1.6124019643313783e-06,
"loss": 0.3086,
"step": 1890
},
{
"epoch": 3.28,
"grad_norm": 1.774546610521189,
"learning_rate": 1.59830384607296e-06,
"loss": 0.3128,
"step": 1895
},
{
"epoch": 3.29,
"grad_norm": 2.1592993916723007,
"learning_rate": 1.5842386143366157e-06,
"loss": 0.3102,
"step": 1900
},
{
"epoch": 3.3,
"grad_norm": 1.8046821102537511,
"learning_rate": 1.570206782107294e-06,
"loss": 0.3156,
"step": 1905
},
{
"epoch": 3.3,
"grad_norm": 1.873775956948585,
"learning_rate": 1.5562088611518007e-06,
"loss": 0.3093,
"step": 1910
},
{
"epoch": 3.31,
"grad_norm": 1.8924372370403861,
"learning_rate": 1.542245362000134e-06,
"loss": 0.3027,
"step": 1915
},
{
"epoch": 3.32,
"grad_norm": 1.879324682842581,
"learning_rate": 1.528316793926866e-06,
"loss": 0.3263,
"step": 1920
},
{
"epoch": 3.33,
"grad_norm": 2.1352207068371465,
"learning_rate": 1.5144236649325692e-06,
"loss": 0.3163,
"step": 1925
},
{
"epoch": 3.34,
"grad_norm": 2.164953693631762,
"learning_rate": 1.5005664817252844e-06,
"loss": 0.3104,
"step": 1930
},
{
"epoch": 3.35,
"grad_norm": 1.6787075265475386,
"learning_rate": 1.486745749702047e-06,
"loss": 0.3004,
"step": 1935
},
{
"epoch": 3.36,
"grad_norm": 2.3266338701503537,
"learning_rate": 1.4729619729304486e-06,
"loss": 0.3084,
"step": 1940
},
{
"epoch": 3.37,
"grad_norm": 2.1291831085187827,
"learning_rate": 1.4592156541302542e-06,
"loss": 0.3212,
"step": 1945
},
{
"epoch": 3.37,
"grad_norm": 1.8834065519660157,
"learning_rate": 1.4455072946550688e-06,
"loss": 0.3018,
"step": 1950
},
{
"epoch": 3.38,
"grad_norm": 1.7558438161458612,
"learning_rate": 1.4318373944740485e-06,
"loss": 0.3151,
"step": 1955
},
{
"epoch": 3.39,
"grad_norm": 1.945827671485564,
"learning_rate": 1.4182064521536693e-06,
"loss": 0.3376,
"step": 1960
},
{
"epoch": 3.4,
"grad_norm": 1.807537024750888,
"learning_rate": 1.4046149648395451e-06,
"loss": 0.3275,
"step": 1965
},
{
"epoch": 3.41,
"grad_norm": 2.0683028288702885,
"learning_rate": 1.391063428238288e-06,
"loss": 0.3141,
"step": 1970
},
{
"epoch": 3.42,
"grad_norm": 2.0081489992818566,
"learning_rate": 1.3775523365994364e-06,
"loss": 0.3181,
"step": 1975
},
{
"epoch": 3.43,
"grad_norm": 2.056354755418753,
"learning_rate": 1.3640821826974272e-06,
"loss": 0.3203,
"step": 1980
},
{
"epoch": 3.43,
"grad_norm": 1.8074911366312743,
"learning_rate": 1.3506534578136184e-06,
"loss": 0.3056,
"step": 1985
},
{
"epoch": 3.44,
"grad_norm": 1.9405362029094402,
"learning_rate": 1.3372666517183813e-06,
"loss": 0.3145,
"step": 1990
},
{
"epoch": 3.45,
"grad_norm": 1.919499046536955,
"learning_rate": 1.323922252653228e-06,
"loss": 0.315,
"step": 1995
},
{
"epoch": 3.46,
"grad_norm": 1.9377990366808608,
"learning_rate": 1.3106207473130072e-06,
"loss": 0.3182,
"step": 2000
},
{
"epoch": 3.47,
"grad_norm": 2.342962414340334,
"learning_rate": 1.2973626208281568e-06,
"loss": 0.3166,
"step": 2005
},
{
"epoch": 3.48,
"grad_norm": 2.212340995208452,
"learning_rate": 1.284148356747007e-06,
"loss": 0.3141,
"step": 2010
},
{
"epoch": 3.49,
"grad_norm": 1.8649674715590283,
"learning_rate": 1.2709784370181424e-06,
"loss": 0.3077,
"step": 2015
},
{
"epoch": 3.49,
"grad_norm": 1.8611701311779156,
"learning_rate": 1.257853341972833e-06,
"loss": 0.3052,
"step": 2020
},
{
"epoch": 3.5,
"grad_norm": 1.7753559046449237,
"learning_rate": 1.244773550307506e-06,
"loss": 0.3109,
"step": 2025
},
{
"epoch": 3.51,
"grad_norm": 2.016998664762509,
"learning_rate": 1.23173953906629e-06,
"loss": 0.3023,
"step": 2030
},
{
"epoch": 3.52,
"grad_norm": 1.8857492837767753,
"learning_rate": 1.21875178362362e-06,
"loss": 0.3247,
"step": 2035
},
{
"epoch": 3.53,
"grad_norm": 1.868969694539886,
"learning_rate": 1.205810757666894e-06,
"loss": 0.3038,
"step": 2040
},
{
"epoch": 3.54,
"grad_norm": 1.8277877381221046,
"learning_rate": 1.1929169331792018e-06,
"loss": 0.3089,
"step": 2045
},
{
"epoch": 3.55,
"grad_norm": 1.7809098288999228,
"learning_rate": 1.1800707804221068e-06,
"loss": 0.3127,
"step": 2050
},
{
"epoch": 3.56,
"grad_norm": 1.9048483124043887,
"learning_rate": 1.1672727679184984e-06,
"loss": 0.3227,
"step": 2055
},
{
"epoch": 3.56,
"grad_norm": 1.7713895884274724,
"learning_rate": 1.154523362435499e-06,
"loss": 0.3149,
"step": 2060
},
{
"epoch": 3.57,
"grad_norm": 1.8725797518727274,
"learning_rate": 1.1418230289674456e-06,
"loss": 0.3021,
"step": 2065
},
{
"epoch": 3.58,
"grad_norm": 2.122666453139922,
"learning_rate": 1.1291722307189285e-06,
"loss": 0.3075,
"step": 2070
},
{
"epoch": 3.59,
"grad_norm": 1.9298160681079481,
"learning_rate": 1.1165714290878962e-06,
"loss": 0.3139,
"step": 2075
},
{
"epoch": 3.6,
"grad_norm": 1.8940463808973838,
"learning_rate": 1.1040210836488286e-06,
"loss": 0.3157,
"step": 2080
},
{
"epoch": 3.61,
"grad_norm": 2.061247706009593,
"learning_rate": 1.091521652135976e-06,
"loss": 0.3196,
"step": 2085
},
{
"epoch": 3.62,
"grad_norm": 1.811438510725072,
"learning_rate": 1.0790735904266616e-06,
"loss": 0.3148,
"step": 2090
},
{
"epoch": 3.62,
"grad_norm": 1.805396357161127,
"learning_rate": 1.0666773525246593e-06,
"loss": 0.3036,
"step": 2095
},
{
"epoch": 3.63,
"grad_norm": 2.045401978025098,
"learning_rate": 1.0543333905436325e-06,
"loss": 0.3132,
"step": 2100
},
{
"epoch": 3.64,
"grad_norm": 1.9050403870259225,
"learning_rate": 1.0420421546906458e-06,
"loss": 0.3149,
"step": 2105
},
{
"epoch": 3.65,
"grad_norm": 1.8742997867603652,
"learning_rate": 1.0298040932497436e-06,
"loss": 0.3289,
"step": 2110
},
{
"epoch": 3.66,
"grad_norm": 1.877012236122311,
"learning_rate": 1.0176196525656032e-06,
"loss": 0.3183,
"step": 2115
},
{
"epoch": 3.67,
"grad_norm": 1.873184037966506,
"learning_rate": 1.0054892770272506e-06,
"loss": 0.3092,
"step": 2120
},
{
"epoch": 3.68,
"grad_norm": 2.0504833760417434,
"learning_rate": 9.934134090518593e-07,
"loss": 0.3277,
"step": 2125
},
{
"epoch": 3.69,
"grad_norm": 1.9884415960242454,
"learning_rate": 9.813924890686107e-07,
"loss": 0.313,
"step": 2130
},
{
"epoch": 3.69,
"grad_norm": 1.83107837229764,
"learning_rate": 9.694269555026315e-07,
"loss": 0.3075,
"step": 2135
},
{
"epoch": 3.7,
"grad_norm": 1.764340908962033,
"learning_rate": 9.575172447590032e-07,
"loss": 0.3193,
"step": 2140
},
{
"epoch": 3.71,
"grad_norm": 1.8477826895669585,
"learning_rate": 9.456637912068467e-07,
"loss": 0.3125,
"step": 2145
},
{
"epoch": 3.72,
"grad_norm": 1.6877441904666532,
"learning_rate": 9.338670271634778e-07,
"loss": 0.308,
"step": 2150
},
{
"epoch": 3.73,
"grad_norm": 1.729827667583889,
"learning_rate": 9.221273828786431e-07,
"loss": 0.3002,
"step": 2155
},
{
"epoch": 3.74,
"grad_norm": 1.8639866274721397,
"learning_rate": 9.104452865188254e-07,
"loss": 0.3168,
"step": 2160
},
{
"epoch": 3.75,
"grad_norm": 1.8584083780028213,
"learning_rate": 8.988211641516289e-07,
"loss": 0.3214,
"step": 2165
},
{
"epoch": 3.75,
"grad_norm": 1.8211317436181427,
"learning_rate": 8.872554397302388e-07,
"loss": 0.3109,
"step": 2170
},
{
"epoch": 3.76,
"grad_norm": 1.8219334782565464,
"learning_rate": 8.757485350779609e-07,
"loss": 0.301,
"step": 2175
},
{
"epoch": 3.77,
"grad_norm": 1.9771362675742574,
"learning_rate": 8.64300869872832e-07,
"loss": 0.3048,
"step": 2180
},
{
"epoch": 3.78,
"grad_norm": 1.875073162601576,
"learning_rate": 8.529128616323224e-07,
"loss": 0.2992,
"step": 2185
},
{
"epoch": 3.79,
"grad_norm": 1.8832489255751728,
"learning_rate": 8.415849256980976e-07,
"loss": 0.3054,
"step": 2190
},
{
"epoch": 3.8,
"grad_norm": 10.069268574653966,
"learning_rate": 8.303174752208786e-07,
"loss": 0.3205,
"step": 2195
},
{
"epoch": 3.81,
"grad_norm": 1.8175623122049562,
"learning_rate": 8.191109211453688e-07,
"loss": 0.3166,
"step": 2200
},
{
"epoch": 3.81,
"grad_norm": 1.884935738343672,
"learning_rate": 8.079656721952686e-07,
"loss": 0.3199,
"step": 2205
},
{
"epoch": 3.82,
"grad_norm": 1.8325908870427998,
"learning_rate": 7.968821348583644e-07,
"loss": 0.3017,
"step": 2210
},
{
"epoch": 3.83,
"grad_norm": 1.8664284061304677,
"learning_rate": 7.858607133717117e-07,
"loss": 0.3139,
"step": 2215
},
{
"epoch": 3.84,
"grad_norm": 1.8379727282817748,
"learning_rate": 7.749018097068808e-07,
"loss": 0.3023,
"step": 2220
},
{
"epoch": 3.85,
"grad_norm": 1.7887589319537402,
"learning_rate": 7.640058235553044e-07,
"loss": 0.3019,
"step": 2225
},
{
"epoch": 3.86,
"grad_norm": 1.7307328404201077,
"learning_rate": 7.531731523136976e-07,
"loss": 0.3121,
"step": 2230
},
{
"epoch": 3.87,
"grad_norm": 1.8359406746662572,
"learning_rate": 7.424041910695631e-07,
"loss": 0.3223,
"step": 2235
},
{
"epoch": 3.88,
"grad_norm": 1.866144889812394,
"learning_rate": 7.316993325867827e-07,
"loss": 0.313,
"step": 2240
},
{
"epoch": 3.88,
"grad_norm": 1.8657860951686938,
"learning_rate": 7.210589672912935e-07,
"loss": 0.314,
"step": 2245
},
{
"epoch": 3.89,
"grad_norm": 1.9579670233637116,
"learning_rate": 7.104834832568438e-07,
"loss": 0.2997,
"step": 2250
},
{
"epoch": 3.9,
"grad_norm": 1.7626608239046955,
"learning_rate": 6.999732661908456e-07,
"loss": 0.2939,
"step": 2255
},
{
"epoch": 3.91,
"grad_norm": 1.8904507508265878,
"learning_rate": 6.895286994203037e-07,
"loss": 0.3173,
"step": 2260
},
{
"epoch": 3.92,
"grad_norm": 1.8375475778504446,
"learning_rate": 6.79150163877835e-07,
"loss": 0.2928,
"step": 2265
},
{
"epoch": 3.93,
"grad_norm": 2.071120288178449,
"learning_rate": 6.688380380877755e-07,
"loss": 0.3104,
"step": 2270
},
{
"epoch": 3.94,
"grad_norm": 1.8806639293010023,
"learning_rate": 6.585926981523761e-07,
"loss": 0.3113,
"step": 2275
},
{
"epoch": 3.94,
"grad_norm": 1.7866185026537371,
"learning_rate": 6.484145177380824e-07,
"loss": 0.3053,
"step": 2280
},
{
"epoch": 3.95,
"grad_norm": 1.8712955462594827,
"learning_rate": 6.3830386806191e-07,
"loss": 0.3054,
"step": 2285
},
{
"epoch": 3.96,
"grad_norm": 1.8278128484701799,
"learning_rate": 6.282611178779039e-07,
"loss": 0.3226,
"step": 2290
},
{
"epoch": 3.97,
"grad_norm": 1.7845284828008048,
"learning_rate": 6.182866334636889e-07,
"loss": 0.2995,
"step": 2295
},
{
"epoch": 3.98,
"grad_norm": 1.908811432566092,
"learning_rate": 6.083807786071111e-07,
"loss": 0.3222,
"step": 2300
},
{
"epoch": 3.99,
"grad_norm": 1.7539407257874886,
"learning_rate": 5.985439145929712e-07,
"loss": 0.3106,
"step": 2305
},
{
"epoch": 4.0,
"grad_norm": 1.765533340506495,
"learning_rate": 5.887764001898441e-07,
"loss": 0.3003,
"step": 2310
},
{
"epoch": 4.0,
"eval_loss": 0.37727871537208557,
"eval_runtime": 330.5667,
"eval_samples_per_second": 22.752,
"eval_steps_per_second": 0.357,
"step": 2312
},
{
"epoch": 4.01,
"grad_norm": 2.6249172724373158,
"learning_rate": 5.790785916369987e-07,
"loss": 0.2687,
"step": 2315
},
{
"epoch": 4.01,
"grad_norm": 2.0234365981193108,
"learning_rate": 5.694508426314024e-07,
"loss": 0.2614,
"step": 2320
},
{
"epoch": 4.02,
"grad_norm": 2.0671265707120643,
"learning_rate": 5.598935043148216e-07,
"loss": 0.2575,
"step": 2325
},
{
"epoch": 4.03,
"grad_norm": 2.122780230514674,
"learning_rate": 5.504069252610148e-07,
"loss": 0.2634,
"step": 2330
},
{
"epoch": 4.04,
"grad_norm": 1.943804613668437,
"learning_rate": 5.409914514630205e-07,
"loss": 0.2682,
"step": 2335
},
{
"epoch": 4.05,
"grad_norm": 2.025428441019771,
"learning_rate": 5.316474263205352e-07,
"loss": 0.2499,
"step": 2340
},
{
"epoch": 4.06,
"grad_norm": 1.8116891241805655,
"learning_rate": 5.223751906273936e-07,
"loss": 0.2587,
"step": 2345
},
{
"epoch": 4.07,
"grad_norm": 1.9094711728143186,
"learning_rate": 5.131750825591353e-07,
"loss": 0.2624,
"step": 2350
},
{
"epoch": 4.07,
"grad_norm": 1.792419723948987,
"learning_rate": 5.04047437660673e-07,
"loss": 0.2596,
"step": 2355
},
{
"epoch": 4.08,
"grad_norm": 1.8856130920393068,
"learning_rate": 4.94992588834054e-07,
"loss": 0.2696,
"step": 2360
},
{
"epoch": 4.09,
"grad_norm": 4.886142600320586,
"learning_rate": 4.860108663263185e-07,
"loss": 0.279,
"step": 2365
},
{
"epoch": 4.1,
"grad_norm": 1.831554357960856,
"learning_rate": 4.77102597717454e-07,
"loss": 0.2627,
"step": 2370
},
{
"epoch": 4.11,
"grad_norm": 1.8619320299708513,
"learning_rate": 4.682681079084492e-07,
"loss": 0.2492,
"step": 2375
},
{
"epoch": 4.12,
"grad_norm": 2.026722052142745,
"learning_rate": 4.5950771910944603e-07,
"loss": 0.2795,
"step": 2380
},
{
"epoch": 4.13,
"grad_norm": 1.8283283261223273,
"learning_rate": 4.5082175082798244e-07,
"loss": 0.2513,
"step": 2385
},
{
"epoch": 4.13,
"grad_norm": 1.959431295136227,
"learning_rate": 4.422105198573451e-07,
"loss": 0.2649,
"step": 2390
},
{
"epoch": 4.14,
"grad_norm": 1.7723428484005033,
"learning_rate": 4.3367434026501317e-07,
"loss": 0.2555,
"step": 2395
},
{
"epoch": 4.15,
"grad_norm": 1.978790482643724,
"learning_rate": 4.2521352338120157e-07,
"loss": 0.2611,
"step": 2400
},
{
"epoch": 4.16,
"grad_norm": 1.8319604787059154,
"learning_rate": 4.1682837778750956e-07,
"loss": 0.2525,
"step": 2405
},
{
"epoch": 4.17,
"grad_norm": 2.055092070648486,
"learning_rate": 4.085192093056667e-07,
"loss": 0.2683,
"step": 2410
},
{
"epoch": 4.18,
"grad_norm": 1.8816551471726852,
"learning_rate": 4.0028632098637335e-07,
"loss": 0.2599,
"step": 2415
},
{
"epoch": 4.19,
"grad_norm": 1.8539364404128353,
"learning_rate": 3.9213001309825444e-07,
"loss": 0.2564,
"step": 2420
},
{
"epoch": 4.2,
"grad_norm": 1.802564889277574,
"learning_rate": 3.8405058311690473e-07,
"loss": 0.2604,
"step": 2425
},
{
"epoch": 4.2,
"grad_norm": 1.921786615001957,
"learning_rate": 3.7604832571403823e-07,
"loss": 0.2577,
"step": 2430
},
{
"epoch": 4.21,
"grad_norm": 1.8432967570763548,
"learning_rate": 3.681235327467453e-07,
"loss": 0.2482,
"step": 2435
},
{
"epoch": 4.22,
"grad_norm": 1.8816609521253742,
"learning_rate": 3.6027649324684417e-07,
"loss": 0.2598,
"step": 2440
},
{
"epoch": 4.23,
"grad_norm": 1.8554972854976062,
"learning_rate": 3.5250749341034007e-07,
"loss": 0.2626,
"step": 2445
},
{
"epoch": 4.24,
"grad_norm": 1.7689335842068374,
"learning_rate": 3.448168165869886e-07,
"loss": 0.2501,
"step": 2450
},
{
"epoch": 4.25,
"grad_norm": 1.9815485309472562,
"learning_rate": 3.372047432699607e-07,
"loss": 0.2605,
"step": 2455
},
{
"epoch": 4.26,
"grad_norm": 1.8170369399087385,
"learning_rate": 3.2967155108561076e-07,
"loss": 0.25,
"step": 2460
},
{
"epoch": 4.26,
"grad_norm": 2.4030034838076886,
"learning_rate": 3.222175147833556e-07,
"loss": 0.2651,
"step": 2465
},
{
"epoch": 4.27,
"grad_norm": 1.9136750515911383,
"learning_rate": 3.148429062256497e-07,
"loss": 0.2621,
"step": 2470
},
{
"epoch": 4.28,
"grad_norm": 1.91289336281638,
"learning_rate": 3.075479943780693e-07,
"loss": 0.2608,
"step": 2475
},
{
"epoch": 4.29,
"grad_norm": 1.916237195499518,
"learning_rate": 3.0033304529950733e-07,
"loss": 0.2591,
"step": 2480
},
{
"epoch": 4.3,
"grad_norm": 1.7917439220204743,
"learning_rate": 2.931983221324664e-07,
"loss": 0.2559,
"step": 2485
},
{
"epoch": 4.31,
"grad_norm": 1.8928174769958723,
"learning_rate": 2.861440850934599e-07,
"loss": 0.2554,
"step": 2490
},
{
"epoch": 4.32,
"grad_norm": 1.8697064920019573,
"learning_rate": 2.7917059146352703e-07,
"loss": 0.2529,
"step": 2495
},
{
"epoch": 4.33,
"grad_norm": 1.8149915602515383,
"learning_rate": 2.722780955788448e-07,
"loss": 0.2507,
"step": 2500
},
{
"epoch": 4.33,
"grad_norm": 1.8085679954098952,
"learning_rate": 2.6546684882145097e-07,
"loss": 0.2692,
"step": 2505
},
{
"epoch": 4.34,
"grad_norm": 1.8501785801054293,
"learning_rate": 2.587370996100802e-07,
"loss": 0.2635,
"step": 2510
},
{
"epoch": 4.35,
"grad_norm": 1.7841943537417788,
"learning_rate": 2.520890933911002e-07,
"loss": 0.2726,
"step": 2515
},
{
"epoch": 4.36,
"grad_norm": 1.8236899418180625,
"learning_rate": 2.4552307262956154e-07,
"loss": 0.2629,
"step": 2520
},
{
"epoch": 4.37,
"grad_norm": 1.8527410868403758,
"learning_rate": 2.3903927680035357e-07,
"loss": 0.2601,
"step": 2525
},
{
"epoch": 4.38,
"grad_norm": 1.9912215998190843,
"learning_rate": 2.3263794237947084e-07,
"loss": 0.2541,
"step": 2530
},
{
"epoch": 4.39,
"grad_norm": 2.06578491146987,
"learning_rate": 2.2631930283538745e-07,
"loss": 0.2501,
"step": 2535
},
{
"epoch": 4.39,
"grad_norm": 1.8706250122502321,
"learning_rate": 2.200835886205438e-07,
"loss": 0.2625,
"step": 2540
},
{
"epoch": 4.4,
"grad_norm": 1.9781271983895208,
"learning_rate": 2.139310271629405e-07,
"loss": 0.2666,
"step": 2545
},
{
"epoch": 4.41,
"grad_norm": 1.7931386269528835,
"learning_rate": 2.07861842857843e-07,
"loss": 0.2693,
"step": 2550
},
{
"epoch": 4.42,
"grad_norm": 1.892205648139564,
"learning_rate": 2.0187625705959862e-07,
"loss": 0.2624,
"step": 2555
},
{
"epoch": 4.43,
"grad_norm": 1.8830690882464614,
"learning_rate": 1.9597448807356368e-07,
"loss": 0.2621,
"step": 2560
},
{
"epoch": 4.44,
"grad_norm": 2.792218645014677,
"learning_rate": 1.901567511481389e-07,
"loss": 0.2601,
"step": 2565
},
{
"epoch": 4.45,
"grad_norm": 1.896041115050084,
"learning_rate": 1.8442325846692254e-07,
"loss": 0.2659,
"step": 2570
},
{
"epoch": 4.46,
"grad_norm": 1.8177273159535672,
"learning_rate": 1.787742191409686e-07,
"loss": 0.2607,
"step": 2575
},
{
"epoch": 4.46,
"grad_norm": 1.8209339547730556,
"learning_rate": 1.7320983920116246e-07,
"loss": 0.2543,
"step": 2580
},
{
"epoch": 4.47,
"grad_norm": 1.8208127681176174,
"learning_rate": 1.6773032159070474e-07,
"loss": 0.2559,
"step": 2585
},
{
"epoch": 4.48,
"grad_norm": 1.8887316977772421,
"learning_rate": 1.6233586615771097e-07,
"loss": 0.2675,
"step": 2590
},
{
"epoch": 4.49,
"grad_norm": 1.8402016996115687,
"learning_rate": 1.570266696479214e-07,
"loss": 0.2708,
"step": 2595
},
{
"epoch": 4.5,
"grad_norm": 1.8135826560339299,
"learning_rate": 1.5180292569752675e-07,
"loss": 0.2663,
"step": 2600
},
{
"epoch": 4.51,
"grad_norm": 1.8932652503035179,
"learning_rate": 1.4666482482610517e-07,
"loss": 0.2562,
"step": 2605
},
{
"epoch": 4.52,
"grad_norm": 1.923411155855795,
"learning_rate": 1.4161255442967314e-07,
"loss": 0.2535,
"step": 2610
},
{
"epoch": 4.52,
"grad_norm": 1.8186664689085035,
"learning_rate": 1.3664629877385233e-07,
"loss": 0.2536,
"step": 2615
},
{
"epoch": 4.53,
"grad_norm": 1.8324657945327985,
"learning_rate": 1.317662389871477e-07,
"loss": 0.2626,
"step": 2620
},
{
"epoch": 4.54,
"grad_norm": 1.825266320838444,
"learning_rate": 1.2697255305434087e-07,
"loss": 0.2669,
"step": 2625
},
{
"epoch": 4.55,
"grad_norm": 1.8907649399842439,
"learning_rate": 1.22265415810002e-07,
"loss": 0.2637,
"step": 2630
},
{
"epoch": 4.56,
"grad_norm": 1.8689930525310243,
"learning_rate": 1.1764499893210879e-07,
"loss": 0.2625,
"step": 2635
},
{
"epoch": 4.57,
"grad_norm": 1.7851298479073345,
"learning_rate": 1.1311147093578778e-07,
"loss": 0.2515,
"step": 2640
},
{
"epoch": 4.58,
"grad_norm": 1.9022961798833327,
"learning_rate": 1.0866499716716855e-07,
"loss": 0.2683,
"step": 2645
},
{
"epoch": 4.58,
"grad_norm": 1.79257289914677,
"learning_rate": 1.0430573979735154e-07,
"loss": 0.2654,
"step": 2650
},
{
"epoch": 4.59,
"grad_norm": 1.9177553807521017,
"learning_rate": 1.000338578164936e-07,
"loss": 0.2607,
"step": 2655
},
{
"epoch": 4.6,
"grad_norm": 1.9212642014661736,
"learning_rate": 9.584950702801244e-08,
"loss": 0.2748,
"step": 2660
},
{
"epoch": 4.61,
"grad_norm": 1.8444742116297173,
"learning_rate": 9.175284004289853e-08,
"loss": 0.2695,
"step": 2665
},
{
"epoch": 4.62,
"grad_norm": 1.879187416864439,
"learning_rate": 8.774400627415498e-08,
"loss": 0.2648,
"step": 2670
},
{
"epoch": 4.63,
"grad_norm": 1.853575218259344,
"learning_rate": 8.382315193134372e-08,
"loss": 0.2531,
"step": 2675
},
{
"epoch": 4.64,
"grad_norm": 1.8274985226049987,
"learning_rate": 7.999042001525598e-08,
"loss": 0.2608,
"step": 2680
},
{
"epoch": 4.65,
"grad_norm": 1.9822786870303424,
"learning_rate": 7.624595031269388e-08,
"loss": 0.2739,
"step": 2685
},
{
"epoch": 4.65,
"grad_norm": 2.395868204461241,
"learning_rate": 7.258987939137596e-08,
"loss": 0.2608,
"step": 2690
},
{
"epoch": 4.66,
"grad_norm": 1.9124430429661154,
"learning_rate": 6.902234059495222e-08,
"loss": 0.2621,
"step": 2695
},
{
"epoch": 4.67,
"grad_norm": 1.896035807781804,
"learning_rate": 6.554346403814387e-08,
"loss": 0.2604,
"step": 2700
},
{
"epoch": 4.68,
"grad_norm": 1.881027284048797,
"learning_rate": 6.215337660199632e-08,
"loss": 0.2457,
"step": 2705
},
{
"epoch": 4.69,
"grad_norm": 1.9007017472534657,
"learning_rate": 5.8852201929252005e-08,
"loss": 0.254,
"step": 2710
},
{
"epoch": 4.7,
"grad_norm": 1.8930962106170162,
"learning_rate": 5.5640060419840655e-08,
"loss": 0.264,
"step": 2715
},
{
"epoch": 4.71,
"grad_norm": 1.7660321817481,
"learning_rate": 5.2517069226488694e-08,
"loss": 0.2845,
"step": 2720
},
{
"epoch": 4.71,
"grad_norm": 1.794271544889709,
"learning_rate": 4.9483342250445934e-08,
"loss": 0.2657,
"step": 2725
},
{
"epoch": 4.72,
"grad_norm": 1.8831928024041047,
"learning_rate": 4.653899013733115e-08,
"loss": 0.2622,
"step": 2730
},
{
"epoch": 4.73,
"grad_norm": 1.8254137007243867,
"learning_rate": 4.368412027309754e-08,
"loss": 0.2613,
"step": 2735
},
{
"epoch": 4.74,
"grad_norm": 1.921264151045264,
"learning_rate": 4.0918836780115265e-08,
"loss": 0.2479,
"step": 2740
},
{
"epoch": 4.75,
"grad_norm": 1.8886495539506756,
"learning_rate": 3.8243240513374524e-08,
"loss": 0.2554,
"step": 2745
},
{
"epoch": 4.76,
"grad_norm": 1.842345668470861,
"learning_rate": 3.5657429056806523e-08,
"loss": 0.261,
"step": 2750
},
{
"epoch": 4.77,
"grad_norm": 1.8470545043429003,
"learning_rate": 3.316149671972552e-08,
"loss": 0.2451,
"step": 2755
},
{
"epoch": 4.78,
"grad_norm": 1.898197482257326,
"learning_rate": 3.0755534533387365e-08,
"loss": 0.2605,
"step": 2760
},
{
"epoch": 4.78,
"grad_norm": 1.7715397317439983,
"learning_rate": 2.8439630247671934e-08,
"loss": 0.247,
"step": 2765
},
{
"epoch": 4.79,
"grad_norm": 1.8519562470145452,
"learning_rate": 2.6213868327880088e-08,
"loss": 0.2736,
"step": 2770
},
{
"epoch": 4.8,
"grad_norm": 1.9175537155386098,
"learning_rate": 2.4078329951655044e-08,
"loss": 0.2617,
"step": 2775
},
{
"epoch": 4.81,
"grad_norm": 1.8142907346054091,
"learning_rate": 2.2033093006021133e-08,
"loss": 0.2653,
"step": 2780
},
{
"epoch": 4.82,
"grad_norm": 1.7543346418920545,
"learning_rate": 2.007823208454218e-08,
"loss": 0.2478,
"step": 2785
},
{
"epoch": 4.83,
"grad_norm": 1.817163533821061,
"learning_rate": 1.821381848460202e-08,
"loss": 0.2513,
"step": 2790
},
{
"epoch": 4.84,
"grad_norm": 1.9039285159704047,
"learning_rate": 1.643992020480406e-08,
"loss": 0.257,
"step": 2795
},
{
"epoch": 4.84,
"grad_norm": 1.8371177889007853,
"learning_rate": 1.4756601942490789e-08,
"loss": 0.2497,
"step": 2800
},
{
"epoch": 4.85,
"grad_norm": 1.8366707128631479,
"learning_rate": 1.3163925091384532e-08,
"loss": 0.2531,
"step": 2805
},
{
"epoch": 4.86,
"grad_norm": 2.363794421622485,
"learning_rate": 1.1661947739347867e-08,
"loss": 0.2682,
"step": 2810
},
{
"epoch": 4.87,
"grad_norm": 1.820365380182807,
"learning_rate": 1.0250724666265588e-08,
"loss": 0.265,
"step": 2815
},
{
"epoch": 4.88,
"grad_norm": 1.8970735871610287,
"learning_rate": 8.930307342046307e-09,
"loss": 0.2625,
"step": 2820
},
{
"epoch": 4.89,
"grad_norm": 1.9794621227492666,
"learning_rate": 7.700743924745902e-09,
"loss": 0.2743,
"step": 2825
},
{
"epoch": 4.9,
"grad_norm": 2.1030207809550574,
"learning_rate": 6.5620792588097505e-09,
"loss": 0.2625,
"step": 2830
},
{
"epoch": 4.9,
"grad_norm": 1.8031485505651268,
"learning_rate": 5.51435487343932e-09,
"loss": 0.2589,
"step": 2835
},
{
"epoch": 4.91,
"grad_norm": 1.862632623834731,
"learning_rate": 4.557608981075323e-09,
"loss": 0.2651,
"step": 2840
},
{
"epoch": 4.92,
"grad_norm": 1.796005099020042,
"learning_rate": 3.691876476005496e-09,
"loss": 0.2537,
"step": 2845
},
{
"epoch": 4.93,
"grad_norm": 1.7492013741327732,
"learning_rate": 2.917188933091175e-09,
"loss": 0.2461,
"step": 2850
},
{
"epoch": 4.94,
"grad_norm": 1.9157963617346956,
"learning_rate": 2.233574606616273e-09,
"loss": 0.2661,
"step": 2855
},
{
"epoch": 4.95,
"grad_norm": 1.8994803862391993,
"learning_rate": 1.6410584292561592e-09,
"loss": 0.2665,
"step": 2860
},
{
"epoch": 4.96,
"grad_norm": 1.846063403180044,
"learning_rate": 1.139662011168663e-09,
"loss": 0.2503,
"step": 2865
},
{
"epoch": 4.97,
"grad_norm": 1.8871406230680454,
"learning_rate": 7.294036392066495e-10,
"loss": 0.2601,
"step": 2870
},
{
"epoch": 4.97,
"grad_norm": 1.7693402402676852,
"learning_rate": 4.1029827624911033e-10,
"loss": 0.2494,
"step": 2875
},
{
"epoch": 4.98,
"grad_norm": 1.8544595991462598,
"learning_rate": 1.8235756065770882e-10,
"loss": 0.2647,
"step": 2880
},
{
"epoch": 4.99,
"grad_norm": 1.9426687566855905,
"learning_rate": 4.5589805850454475e-11,
"loss": 0.2615,
"step": 2885
},
{
"epoch": 5.0,
"grad_norm": 1.8892335398265772,
"learning_rate": 0.0,
"loss": 0.2508,
"step": 2890
},
{
"epoch": 5.0,
"eval_loss": 0.375461608171463,
"eval_runtime": 329.8865,
"eval_samples_per_second": 22.799,
"eval_steps_per_second": 0.358,
"step": 2890
},
{
"epoch": 5.0,
"step": 2890,
"total_flos": 2420428819660800.0,
"train_loss": 0.44474206024387714,
"train_runtime": 64422.034,
"train_samples_per_second": 5.737,
"train_steps_per_second": 0.045
}
],
"logging_steps": 5,
"max_steps": 2890,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 2420428819660800.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}