{
"best_metric": 0.041698116809129715,
"best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles2_seed1_q1\\checkpoint-1407",
"epoch": 5.0,
"eval_steps": 500,
"global_step": 2345,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.010660980810234541,
"grad_norm": 0.2517988979816437,
"learning_rate": 4.989339019189766e-05,
"loss": 0.0219,
"step": 5
},
{
"epoch": 0.021321961620469083,
"grad_norm": 0.007871707901358604,
"learning_rate": 4.978678038379531e-05,
"loss": 0.0584,
"step": 10
},
{
"epoch": 0.031982942430703626,
"grad_norm": 0.010467108339071274,
"learning_rate": 4.9680170575692967e-05,
"loss": 0.0044,
"step": 15
},
{
"epoch": 0.042643923240938165,
"grad_norm": 7.733188152313232,
"learning_rate": 4.957356076759062e-05,
"loss": 0.0464,
"step": 20
},
{
"epoch": 0.053304904051172705,
"grad_norm": 4.940289497375488,
"learning_rate": 4.9466950959488276e-05,
"loss": 0.0326,
"step": 25
},
{
"epoch": 0.06396588486140725,
"grad_norm": 6.546672344207764,
"learning_rate": 4.936034115138593e-05,
"loss": 0.0669,
"step": 30
},
{
"epoch": 0.07462686567164178,
"grad_norm": 0.008308369666337967,
"learning_rate": 4.9253731343283586e-05,
"loss": 0.0297,
"step": 35
},
{
"epoch": 0.08528784648187633,
"grad_norm": 2.707502841949463,
"learning_rate": 4.914712153518124e-05,
"loss": 0.0425,
"step": 40
},
{
"epoch": 0.09594882729211088,
"grad_norm": 6.958878993988037,
"learning_rate": 4.904051172707889e-05,
"loss": 0.0495,
"step": 45
},
{
"epoch": 0.10660980810234541,
"grad_norm": 0.22029121220111847,
"learning_rate": 4.893390191897655e-05,
"loss": 0.064,
"step": 50
},
{
"epoch": 0.11727078891257996,
"grad_norm": 4.142677307128906,
"learning_rate": 4.88272921108742e-05,
"loss": 0.0195,
"step": 55
},
{
"epoch": 0.1279317697228145,
"grad_norm": 3.936800956726074,
"learning_rate": 4.872068230277186e-05,
"loss": 0.0878,
"step": 60
},
{
"epoch": 0.13859275053304904,
"grad_norm": 0.10230299085378647,
"learning_rate": 4.861407249466951e-05,
"loss": 0.0049,
"step": 65
},
{
"epoch": 0.14925373134328357,
"grad_norm": 0.14174959063529968,
"learning_rate": 4.850746268656717e-05,
"loss": 0.0256,
"step": 70
},
{
"epoch": 0.15991471215351813,
"grad_norm": 0.06508401036262512,
"learning_rate": 4.840085287846482e-05,
"loss": 0.0261,
"step": 75
},
{
"epoch": 0.17057569296375266,
"grad_norm": 0.27708670496940613,
"learning_rate": 4.829424307036248e-05,
"loss": 0.0487,
"step": 80
},
{
"epoch": 0.1812366737739872,
"grad_norm": 3.022700786590576,
"learning_rate": 4.8187633262260126e-05,
"loss": 0.0344,
"step": 85
},
{
"epoch": 0.19189765458422176,
"grad_norm": 1.0642136335372925,
"learning_rate": 4.808102345415779e-05,
"loss": 0.0081,
"step": 90
},
{
"epoch": 0.2025586353944563,
"grad_norm": 0.2817803919315338,
"learning_rate": 4.7974413646055436e-05,
"loss": 0.0048,
"step": 95
},
{
"epoch": 0.21321961620469082,
"grad_norm": 0.05745874345302582,
"learning_rate": 4.78678038379531e-05,
"loss": 0.0062,
"step": 100
},
{
"epoch": 0.22388059701492538,
"grad_norm": 5.531677722930908,
"learning_rate": 4.7761194029850745e-05,
"loss": 0.063,
"step": 105
},
{
"epoch": 0.2345415778251599,
"grad_norm": 0.011702122166752815,
"learning_rate": 4.765458422174841e-05,
"loss": 0.0287,
"step": 110
},
{
"epoch": 0.24520255863539445,
"grad_norm": 2.944314479827881,
"learning_rate": 4.7547974413646055e-05,
"loss": 0.0442,
"step": 115
},
{
"epoch": 0.255863539445629,
"grad_norm": 0.014090812765061855,
"learning_rate": 4.7441364605543716e-05,
"loss": 0.0037,
"step": 120
},
{
"epoch": 0.26652452025586354,
"grad_norm": 1.7141848802566528,
"learning_rate": 4.7334754797441364e-05,
"loss": 0.018,
"step": 125
},
{
"epoch": 0.2771855010660981,
"grad_norm": 0.01295707281678915,
"learning_rate": 4.7228144989339026e-05,
"loss": 0.0953,
"step": 130
},
{
"epoch": 0.2878464818763326,
"grad_norm": 1.4553868770599365,
"learning_rate": 4.7121535181236674e-05,
"loss": 0.0932,
"step": 135
},
{
"epoch": 0.29850746268656714,
"grad_norm": 0.045734040439128876,
"learning_rate": 4.7014925373134335e-05,
"loss": 0.0024,
"step": 140
},
{
"epoch": 0.3091684434968017,
"grad_norm": 0.09445302188396454,
"learning_rate": 4.690831556503198e-05,
"loss": 0.0884,
"step": 145
},
{
"epoch": 0.31982942430703626,
"grad_norm": 1.2878981828689575,
"learning_rate": 4.6801705756929645e-05,
"loss": 0.0092,
"step": 150
},
{
"epoch": 0.3304904051172708,
"grad_norm": 1.585829257965088,
"learning_rate": 4.669509594882729e-05,
"loss": 0.0101,
"step": 155
},
{
"epoch": 0.3411513859275053,
"grad_norm": 0.014539445750415325,
"learning_rate": 4.658848614072495e-05,
"loss": 0.0274,
"step": 160
},
{
"epoch": 0.35181236673773986,
"grad_norm": 0.10462962090969086,
"learning_rate": 4.64818763326226e-05,
"loss": 0.0362,
"step": 165
},
{
"epoch": 0.3624733475479744,
"grad_norm": 3.7475221157073975,
"learning_rate": 4.637526652452026e-05,
"loss": 0.0176,
"step": 170
},
{
"epoch": 0.373134328358209,
"grad_norm": 1.356924295425415,
"learning_rate": 4.626865671641791e-05,
"loss": 0.0346,
"step": 175
},
{
"epoch": 0.3837953091684435,
"grad_norm": 6.245541095733643,
"learning_rate": 4.6162046908315566e-05,
"loss": 0.0514,
"step": 180
},
{
"epoch": 0.39445628997867804,
"grad_norm": 1.1078875064849854,
"learning_rate": 4.605543710021322e-05,
"loss": 0.0033,
"step": 185
},
{
"epoch": 0.4051172707889126,
"grad_norm": 3.648223638534546,
"learning_rate": 4.5948827292110876e-05,
"loss": 0.0196,
"step": 190
},
{
"epoch": 0.4157782515991471,
"grad_norm": 0.021989647299051285,
"learning_rate": 4.584221748400853e-05,
"loss": 0.0046,
"step": 195
},
{
"epoch": 0.42643923240938164,
"grad_norm": 0.09789646416902542,
"learning_rate": 4.5735607675906185e-05,
"loss": 0.0158,
"step": 200
},
{
"epoch": 0.43710021321961623,
"grad_norm": 0.010587678290903568,
"learning_rate": 4.562899786780384e-05,
"loss": 0.0332,
"step": 205
},
{
"epoch": 0.44776119402985076,
"grad_norm": 0.02128026634454727,
"learning_rate": 4.5522388059701495e-05,
"loss": 0.0352,
"step": 210
},
{
"epoch": 0.4584221748400853,
"grad_norm": 0.2448500692844391,
"learning_rate": 4.541577825159915e-05,
"loss": 0.0415,
"step": 215
},
{
"epoch": 0.4690831556503198,
"grad_norm": 0.03400927782058716,
"learning_rate": 4.5309168443496804e-05,
"loss": 0.0369,
"step": 220
},
{
"epoch": 0.47974413646055436,
"grad_norm": 0.8473523855209351,
"learning_rate": 4.520255863539446e-05,
"loss": 0.0292,
"step": 225
},
{
"epoch": 0.4904051172707889,
"grad_norm": 0.20860645174980164,
"learning_rate": 4.5095948827292114e-05,
"loss": 0.0228,
"step": 230
},
{
"epoch": 0.5010660980810234,
"grad_norm": 0.027097271755337715,
"learning_rate": 4.498933901918977e-05,
"loss": 0.002,
"step": 235
},
{
"epoch": 0.511727078891258,
"grad_norm": 0.28072676062583923,
"learning_rate": 4.488272921108742e-05,
"loss": 0.0067,
"step": 240
},
{
"epoch": 0.5223880597014925,
"grad_norm": 0.2535737156867981,
"learning_rate": 4.477611940298508e-05,
"loss": 0.0223,
"step": 245
},
{
"epoch": 0.5330490405117271,
"grad_norm": 7.301348686218262,
"learning_rate": 4.466950959488273e-05,
"loss": 0.0323,
"step": 250
},
{
"epoch": 0.5437100213219617,
"grad_norm": 3.8629941940307617,
"learning_rate": 4.456289978678039e-05,
"loss": 0.0092,
"step": 255
},
{
"epoch": 0.5543710021321961,
"grad_norm": 0.3334626853466034,
"learning_rate": 4.445628997867804e-05,
"loss": 0.008,
"step": 260
},
{
"epoch": 0.5650319829424307,
"grad_norm": 0.008050858043134212,
"learning_rate": 4.43496801705757e-05,
"loss": 0.0105,
"step": 265
},
{
"epoch": 0.5756929637526652,
"grad_norm": 0.018746552988886833,
"learning_rate": 4.424307036247335e-05,
"loss": 0.0058,
"step": 270
},
{
"epoch": 0.5863539445628998,
"grad_norm": 0.008293040096759796,
"learning_rate": 4.4136460554371006e-05,
"loss": 0.0025,
"step": 275
},
{
"epoch": 0.5970149253731343,
"grad_norm": 0.06293070316314697,
"learning_rate": 4.402985074626866e-05,
"loss": 0.0032,
"step": 280
},
{
"epoch": 0.6076759061833689,
"grad_norm": 0.00662905303761363,
"learning_rate": 4.3923240938166316e-05,
"loss": 0.032,
"step": 285
},
{
"epoch": 0.6183368869936035,
"grad_norm": 0.010014676488935947,
"learning_rate": 4.381663113006397e-05,
"loss": 0.0151,
"step": 290
},
{
"epoch": 0.6289978678038379,
"grad_norm": 0.656973659992218,
"learning_rate": 4.3710021321961625e-05,
"loss": 0.0314,
"step": 295
},
{
"epoch": 0.6396588486140725,
"grad_norm": 0.006886007729917765,
"learning_rate": 4.360341151385928e-05,
"loss": 0.0008,
"step": 300
},
{
"epoch": 0.650319829424307,
"grad_norm": 0.0562790185213089,
"learning_rate": 4.3496801705756935e-05,
"loss": 0.0122,
"step": 305
},
{
"epoch": 0.6609808102345416,
"grad_norm": 0.00783719401806593,
"learning_rate": 4.339019189765459e-05,
"loss": 0.0039,
"step": 310
},
{
"epoch": 0.6716417910447762,
"grad_norm": 0.08167439699172974,
"learning_rate": 4.328358208955224e-05,
"loss": 0.0205,
"step": 315
},
{
"epoch": 0.6823027718550106,
"grad_norm": 0.005825693253427744,
"learning_rate": 4.31769722814499e-05,
"loss": 0.0282,
"step": 320
},
{
"epoch": 0.6929637526652452,
"grad_norm": 0.014544177800416946,
"learning_rate": 4.307036247334755e-05,
"loss": 0.0273,
"step": 325
},
{
"epoch": 0.7036247334754797,
"grad_norm": 0.8907783627510071,
"learning_rate": 4.29637526652452e-05,
"loss": 0.0015,
"step": 330
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.5841841101646423,
"learning_rate": 4.2857142857142856e-05,
"loss": 0.002,
"step": 335
},
{
"epoch": 0.7249466950959488,
"grad_norm": 0.004888765048235655,
"learning_rate": 4.275053304904051e-05,
"loss": 0.0009,
"step": 340
},
{
"epoch": 0.7356076759061834,
"grad_norm": 1.254289984703064,
"learning_rate": 4.2643923240938166e-05,
"loss": 0.0896,
"step": 345
},
{
"epoch": 0.746268656716418,
"grad_norm": 0.12638357281684875,
"learning_rate": 4.253731343283582e-05,
"loss": 0.003,
"step": 350
},
{
"epoch": 0.7569296375266524,
"grad_norm": 0.0051824357360601425,
"learning_rate": 4.2430703624733475e-05,
"loss": 0.0433,
"step": 355
},
{
"epoch": 0.767590618336887,
"grad_norm": 2.014404535293579,
"learning_rate": 4.232409381663113e-05,
"loss": 0.0354,
"step": 360
},
{
"epoch": 0.7782515991471215,
"grad_norm": 0.5347994565963745,
"learning_rate": 4.2217484008528785e-05,
"loss": 0.0054,
"step": 365
},
{
"epoch": 0.7889125799573561,
"grad_norm": 0.03580741956830025,
"learning_rate": 4.211087420042644e-05,
"loss": 0.0015,
"step": 370
},
{
"epoch": 0.7995735607675906,
"grad_norm": 2.3350298404693604,
"learning_rate": 4.2004264392324094e-05,
"loss": 0.0443,
"step": 375
},
{
"epoch": 0.8102345415778252,
"grad_norm": 0.017501430585980415,
"learning_rate": 4.189765458422175e-05,
"loss": 0.002,
"step": 380
},
{
"epoch": 0.8208955223880597,
"grad_norm": 4.268174171447754,
"learning_rate": 4.1791044776119404e-05,
"loss": 0.0344,
"step": 385
},
{
"epoch": 0.8315565031982942,
"grad_norm": 0.015792598947882652,
"learning_rate": 4.168443496801706e-05,
"loss": 0.0676,
"step": 390
},
{
"epoch": 0.8422174840085288,
"grad_norm": 0.07864269614219666,
"learning_rate": 4.157782515991471e-05,
"loss": 0.0348,
"step": 395
},
{
"epoch": 0.8528784648187633,
"grad_norm": 0.32944291830062866,
"learning_rate": 4.147121535181237e-05,
"loss": 0.069,
"step": 400
},
{
"epoch": 0.8635394456289979,
"grad_norm": 1.3694591522216797,
"learning_rate": 4.136460554371002e-05,
"loss": 0.0098,
"step": 405
},
{
"epoch": 0.8742004264392325,
"grad_norm": 0.008273276500403881,
"learning_rate": 4.125799573560768e-05,
"loss": 0.0176,
"step": 410
},
{
"epoch": 0.8848614072494669,
"grad_norm": 0.18966375291347504,
"learning_rate": 4.115138592750533e-05,
"loss": 0.0251,
"step": 415
},
{
"epoch": 0.8955223880597015,
"grad_norm": 4.801139831542969,
"learning_rate": 4.104477611940299e-05,
"loss": 0.0506,
"step": 420
},
{
"epoch": 0.906183368869936,
"grad_norm": 5.343632221221924,
"learning_rate": 4.093816631130064e-05,
"loss": 0.0725,
"step": 425
},
{
"epoch": 0.9168443496801706,
"grad_norm": 1.2895259857177734,
"learning_rate": 4.0831556503198296e-05,
"loss": 0.0074,
"step": 430
},
{
"epoch": 0.9275053304904051,
"grad_norm": 0.1996997594833374,
"learning_rate": 4.072494669509595e-05,
"loss": 0.0049,
"step": 435
},
{
"epoch": 0.9381663113006397,
"grad_norm": 0.06797734647989273,
"learning_rate": 4.0618336886993606e-05,
"loss": 0.0079,
"step": 440
},
{
"epoch": 0.9488272921108742,
"grad_norm": 0.2776765525341034,
"learning_rate": 4.051172707889126e-05,
"loss": 0.0341,
"step": 445
},
{
"epoch": 0.9594882729211087,
"grad_norm": 0.16336512565612793,
"learning_rate": 4.0405117270788915e-05,
"loss": 0.0012,
"step": 450
},
{
"epoch": 0.9701492537313433,
"grad_norm": 0.17950734496116638,
"learning_rate": 4.029850746268657e-05,
"loss": 0.003,
"step": 455
},
{
"epoch": 0.9808102345415778,
"grad_norm": 0.5669295191764832,
"learning_rate": 4.0191897654584225e-05,
"loss": 0.0139,
"step": 460
},
{
"epoch": 0.9914712153518124,
"grad_norm": 7.490285873413086,
"learning_rate": 4.008528784648188e-05,
"loss": 0.0345,
"step": 465
},
{
"epoch": 1.0,
"eval_accuracy": 0.9885333333333334,
"eval_loss": 0.04358742758631706,
"eval_runtime": 18.3431,
"eval_samples_per_second": 204.436,
"eval_steps_per_second": 6.433,
"step": 469
},
{
"epoch": 1.0021321961620469,
"grad_norm": 0.004675396252423525,
"learning_rate": 3.997867803837953e-05,
"loss": 0.0046,
"step": 470
},
{
"epoch": 1.0127931769722816,
"grad_norm": 0.005454385187476873,
"learning_rate": 3.987206823027719e-05,
"loss": 0.0063,
"step": 475
},
{
"epoch": 1.023454157782516,
"grad_norm": 0.004695444367825985,
"learning_rate": 3.976545842217484e-05,
"loss": 0.0008,
"step": 480
},
{
"epoch": 1.0341151385927505,
"grad_norm": 0.0073684342205524445,
"learning_rate": 3.96588486140725e-05,
"loss": 0.0037,
"step": 485
},
{
"epoch": 1.044776119402985,
"grad_norm": 8.275728225708008,
"learning_rate": 3.9552238805970146e-05,
"loss": 0.0194,
"step": 490
},
{
"epoch": 1.0554371002132197,
"grad_norm": 0.005319035612046719,
"learning_rate": 3.944562899786781e-05,
"loss": 0.0016,
"step": 495
},
{
"epoch": 1.0660980810234542,
"grad_norm": 0.005408057011663914,
"learning_rate": 3.9339019189765456e-05,
"loss": 0.0008,
"step": 500
},
{
"epoch": 1.0767590618336886,
"grad_norm": 0.02421507053077221,
"learning_rate": 3.923240938166312e-05,
"loss": 0.0256,
"step": 505
},
{
"epoch": 1.0874200426439233,
"grad_norm": 0.16993646323680878,
"learning_rate": 3.9125799573560765e-05,
"loss": 0.0302,
"step": 510
},
{
"epoch": 1.0980810234541578,
"grad_norm": 0.07230987399816513,
"learning_rate": 3.901918976545843e-05,
"loss": 0.0008,
"step": 515
},
{
"epoch": 1.1087420042643923,
"grad_norm": 0.01518043503165245,
"learning_rate": 3.8912579957356075e-05,
"loss": 0.0008,
"step": 520
},
{
"epoch": 1.1194029850746268,
"grad_norm": 0.07658682763576508,
"learning_rate": 3.8805970149253736e-05,
"loss": 0.0687,
"step": 525
},
{
"epoch": 1.1300639658848615,
"grad_norm": 0.22348736226558685,
"learning_rate": 3.8699360341151384e-05,
"loss": 0.001,
"step": 530
},
{
"epoch": 1.140724946695096,
"grad_norm": 0.011451183818280697,
"learning_rate": 3.8592750533049046e-05,
"loss": 0.0009,
"step": 535
},
{
"epoch": 1.1513859275053304,
"grad_norm": 1.6857359409332275,
"learning_rate": 3.8486140724946694e-05,
"loss": 0.0165,
"step": 540
},
{
"epoch": 1.1620469083155651,
"grad_norm": 0.11387021839618683,
"learning_rate": 3.8379530916844355e-05,
"loss": 0.0191,
"step": 545
},
{
"epoch": 1.1727078891257996,
"grad_norm": 0.007831091061234474,
"learning_rate": 3.8272921108742e-05,
"loss": 0.0007,
"step": 550
},
{
"epoch": 1.183368869936034,
"grad_norm": 0.9735778570175171,
"learning_rate": 3.8166311300639665e-05,
"loss": 0.0049,
"step": 555
},
{
"epoch": 1.1940298507462686,
"grad_norm": 0.005843348801136017,
"learning_rate": 3.805970149253731e-05,
"loss": 0.0066,
"step": 560
},
{
"epoch": 1.2046908315565032,
"grad_norm": 0.052961017936468124,
"learning_rate": 3.7953091684434974e-05,
"loss": 0.0306,
"step": 565
},
{
"epoch": 1.2153518123667377,
"grad_norm": 0.007110061589628458,
"learning_rate": 3.784648187633262e-05,
"loss": 0.0014,
"step": 570
},
{
"epoch": 1.2260127931769722,
"grad_norm": 0.014976187609136105,
"learning_rate": 3.7739872068230284e-05,
"loss": 0.0011,
"step": 575
},
{
"epoch": 1.236673773987207,
"grad_norm": 11.425577163696289,
"learning_rate": 3.763326226012793e-05,
"loss": 0.0113,
"step": 580
},
{
"epoch": 1.2473347547974414,
"grad_norm": 0.011842267587780952,
"learning_rate": 3.752665245202559e-05,
"loss": 0.001,
"step": 585
},
{
"epoch": 1.2579957356076759,
"grad_norm": 0.012584103271365166,
"learning_rate": 3.742004264392324e-05,
"loss": 0.0007,
"step": 590
},
{
"epoch": 1.2686567164179103,
"grad_norm": 0.01001448929309845,
"learning_rate": 3.73134328358209e-05,
"loss": 0.0007,
"step": 595
},
{
"epoch": 1.279317697228145,
"grad_norm": 0.9235844612121582,
"learning_rate": 3.720682302771855e-05,
"loss": 0.0015,
"step": 600
},
{
"epoch": 1.2899786780383795,
"grad_norm": 3.172240972518921,
"learning_rate": 3.710021321961621e-05,
"loss": 0.0024,
"step": 605
},
{
"epoch": 1.3006396588486142,
"grad_norm": 0.007660917472094297,
"learning_rate": 3.699360341151386e-05,
"loss": 0.0044,
"step": 610
},
{
"epoch": 1.3113006396588487,
"grad_norm": 0.0039308504201471806,
"learning_rate": 3.6886993603411515e-05,
"loss": 0.0407,
"step": 615
},
{
"epoch": 1.3219616204690832,
"grad_norm": 9.090988159179688,
"learning_rate": 3.678038379530917e-05,
"loss": 0.079,
"step": 620
},
{
"epoch": 1.3326226012793176,
"grad_norm": 0.07939017564058304,
"learning_rate": 3.6673773987206824e-05,
"loss": 0.0186,
"step": 625
},
{
"epoch": 1.3432835820895521,
"grad_norm": 0.010819097980856895,
"learning_rate": 3.656716417910448e-05,
"loss": 0.0305,
"step": 630
},
{
"epoch": 1.3539445628997868,
"grad_norm": 0.004230526275932789,
"learning_rate": 3.6460554371002134e-05,
"loss": 0.0006,
"step": 635
},
{
"epoch": 1.3646055437100213,
"grad_norm": 0.004499130416661501,
"learning_rate": 3.635394456289979e-05,
"loss": 0.0327,
"step": 640
},
{
"epoch": 1.375266524520256,
"grad_norm": 15.124735832214355,
"learning_rate": 3.624733475479744e-05,
"loss": 0.0672,
"step": 645
},
{
"epoch": 1.3859275053304905,
"grad_norm": 6.115004062652588,
"learning_rate": 3.61407249466951e-05,
"loss": 0.0041,
"step": 650
},
{
"epoch": 1.396588486140725,
"grad_norm": 0.02123236283659935,
"learning_rate": 3.603411513859275e-05,
"loss": 0.0249,
"step": 655
},
{
"epoch": 1.4072494669509594,
"grad_norm": 0.004819363821297884,
"learning_rate": 3.592750533049041e-05,
"loss": 0.0006,
"step": 660
},
{
"epoch": 1.417910447761194,
"grad_norm": 0.012994726188480854,
"learning_rate": 3.582089552238806e-05,
"loss": 0.0014,
"step": 665
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.013508333824574947,
"learning_rate": 3.571428571428572e-05,
"loss": 0.0422,
"step": 670
},
{
"epoch": 1.439232409381663,
"grad_norm": 0.01753731444478035,
"learning_rate": 3.560767590618337e-05,
"loss": 0.0013,
"step": 675
},
{
"epoch": 1.4498933901918978,
"grad_norm": 0.01195667777210474,
"learning_rate": 3.5501066098081026e-05,
"loss": 0.003,
"step": 680
},
{
"epoch": 1.4605543710021323,
"grad_norm": 0.006710922345519066,
"learning_rate": 3.539445628997868e-05,
"loss": 0.0052,
"step": 685
},
{
"epoch": 1.4712153518123667,
"grad_norm": 0.007544829044491053,
"learning_rate": 3.5287846481876336e-05,
"loss": 0.0008,
"step": 690
},
{
"epoch": 1.4818763326226012,
"grad_norm": 0.10621897131204605,
"learning_rate": 3.518123667377399e-05,
"loss": 0.0008,
"step": 695
},
{
"epoch": 1.4925373134328357,
"grad_norm": 0.7118824124336243,
"learning_rate": 3.5074626865671645e-05,
"loss": 0.0023,
"step": 700
},
{
"epoch": 1.5031982942430704,
"grad_norm": 0.06239379197359085,
"learning_rate": 3.496801705756929e-05,
"loss": 0.0019,
"step": 705
},
{
"epoch": 1.5138592750533049,
"grad_norm": 0.01754390262067318,
"learning_rate": 3.4861407249466955e-05,
"loss": 0.0039,
"step": 710
},
{
"epoch": 1.5245202558635396,
"grad_norm": 0.028482411056756973,
"learning_rate": 3.47547974413646e-05,
"loss": 0.0006,
"step": 715
},
{
"epoch": 1.535181236673774,
"grad_norm": 0.01905830018222332,
"learning_rate": 3.4648187633262264e-05,
"loss": 0.0008,
"step": 720
},
{
"epoch": 1.5458422174840085,
"grad_norm": 8.93456745147705,
"learning_rate": 3.454157782515991e-05,
"loss": 0.0346,
"step": 725
},
{
"epoch": 1.556503198294243,
"grad_norm": 0.005203671287745237,
"learning_rate": 3.4434968017057574e-05,
"loss": 0.0082,
"step": 730
},
{
"epoch": 1.5671641791044775,
"grad_norm": 0.003141968045383692,
"learning_rate": 3.432835820895522e-05,
"loss": 0.0008,
"step": 735
},
{
"epoch": 1.5778251599147122,
"grad_norm": 2.969684362411499,
"learning_rate": 3.422174840085288e-05,
"loss": 0.0018,
"step": 740
},
{
"epoch": 1.5884861407249466,
"grad_norm": 0.0036859207320958376,
"learning_rate": 3.411513859275053e-05,
"loss": 0.0059,
"step": 745
},
{
"epoch": 1.5991471215351813,
"grad_norm": 0.017221724614501,
"learning_rate": 3.400852878464819e-05,
"loss": 0.0034,
"step": 750
},
{
"epoch": 1.6098081023454158,
"grad_norm": 0.003814267460256815,
"learning_rate": 3.390191897654584e-05,
"loss": 0.0023,
"step": 755
},
{
"epoch": 1.6204690831556503,
"grad_norm": 0.021603576838970184,
"learning_rate": 3.37953091684435e-05,
"loss": 0.0005,
"step": 760
},
{
"epoch": 1.6311300639658848,
"grad_norm": 0.005658705253154039,
"learning_rate": 3.368869936034115e-05,
"loss": 0.0004,
"step": 765
},
{
"epoch": 1.6417910447761193,
"grad_norm": 0.003284742822870612,
"learning_rate": 3.358208955223881e-05,
"loss": 0.0033,
"step": 770
},
{
"epoch": 1.652452025586354,
"grad_norm": 0.00574084697291255,
"learning_rate": 3.347547974413646e-05,
"loss": 0.031,
"step": 775
},
{
"epoch": 1.6631130063965884,
"grad_norm": 0.0030344082042574883,
"learning_rate": 3.336886993603412e-05,
"loss": 0.0319,
"step": 780
},
{
"epoch": 1.6737739872068231,
"grad_norm": 0.0278617050498724,
"learning_rate": 3.326226012793177e-05,
"loss": 0.0004,
"step": 785
},
{
"epoch": 1.6844349680170576,
"grad_norm": 9.194768905639648,
"learning_rate": 3.3155650319829424e-05,
"loss": 0.058,
"step": 790
},
{
"epoch": 1.695095948827292,
"grad_norm": 0.066549152135849,
"learning_rate": 3.304904051172708e-05,
"loss": 0.0005,
"step": 795
},
{
"epoch": 1.7057569296375266,
"grad_norm": 0.02074478194117546,
"learning_rate": 3.294243070362473e-05,
"loss": 0.0004,
"step": 800
},
{
"epoch": 1.716417910447761,
"grad_norm": 0.00491687236353755,
"learning_rate": 3.283582089552239e-05,
"loss": 0.0004,
"step": 805
},
{
"epoch": 1.7270788912579957,
"grad_norm": 0.00695793004706502,
"learning_rate": 3.272921108742004e-05,
"loss": 0.0154,
"step": 810
},
{
"epoch": 1.7377398720682304,
"grad_norm": 0.6809759140014648,
"learning_rate": 3.26226012793177e-05,
"loss": 0.0022,
"step": 815
},
{
"epoch": 1.748400852878465,
"grad_norm": 0.003181993030011654,
"learning_rate": 3.251599147121535e-05,
"loss": 0.0004,
"step": 820
},
{
"epoch": 1.7590618336886994,
"grad_norm": 0.003694689366966486,
"learning_rate": 3.240938166311301e-05,
"loss": 0.0004,
"step": 825
},
{
"epoch": 1.7697228144989339,
"grad_norm": 3.479130506515503,
"learning_rate": 3.230277185501066e-05,
"loss": 0.0023,
"step": 830
},
{
"epoch": 1.7803837953091683,
"grad_norm": 0.0040030209347605705,
"learning_rate": 3.2196162046908317e-05,
"loss": 0.0005,
"step": 835
},
{
"epoch": 1.7910447761194028,
"grad_norm": 0.0031086145900189877,
"learning_rate": 3.208955223880597e-05,
"loss": 0.0004,
"step": 840
},
{
"epoch": 1.8017057569296375,
"grad_norm": 0.04968525469303131,
"learning_rate": 3.1982942430703626e-05,
"loss": 0.0004,
"step": 845
},
{
"epoch": 1.8123667377398722,
"grad_norm": 0.003668514546006918,
"learning_rate": 3.187633262260128e-05,
"loss": 0.0006,
"step": 850
},
{
"epoch": 1.8230277185501067,
"grad_norm": 0.0027924957685172558,
"learning_rate": 3.1769722814498935e-05,
"loss": 0.0006,
"step": 855
},
{
"epoch": 1.8336886993603412,
"grad_norm": 0.00413157045841217,
"learning_rate": 3.166311300639659e-05,
"loss": 0.0004,
"step": 860
},
{
"epoch": 1.8443496801705757,
"grad_norm": 0.0029154550284147263,
"learning_rate": 3.1556503198294245e-05,
"loss": 0.0006,
"step": 865
},
{
"epoch": 1.8550106609808101,
"grad_norm": 0.003610234009101987,
"learning_rate": 3.14498933901919e-05,
"loss": 0.0007,
"step": 870
},
{
"epoch": 1.8656716417910446,
"grad_norm": 0.14754119515419006,
"learning_rate": 3.1343283582089554e-05,
"loss": 0.0008,
"step": 875
},
{
"epoch": 1.8763326226012793,
"grad_norm": 2.621108055114746,
"learning_rate": 3.123667377398721e-05,
"loss": 0.0018,
"step": 880
},
{
"epoch": 1.886993603411514,
"grad_norm": 0.13447169959545135,
"learning_rate": 3.1130063965884864e-05,
"loss": 0.0005,
"step": 885
},
{
"epoch": 1.8976545842217485,
"grad_norm": 0.003139887936413288,
"learning_rate": 3.102345415778252e-05,
"loss": 0.0472,
"step": 890
},
{
"epoch": 1.908315565031983,
"grad_norm": 0.002950455294921994,
"learning_rate": 3.0916844349680173e-05,
"loss": 0.0216,
"step": 895
},
{
"epoch": 1.9189765458422174,
"grad_norm": 0.0028594762552529573,
"learning_rate": 3.081023454157783e-05,
"loss": 0.0005,
"step": 900
},
{
"epoch": 1.929637526652452,
"grad_norm": 0.06320278346538544,
"learning_rate": 3.070362473347548e-05,
"loss": 0.0004,
"step": 905
},
{
"epoch": 1.9402985074626866,
"grad_norm": 0.7825538516044617,
"learning_rate": 3.059701492537314e-05,
"loss": 0.0143,
"step": 910
},
{
"epoch": 1.950959488272921,
"grad_norm": 0.08826715499162674,
"learning_rate": 3.0490405117270792e-05,
"loss": 0.0006,
"step": 915
},
{
"epoch": 1.9616204690831558,
"grad_norm": 0.04132532700896263,
"learning_rate": 3.0383795309168444e-05,
"loss": 0.0006,
"step": 920
},
{
"epoch": 1.9722814498933903,
"grad_norm": 0.0030114222317934036,
"learning_rate": 3.0277185501066102e-05,
"loss": 0.0004,
"step": 925
},
{
"epoch": 1.9829424307036247,
"grad_norm": 0.035205941647291183,
"learning_rate": 3.0170575692963753e-05,
"loss": 0.0004,
"step": 930
},
{
"epoch": 1.9936034115138592,
"grad_norm": 0.011215535923838615,
"learning_rate": 3.006396588486141e-05,
"loss": 0.0004,
"step": 935
},
{
"epoch": 2.0,
"eval_accuracy": 0.9893333333333333,
"eval_loss": 0.04325896501541138,
"eval_runtime": 17.7996,
"eval_samples_per_second": 210.679,
"eval_steps_per_second": 6.629,
"step": 938
},
{
"epoch": 2.0042643923240937,
"grad_norm": 0.0036944737657904625,
"learning_rate": 2.9957356076759063e-05,
"loss": 0.0004,
"step": 940
},
{
"epoch": 2.014925373134328,
"grad_norm": 0.0037233037874102592,
"learning_rate": 2.9850746268656714e-05,
"loss": 0.0006,
"step": 945
},
{
"epoch": 2.025586353944563,
"grad_norm": 0.00471721775829792,
"learning_rate": 2.9744136460554372e-05,
"loss": 0.0012,
"step": 950
},
{
"epoch": 2.0362473347547976,
"grad_norm": 0.0028184354305267334,
"learning_rate": 2.9637526652452023e-05,
"loss": 0.0004,
"step": 955
},
{
"epoch": 2.046908315565032,
"grad_norm": 0.0026999644469469786,
"learning_rate": 2.953091684434968e-05,
"loss": 0.0003,
"step": 960
},
{
"epoch": 2.0575692963752665,
"grad_norm": 7.866677284240723,
"learning_rate": 2.9424307036247333e-05,
"loss": 0.0099,
"step": 965
},
{
"epoch": 2.068230277185501,
"grad_norm": 0.003158628474920988,
"learning_rate": 2.931769722814499e-05,
"loss": 0.0003,
"step": 970
},
{
"epoch": 2.0788912579957355,
"grad_norm": 0.0029078605584800243,
"learning_rate": 2.9211087420042642e-05,
"loss": 0.0003,
"step": 975
},
{
"epoch": 2.08955223880597,
"grad_norm": 0.002635799115523696,
"learning_rate": 2.91044776119403e-05,
"loss": 0.0004,
"step": 980
},
{
"epoch": 2.100213219616205,
"grad_norm": 0.0028462933842092752,
"learning_rate": 2.8997867803837952e-05,
"loss": 0.0003,
"step": 985
},
{
"epoch": 2.1108742004264394,
"grad_norm": 0.002656631637364626,
"learning_rate": 2.889125799573561e-05,
"loss": 0.0003,
"step": 990
},
{
"epoch": 2.121535181236674,
"grad_norm": 0.0027064948808401823,
"learning_rate": 2.878464818763326e-05,
"loss": 0.0004,
"step": 995
},
{
"epoch": 2.1321961620469083,
"grad_norm": 0.002770259277895093,
"learning_rate": 2.867803837953092e-05,
"loss": 0.0004,
"step": 1000
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.0030895874369889498,
"learning_rate": 2.857142857142857e-05,
"loss": 0.0003,
"step": 1005
},
{
"epoch": 2.1535181236673773,
"grad_norm": 0.0027953784447163343,
"learning_rate": 2.846481876332623e-05,
"loss": 0.0004,
"step": 1010
},
{
"epoch": 2.1641791044776117,
"grad_norm": 0.005705618299543858,
"learning_rate": 2.835820895522388e-05,
"loss": 0.0003,
"step": 1015
},
{
"epoch": 2.1748400852878467,
"grad_norm": 0.006399468053132296,
"learning_rate": 2.825159914712154e-05,
"loss": 0.0004,
"step": 1020
},
{
"epoch": 2.185501066098081,
"grad_norm": 0.0025555843021720648,
"learning_rate": 2.814498933901919e-05,
"loss": 0.0006,
"step": 1025
},
{
"epoch": 2.1961620469083156,
"grad_norm": 0.0027065330650657415,
"learning_rate": 2.8038379530916848e-05,
"loss": 0.0003,
"step": 1030
},
{
"epoch": 2.20682302771855,
"grad_norm": 0.002442255150526762,
"learning_rate": 2.79317697228145e-05,
"loss": 0.0005,
"step": 1035
},
{
"epoch": 2.2174840085287846,
"grad_norm": 0.002484562573954463,
"learning_rate": 2.7825159914712157e-05,
"loss": 0.0003,
"step": 1040
},
{
"epoch": 2.228144989339019,
"grad_norm": 0.0036380344536155462,
"learning_rate": 2.771855010660981e-05,
"loss": 0.0003,
"step": 1045
},
{
"epoch": 2.2388059701492535,
"grad_norm": 0.002537985099479556,
"learning_rate": 2.7611940298507467e-05,
"loss": 0.0003,
"step": 1050
},
{
"epoch": 2.2494669509594885,
"grad_norm": 0.004076914396136999,
"learning_rate": 2.7505330490405118e-05,
"loss": 0.0003,
"step": 1055
},
{
"epoch": 2.260127931769723,
"grad_norm": 0.002602552529424429,
"learning_rate": 2.7398720682302776e-05,
"loss": 0.0003,
"step": 1060
},
{
"epoch": 2.2707889125799574,
"grad_norm": 0.00241272384300828,
"learning_rate": 2.7292110874200428e-05,
"loss": 0.0003,
"step": 1065
},
{
"epoch": 2.281449893390192,
"grad_norm": 0.0025189516600221395,
"learning_rate": 2.7185501066098086e-05,
"loss": 0.0003,
"step": 1070
},
{
"epoch": 2.2921108742004264,
"grad_norm": 0.0025987394619733095,
"learning_rate": 2.7078891257995737e-05,
"loss": 0.0003,
"step": 1075
},
{
"epoch": 2.302771855010661,
"grad_norm": 0.0023669751826673746,
"learning_rate": 2.6972281449893395e-05,
"loss": 0.0003,
"step": 1080
},
{
"epoch": 2.3134328358208958,
"grad_norm": 0.0026609115302562714,
"learning_rate": 2.6865671641791047e-05,
"loss": 0.0003,
"step": 1085
},
{
"epoch": 2.3240938166311302,
"grad_norm": 0.0025364335160702467,
"learning_rate": 2.6759061833688705e-05,
"loss": 0.0003,
"step": 1090
},
{
"epoch": 2.3347547974413647,
"grad_norm": 0.003316157031804323,
"learning_rate": 2.6652452025586356e-05,
"loss": 0.0003,
"step": 1095
},
{
"epoch": 2.345415778251599,
"grad_norm": 0.0033791414462029934,
"learning_rate": 2.6545842217484007e-05,
"loss": 0.0005,
"step": 1100
},
{
"epoch": 2.3560767590618337,
"grad_norm": 0.0030639800243079662,
"learning_rate": 2.6439232409381666e-05,
"loss": 0.0003,
"step": 1105
},
{
"epoch": 2.366737739872068,
"grad_norm": 0.003819510340690613,
"learning_rate": 2.6332622601279317e-05,
"loss": 0.0038,
"step": 1110
},
{
"epoch": 2.3773987206823026,
"grad_norm": 0.004337433259934187,
"learning_rate": 2.6226012793176975e-05,
"loss": 0.0004,
"step": 1115
},
{
"epoch": 2.388059701492537,
"grad_norm": 0.002666846150532365,
"learning_rate": 2.6119402985074626e-05,
"loss": 0.0003,
"step": 1120
},
{
"epoch": 2.398720682302772,
"grad_norm": 0.0023007108829915524,
"learning_rate": 2.6012793176972285e-05,
"loss": 0.0003,
"step": 1125
},
{
"epoch": 2.4093816631130065,
"grad_norm": 0.0057333544827997684,
"learning_rate": 2.5906183368869936e-05,
"loss": 0.0003,
"step": 1130
},
{
"epoch": 2.420042643923241,
"grad_norm": 0.002227972960099578,
"learning_rate": 2.5799573560767594e-05,
"loss": 0.0003,
"step": 1135
},
{
"epoch": 2.4307036247334755,
"grad_norm": 0.5877718925476074,
"learning_rate": 2.5692963752665245e-05,
"loss": 0.0006,
"step": 1140
},
{
"epoch": 2.44136460554371,
"grad_norm": 0.03610414266586304,
"learning_rate": 2.5586353944562904e-05,
"loss": 0.0003,
"step": 1145
},
{
"epoch": 2.4520255863539444,
"grad_norm": 0.0026552320923656225,
"learning_rate": 2.5479744136460555e-05,
"loss": 0.0003,
"step": 1150
},
{
"epoch": 2.4626865671641793,
"grad_norm": 0.005995908752083778,
"learning_rate": 2.537313432835821e-05,
"loss": 0.0003,
"step": 1155
},
{
"epoch": 2.473347547974414,
"grad_norm": 0.002901578787714243,
"learning_rate": 2.5266524520255864e-05,
"loss": 0.0026,
"step": 1160
},
{
"epoch": 2.4840085287846483,
"grad_norm": 0.002390436828136444,
"learning_rate": 2.515991471215352e-05,
"loss": 0.0003,
"step": 1165
},
{
"epoch": 2.4946695095948828,
"grad_norm": 0.002299776067957282,
"learning_rate": 2.5053304904051174e-05,
"loss": 0.0005,
"step": 1170
},
{
"epoch": 2.5053304904051172,
"grad_norm": 0.0026134243234992027,
"learning_rate": 2.494669509594883e-05,
"loss": 0.0003,
"step": 1175
},
{
"epoch": 2.5159914712153517,
"grad_norm": 2.797267198562622,
"learning_rate": 2.4840085287846483e-05,
"loss": 0.0443,
"step": 1180
},
{
"epoch": 2.526652452025586,
"grad_norm": 0.0022763311862945557,
"learning_rate": 2.4733475479744138e-05,
"loss": 0.0021,
"step": 1185
},
{
"epoch": 2.5373134328358207,
"grad_norm": 0.0023430436849594116,
"learning_rate": 2.4626865671641793e-05,
"loss": 0.0002,
"step": 1190
},
{
"epoch": 2.5479744136460556,
"grad_norm": 2.1748013496398926,
"learning_rate": 2.4520255863539444e-05,
"loss": 0.0013,
"step": 1195
},
{
"epoch": 2.55863539445629,
"grad_norm": 0.002146865241229534,
"learning_rate": 2.44136460554371e-05,
"loss": 0.0003,
"step": 1200
},
{
"epoch": 2.5692963752665245,
"grad_norm": 0.005170762538909912,
"learning_rate": 2.4307036247334754e-05,
"loss": 0.0003,
"step": 1205
},
{
"epoch": 2.579957356076759,
"grad_norm": 0.0024131566751748323,
"learning_rate": 2.420042643923241e-05,
"loss": 0.0003,
"step": 1210
},
{
"epoch": 2.5906183368869935,
"grad_norm": 0.0024173653218895197,
"learning_rate": 2.4093816631130063e-05,
"loss": 0.0003,
"step": 1215
},
{
"epoch": 2.6012793176972284,
"grad_norm": 0.0021876043174415827,
"learning_rate": 2.3987206823027718e-05,
"loss": 0.0019,
"step": 1220
},
{
"epoch": 2.611940298507463,
"grad_norm": 0.002135910326614976,
"learning_rate": 2.3880597014925373e-05,
"loss": 0.0004,
"step": 1225
},
{
"epoch": 2.6226012793176974,
"grad_norm": 0.0021610886324197054,
"learning_rate": 2.3773987206823027e-05,
"loss": 0.0003,
"step": 1230
},
{
"epoch": 2.633262260127932,
"grad_norm": 0.0022106110118329525,
"learning_rate": 2.3667377398720682e-05,
"loss": 0.0002,
"step": 1235
},
{
"epoch": 2.6439232409381663,
"grad_norm": 0.5581987500190735,
"learning_rate": 2.3560767590618337e-05,
"loss": 0.0128,
"step": 1240
},
{
"epoch": 2.654584221748401,
"grad_norm": 0.0054076798260211945,
"learning_rate": 2.345415778251599e-05,
"loss": 0.0003,
"step": 1245
},
{
"epoch": 2.6652452025586353,
"grad_norm": 0.003327286569401622,
"learning_rate": 2.3347547974413646e-05,
"loss": 0.0003,
"step": 1250
},
{
"epoch": 2.6759061833688698,
"grad_norm": 0.004477964248508215,
"learning_rate": 2.32409381663113e-05,
"loss": 0.0002,
"step": 1255
},
{
"epoch": 2.6865671641791042,
"grad_norm": 0.0021120470482856035,
"learning_rate": 2.3134328358208956e-05,
"loss": 0.0003,
"step": 1260
},
{
"epoch": 2.697228144989339,
"grad_norm": 0.0035305528435856104,
"learning_rate": 2.302771855010661e-05,
"loss": 0.0003,
"step": 1265
},
{
"epoch": 2.7078891257995736,
"grad_norm": 0.002070706570520997,
"learning_rate": 2.2921108742004265e-05,
"loss": 0.0002,
"step": 1270
},
{
"epoch": 2.718550106609808,
"grad_norm": 0.026903709396719933,
"learning_rate": 2.281449893390192e-05,
"loss": 0.0003,
"step": 1275
},
{
"epoch": 2.7292110874200426,
"grad_norm": 0.0020756307058036327,
"learning_rate": 2.2707889125799575e-05,
"loss": 0.0003,
"step": 1280
},
{
"epoch": 2.739872068230277,
"grad_norm": 0.002199813723564148,
"learning_rate": 2.260127931769723e-05,
"loss": 0.0003,
"step": 1285
},
{
"epoch": 2.750533049040512,
"grad_norm": 0.0023094303905963898,
"learning_rate": 2.2494669509594884e-05,
"loss": 0.0002,
"step": 1290
},
{
"epoch": 2.7611940298507465,
"grad_norm": 0.0068667493760585785,
"learning_rate": 2.238805970149254e-05,
"loss": 0.0003,
"step": 1295
},
{
"epoch": 2.771855010660981,
"grad_norm": 0.0020157108083367348,
"learning_rate": 2.2281449893390194e-05,
"loss": 0.0002,
"step": 1300
},
{
"epoch": 2.7825159914712154,
"grad_norm": 0.0020970592740923166,
"learning_rate": 2.217484008528785e-05,
"loss": 0.0003,
"step": 1305
},
{
"epoch": 2.79317697228145,
"grad_norm": 0.0020755301229655743,
"learning_rate": 2.2068230277185503e-05,
"loss": 0.0002,
"step": 1310
},
{
"epoch": 2.8038379530916844,
"grad_norm": 0.003090676385909319,
"learning_rate": 2.1961620469083158e-05,
"loss": 0.0002,
"step": 1315
},
{
"epoch": 2.814498933901919,
"grad_norm": 0.003277962561696768,
"learning_rate": 2.1855010660980813e-05,
"loss": 0.0002,
"step": 1320
},
{
"epoch": 2.8251599147121533,
"grad_norm": 0.002007275354117155,
"learning_rate": 2.1748400852878467e-05,
"loss": 0.0002,
"step": 1325
},
{
"epoch": 2.835820895522388,
"grad_norm": 0.0029582190327346325,
"learning_rate": 2.164179104477612e-05,
"loss": 0.0003,
"step": 1330
},
{
"epoch": 2.8464818763326227,
"grad_norm": 0.002031074371188879,
"learning_rate": 2.1535181236673773e-05,
"loss": 0.0003,
"step": 1335
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.0029593866784125566,
"learning_rate": 2.1428571428571428e-05,
"loss": 0.0002,
"step": 1340
},
{
"epoch": 2.8678038379530917,
"grad_norm": 0.002100985962897539,
"learning_rate": 2.1321961620469083e-05,
"loss": 0.0002,
"step": 1345
},
{
"epoch": 2.878464818763326,
"grad_norm": 0.002106971340253949,
"learning_rate": 2.1215351812366738e-05,
"loss": 0.0002,
"step": 1350
},
{
"epoch": 2.8891257995735606,
"grad_norm": 0.005778283346444368,
"learning_rate": 2.1108742004264392e-05,
"loss": 0.0003,
"step": 1355
},
{
"epoch": 2.8997867803837956,
"grad_norm": 0.001990243559703231,
"learning_rate": 2.1002132196162047e-05,
"loss": 0.0091,
"step": 1360
},
{
"epoch": 2.91044776119403,
"grad_norm": 0.002967429580166936,
"learning_rate": 2.0895522388059702e-05,
"loss": 0.0002,
"step": 1365
},
{
"epoch": 2.9211087420042645,
"grad_norm": 0.002995220245793462,
"learning_rate": 2.0788912579957357e-05,
"loss": 0.0002,
"step": 1370
},
{
"epoch": 2.931769722814499,
"grad_norm": 0.0019023464992642403,
"learning_rate": 2.068230277185501e-05,
"loss": 0.0002,
"step": 1375
},
{
"epoch": 2.9424307036247335,
"grad_norm": 0.006048965267837048,
"learning_rate": 2.0575692963752666e-05,
"loss": 0.0003,
"step": 1380
},
{
"epoch": 2.953091684434968,
"grad_norm": 0.00221344199962914,
"learning_rate": 2.046908315565032e-05,
"loss": 0.0003,
"step": 1385
},
{
"epoch": 2.9637526652452024,
"grad_norm": 0.001973741687834263,
"learning_rate": 2.0362473347547976e-05,
"loss": 0.0002,
"step": 1390
},
{
"epoch": 2.974413646055437,
"grad_norm": 0.0020740386098623276,
"learning_rate": 2.025586353944563e-05,
"loss": 0.0002,
"step": 1395
},
{
"epoch": 2.9850746268656714,
"grad_norm": 0.001976174535229802,
"learning_rate": 2.0149253731343285e-05,
"loss": 0.0002,
"step": 1400
},
{
"epoch": 2.9957356076759063,
"grad_norm": 0.002017454942688346,
"learning_rate": 2.004264392324094e-05,
"loss": 0.0006,
"step": 1405
},
{
"epoch": 3.0,
"eval_accuracy": 0.9909333333333333,
"eval_loss": 0.041698116809129715,
"eval_runtime": 17.3059,
"eval_samples_per_second": 216.689,
"eval_steps_per_second": 6.818,
"step": 1407
},
{
"epoch": 3.0063965884861408,
"grad_norm": 0.0046324701979756355,
"learning_rate": 1.9936034115138594e-05,
"loss": 0.0002,
"step": 1410
},
{
"epoch": 3.0170575692963753,
"grad_norm": 0.001944435527548194,
"learning_rate": 1.982942430703625e-05,
"loss": 0.0002,
"step": 1415
},
{
"epoch": 3.0277185501066097,
"grad_norm": 0.009258815087378025,
"learning_rate": 1.9722814498933904e-05,
"loss": 0.0003,
"step": 1420
},
{
"epoch": 3.038379530916844,
"grad_norm": 0.001943689538165927,
"learning_rate": 1.961620469083156e-05,
"loss": 0.0002,
"step": 1425
},
{
"epoch": 3.0490405117270787,
"grad_norm": 0.028894688934087753,
"learning_rate": 1.9509594882729213e-05,
"loss": 0.0004,
"step": 1430
},
{
"epoch": 3.0597014925373136,
"grad_norm": 0.006632302422076464,
"learning_rate": 1.9402985074626868e-05,
"loss": 0.0083,
"step": 1435
},
{
"epoch": 3.070362473347548,
"grad_norm": 0.0024058783892542124,
"learning_rate": 1.9296375266524523e-05,
"loss": 0.0002,
"step": 1440
},
{
"epoch": 3.0810234541577826,
"grad_norm": 0.002219252521172166,
"learning_rate": 1.9189765458422178e-05,
"loss": 0.0002,
"step": 1445
},
{
"epoch": 3.091684434968017,
"grad_norm": 0.0019314315868541598,
"learning_rate": 1.9083155650319832e-05,
"loss": 0.0002,
"step": 1450
},
{
"epoch": 3.1023454157782515,
"grad_norm": 0.0018429073970764875,
"learning_rate": 1.8976545842217487e-05,
"loss": 0.0018,
"step": 1455
},
{
"epoch": 3.113006396588486,
"grad_norm": 0.001831927103921771,
"learning_rate": 1.8869936034115142e-05,
"loss": 0.0002,
"step": 1460
},
{
"epoch": 3.1236673773987205,
"grad_norm": 0.001986939227208495,
"learning_rate": 1.8763326226012797e-05,
"loss": 0.0002,
"step": 1465
},
{
"epoch": 3.1343283582089554,
"grad_norm": 0.0026803421787917614,
"learning_rate": 1.865671641791045e-05,
"loss": 0.0002,
"step": 1470
},
{
"epoch": 3.14498933901919,
"grad_norm": 0.0027609444223344326,
"learning_rate": 1.8550106609808106e-05,
"loss": 0.0002,
"step": 1475
},
{
"epoch": 3.1556503198294243,
"grad_norm": 0.001865135389380157,
"learning_rate": 1.8443496801705757e-05,
"loss": 0.0035,
"step": 1480
},
{
"epoch": 3.166311300639659,
"grad_norm": 0.0019004421774297953,
"learning_rate": 1.8336886993603412e-05,
"loss": 0.0002,
"step": 1485
},
{
"epoch": 3.1769722814498933,
"grad_norm": 0.0018759845988824964,
"learning_rate": 1.8230277185501067e-05,
"loss": 0.0002,
"step": 1490
},
{
"epoch": 3.1876332622601278,
"grad_norm": 0.0027162828482687473,
"learning_rate": 1.812366737739872e-05,
"loss": 0.0003,
"step": 1495
},
{
"epoch": 3.1982942430703627,
"grad_norm": 0.001807849621400237,
"learning_rate": 1.8017057569296376e-05,
"loss": 0.0003,
"step": 1500
},
{
"epoch": 3.208955223880597,
"grad_norm": 0.0026615445967763662,
"learning_rate": 1.791044776119403e-05,
"loss": 0.0002,
"step": 1505
},
{
"epoch": 3.2196162046908317,
"grad_norm": 0.0019175607012584805,
"learning_rate": 1.7803837953091686e-05,
"loss": 0.0002,
"step": 1510
},
{
"epoch": 3.230277185501066,
"grad_norm": 0.02142210118472576,
"learning_rate": 1.769722814498934e-05,
"loss": 0.0002,
"step": 1515
},
{
"epoch": 3.2409381663113006,
"grad_norm": 0.001850056927651167,
"learning_rate": 1.7590618336886995e-05,
"loss": 0.0002,
"step": 1520
},
{
"epoch": 3.251599147121535,
"grad_norm": 0.0018486682092770934,
"learning_rate": 1.7484008528784647e-05,
"loss": 0.0002,
"step": 1525
},
{
"epoch": 3.2622601279317696,
"grad_norm": 0.0021774503402411938,
"learning_rate": 1.73773987206823e-05,
"loss": 0.0002,
"step": 1530
},
{
"epoch": 3.272921108742004,
"grad_norm": 0.006993378046900034,
"learning_rate": 1.7270788912579956e-05,
"loss": 0.0002,
"step": 1535
},
{
"epoch": 3.283582089552239,
"grad_norm": 0.0018699101638048887,
"learning_rate": 1.716417910447761e-05,
"loss": 0.0002,
"step": 1540
},
{
"epoch": 3.2942430703624734,
"grad_norm": 0.002980427350848913,
"learning_rate": 1.7057569296375266e-05,
"loss": 0.0002,
"step": 1545
},
{
"epoch": 3.304904051172708,
"grad_norm": 0.0017131326021626592,
"learning_rate": 1.695095948827292e-05,
"loss": 0.0002,
"step": 1550
},
{
"epoch": 3.3155650319829424,
"grad_norm": 0.0016876270528882742,
"learning_rate": 1.6844349680170575e-05,
"loss": 0.0002,
"step": 1555
},
{
"epoch": 3.326226012793177,
"grad_norm": 0.0035409792326390743,
"learning_rate": 1.673773987206823e-05,
"loss": 0.0002,
"step": 1560
},
{
"epoch": 3.3368869936034113,
"grad_norm": 0.0017458832589909434,
"learning_rate": 1.6631130063965885e-05,
"loss": 0.0002,
"step": 1565
},
{
"epoch": 3.3475479744136463,
"grad_norm": 0.0017402229132130742,
"learning_rate": 1.652452025586354e-05,
"loss": 0.0002,
"step": 1570
},
{
"epoch": 3.3582089552238807,
"grad_norm": 0.006694675888866186,
"learning_rate": 1.6417910447761194e-05,
"loss": 0.0002,
"step": 1575
},
{
"epoch": 3.368869936034115,
"grad_norm": 0.0017123989528045058,
"learning_rate": 1.631130063965885e-05,
"loss": 0.0002,
"step": 1580
},
{
"epoch": 3.3795309168443497,
"grad_norm": 0.0018285670084878802,
"learning_rate": 1.6204690831556504e-05,
"loss": 0.0002,
"step": 1585
},
{
"epoch": 3.390191897654584,
"grad_norm": 0.0017450174782425165,
"learning_rate": 1.6098081023454158e-05,
"loss": 0.0002,
"step": 1590
},
{
"epoch": 3.4008528784648187,
"grad_norm": 0.0019464660435914993,
"learning_rate": 1.5991471215351813e-05,
"loss": 0.0002,
"step": 1595
},
{
"epoch": 3.411513859275053,
"grad_norm": 0.0018827993189916015,
"learning_rate": 1.5884861407249468e-05,
"loss": 0.0002,
"step": 1600
},
{
"epoch": 3.4221748400852876,
"grad_norm": 0.0016939195338636637,
"learning_rate": 1.5778251599147122e-05,
"loss": 0.0002,
"step": 1605
},
{
"epoch": 3.4328358208955225,
"grad_norm": 0.0017598357517272234,
"learning_rate": 1.5671641791044777e-05,
"loss": 0.0002,
"step": 1610
},
{
"epoch": 3.443496801705757,
"grad_norm": 0.001619174494408071,
"learning_rate": 1.5565031982942432e-05,
"loss": 0.0002,
"step": 1615
},
{
"epoch": 3.4541577825159915,
"grad_norm": 0.0018752575851976871,
"learning_rate": 1.5458422174840087e-05,
"loss": 0.0002,
"step": 1620
},
{
"epoch": 3.464818763326226,
"grad_norm": 0.011363488622009754,
"learning_rate": 1.535181236673774e-05,
"loss": 0.0002,
"step": 1625
},
{
"epoch": 3.4754797441364604,
"grad_norm": 0.002178217750042677,
"learning_rate": 1.5245202558635396e-05,
"loss": 0.0002,
"step": 1630
},
{
"epoch": 3.486140724946695,
"grad_norm": 0.0018172799609601498,
"learning_rate": 1.5138592750533051e-05,
"loss": 0.0002,
"step": 1635
},
{
"epoch": 3.49680170575693,
"grad_norm": 0.04406259208917618,
"learning_rate": 1.5031982942430706e-05,
"loss": 0.0003,
"step": 1640
},
{
"epoch": 3.5074626865671643,
"grad_norm": 0.0036606185603886843,
"learning_rate": 1.4925373134328357e-05,
"loss": 0.0002,
"step": 1645
},
{
"epoch": 3.518123667377399,
"grad_norm": 0.0018294027540832758,
"learning_rate": 1.4818763326226012e-05,
"loss": 0.0002,
"step": 1650
},
{
"epoch": 3.5287846481876333,
"grad_norm": 0.0037881783209741116,
"learning_rate": 1.4712153518123666e-05,
"loss": 0.0002,
"step": 1655
},
{
"epoch": 3.5394456289978677,
"grad_norm": 0.0016408147057518363,
"learning_rate": 1.4605543710021321e-05,
"loss": 0.0002,
"step": 1660
},
{
"epoch": 3.550106609808102,
"grad_norm": 0.0017521231202408671,
"learning_rate": 1.4498933901918976e-05,
"loss": 0.0002,
"step": 1665
},
{
"epoch": 3.5607675906183367,
"grad_norm": 0.001984440255910158,
"learning_rate": 1.439232409381663e-05,
"loss": 0.0002,
"step": 1670
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.0032320241443812847,
"learning_rate": 1.4285714285714285e-05,
"loss": 0.0002,
"step": 1675
},
{
"epoch": 3.582089552238806,
"grad_norm": 0.001663031755015254,
"learning_rate": 1.417910447761194e-05,
"loss": 0.0002,
"step": 1680
},
{
"epoch": 3.5927505330490406,
"grad_norm": 0.0026488795410841703,
"learning_rate": 1.4072494669509595e-05,
"loss": 0.0038,
"step": 1685
},
{
"epoch": 3.603411513859275,
"grad_norm": 0.0016906807431951165,
"learning_rate": 1.396588486140725e-05,
"loss": 0.0002,
"step": 1690
},
{
"epoch": 3.6140724946695095,
"grad_norm": 0.0021994952112436295,
"learning_rate": 1.3859275053304904e-05,
"loss": 0.0002,
"step": 1695
},
{
"epoch": 3.624733475479744,
"grad_norm": 0.001628071186132729,
"learning_rate": 1.3752665245202559e-05,
"loss": 0.0002,
"step": 1700
},
{
"epoch": 3.635394456289979,
"grad_norm": 0.0016546807019039989,
"learning_rate": 1.3646055437100214e-05,
"loss": 0.0002,
"step": 1705
},
{
"epoch": 3.6460554371002134,
"grad_norm": 0.011477833613753319,
"learning_rate": 1.3539445628997869e-05,
"loss": 0.0002,
"step": 1710
},
{
"epoch": 3.656716417910448,
"grad_norm": 0.0037379234563559294,
"learning_rate": 1.3432835820895523e-05,
"loss": 0.0002,
"step": 1715
},
{
"epoch": 3.6673773987206824,
"grad_norm": 0.0017678189324215055,
"learning_rate": 1.3326226012793178e-05,
"loss": 0.0002,
"step": 1720
},
{
"epoch": 3.678038379530917,
"grad_norm": 0.0016043706564232707,
"learning_rate": 1.3219616204690833e-05,
"loss": 0.0002,
"step": 1725
},
{
"epoch": 3.6886993603411513,
"grad_norm": 0.0016335045220330358,
"learning_rate": 1.3113006396588488e-05,
"loss": 0.0002,
"step": 1730
},
{
"epoch": 3.699360341151386,
"grad_norm": 0.002535222563892603,
"learning_rate": 1.3006396588486142e-05,
"loss": 0.0063,
"step": 1735
},
{
"epoch": 3.7100213219616203,
"grad_norm": 0.001638981862924993,
"learning_rate": 1.2899786780383797e-05,
"loss": 0.0002,
"step": 1740
},
{
"epoch": 3.7206823027718547,
"grad_norm": 0.0015988540835678577,
"learning_rate": 1.2793176972281452e-05,
"loss": 0.0002,
"step": 1745
},
{
"epoch": 3.7313432835820897,
"grad_norm": 0.0016024501528590918,
"learning_rate": 1.2686567164179105e-05,
"loss": 0.0002,
"step": 1750
},
{
"epoch": 3.742004264392324,
"grad_norm": 0.0016518591437488794,
"learning_rate": 1.257995735607676e-05,
"loss": 0.0002,
"step": 1755
},
{
"epoch": 3.7526652452025586,
"grad_norm": 0.0017737026792019606,
"learning_rate": 1.2473347547974414e-05,
"loss": 0.0003,
"step": 1760
},
{
"epoch": 3.763326226012793,
"grad_norm": 0.001583307865075767,
"learning_rate": 1.2366737739872069e-05,
"loss": 0.0002,
"step": 1765
},
{
"epoch": 3.7739872068230276,
"grad_norm": 0.001610273728147149,
"learning_rate": 1.2260127931769722e-05,
"loss": 0.0002,
"step": 1770
},
{
"epoch": 3.7846481876332625,
"grad_norm": 0.00161422835662961,
"learning_rate": 1.2153518123667377e-05,
"loss": 0.0002,
"step": 1775
},
{
"epoch": 3.795309168443497,
"grad_norm": 0.001505696913227439,
"learning_rate": 1.2046908315565032e-05,
"loss": 0.0002,
"step": 1780
},
{
"epoch": 3.8059701492537314,
"grad_norm": 0.0016396938590332866,
"learning_rate": 1.1940298507462686e-05,
"loss": 0.0002,
"step": 1785
},
{
"epoch": 3.816631130063966,
"grad_norm": 0.0016325798351317644,
"learning_rate": 1.1833688699360341e-05,
"loss": 0.0002,
"step": 1790
},
{
"epoch": 3.8272921108742004,
"grad_norm": 0.0015872290823608637,
"learning_rate": 1.1727078891257996e-05,
"loss": 0.0002,
"step": 1795
},
{
"epoch": 3.837953091684435,
"grad_norm": 0.001704989350400865,
"learning_rate": 1.162046908315565e-05,
"loss": 0.0002,
"step": 1800
},
{
"epoch": 3.8486140724946694,
"grad_norm": 0.002239868976175785,
"learning_rate": 1.1513859275053305e-05,
"loss": 0.021,
"step": 1805
},
{
"epoch": 3.859275053304904,
"grad_norm": 0.001560310716740787,
"learning_rate": 1.140724946695096e-05,
"loss": 0.0002,
"step": 1810
},
{
"epoch": 3.8699360341151388,
"grad_norm": 0.0022932300344109535,
"learning_rate": 1.1300639658848615e-05,
"loss": 0.0002,
"step": 1815
},
{
"epoch": 3.8805970149253732,
"grad_norm": 0.003975142724812031,
"learning_rate": 1.119402985074627e-05,
"loss": 0.0002,
"step": 1820
},
{
"epoch": 3.8912579957356077,
"grad_norm": 0.0021527514327317476,
"learning_rate": 1.1087420042643924e-05,
"loss": 0.0002,
"step": 1825
},
{
"epoch": 3.901918976545842,
"grad_norm": 0.0016404085326939821,
"learning_rate": 1.0980810234541579e-05,
"loss": 0.0002,
"step": 1830
},
{
"epoch": 3.9125799573560767,
"grad_norm": 0.0018758618971332908,
"learning_rate": 1.0874200426439234e-05,
"loss": 0.0002,
"step": 1835
},
{
"epoch": 3.923240938166311,
"grad_norm": 0.00320087349973619,
"learning_rate": 1.0767590618336887e-05,
"loss": 0.0003,
"step": 1840
},
{
"epoch": 3.933901918976546,
"grad_norm": 0.0018033963860943913,
"learning_rate": 1.0660980810234541e-05,
"loss": 0.0002,
"step": 1845
},
{
"epoch": 3.9445628997867805,
"grad_norm": 0.006478359457105398,
"learning_rate": 1.0554371002132196e-05,
"loss": 0.0002,
"step": 1850
},
{
"epoch": 3.955223880597015,
"grad_norm": 0.001532308873720467,
"learning_rate": 1.0447761194029851e-05,
"loss": 0.0002,
"step": 1855
},
{
"epoch": 3.9658848614072495,
"grad_norm": 0.0018310812301933765,
"learning_rate": 1.0341151385927506e-05,
"loss": 0.0002,
"step": 1860
},
{
"epoch": 3.976545842217484,
"grad_norm": 0.003386010183021426,
"learning_rate": 1.023454157782516e-05,
"loss": 0.0002,
"step": 1865
},
{
"epoch": 3.9872068230277184,
"grad_norm": 0.0016056750901043415,
"learning_rate": 1.0127931769722815e-05,
"loss": 0.0002,
"step": 1870
},
{
"epoch": 3.997867803837953,
"grad_norm": 0.00147445616312325,
"learning_rate": 1.002132196162047e-05,
"loss": 0.0002,
"step": 1875
},
{
"epoch": 4.0,
"eval_accuracy": 0.9909333333333333,
"eval_loss": 0.04472832754254341,
"eval_runtime": 17.8366,
"eval_samples_per_second": 210.242,
"eval_steps_per_second": 6.616,
"step": 1876
},
{
"epoch": 4.008528784648187,
"grad_norm": 0.0015055208932608366,
"learning_rate": 9.914712153518125e-06,
"loss": 0.0002,
"step": 1880
},
{
"epoch": 4.019189765458422,
"grad_norm": 0.0015762978000566363,
"learning_rate": 9.80810234541578e-06,
"loss": 0.0002,
"step": 1885
},
{
"epoch": 4.029850746268656,
"grad_norm": 0.001574833644554019,
"learning_rate": 9.701492537313434e-06,
"loss": 0.0002,
"step": 1890
},
{
"epoch": 4.040511727078891,
"grad_norm": 0.004094330593943596,
"learning_rate": 9.594882729211089e-06,
"loss": 0.0002,
"step": 1895
},
{
"epoch": 4.051172707889126,
"grad_norm": 0.0016474354779347777,
"learning_rate": 9.488272921108744e-06,
"loss": 0.0003,
"step": 1900
},
{
"epoch": 4.061833688699361,
"grad_norm": 0.003057042835280299,
"learning_rate": 9.381663113006398e-06,
"loss": 0.0002,
"step": 1905
},
{
"epoch": 4.072494669509595,
"grad_norm": 0.0015259331557899714,
"learning_rate": 9.275053304904053e-06,
"loss": 0.0002,
"step": 1910
},
{
"epoch": 4.08315565031983,
"grad_norm": 0.00908763986080885,
"learning_rate": 9.168443496801706e-06,
"loss": 0.0002,
"step": 1915
},
{
"epoch": 4.093816631130064,
"grad_norm": 0.0015083594480529428,
"learning_rate": 9.06183368869936e-06,
"loss": 0.0002,
"step": 1920
},
{
"epoch": 4.104477611940299,
"grad_norm": 0.0015552736585959792,
"learning_rate": 8.955223880597016e-06,
"loss": 0.0002,
"step": 1925
},
{
"epoch": 4.115138592750533,
"grad_norm": 0.001449069008231163,
"learning_rate": 8.84861407249467e-06,
"loss": 0.0002,
"step": 1930
},
{
"epoch": 4.1257995735607675,
"grad_norm": 0.004730133339762688,
"learning_rate": 8.742004264392323e-06,
"loss": 0.0002,
"step": 1935
},
{
"epoch": 4.136460554371002,
"grad_norm": 0.0015674683963879943,
"learning_rate": 8.635394456289978e-06,
"loss": 0.0002,
"step": 1940
},
{
"epoch": 4.1471215351812365,
"grad_norm": 0.004498023074120283,
"learning_rate": 8.528784648187633e-06,
"loss": 0.0002,
"step": 1945
},
{
"epoch": 4.157782515991471,
"grad_norm": 0.0014473695773631334,
"learning_rate": 8.422174840085288e-06,
"loss": 0.0002,
"step": 1950
},
{
"epoch": 4.1684434968017055,
"grad_norm": 0.0018072060775011778,
"learning_rate": 8.315565031982942e-06,
"loss": 0.0024,
"step": 1955
},
{
"epoch": 4.17910447761194,
"grad_norm": 0.0021151595283299685,
"learning_rate": 8.208955223880597e-06,
"loss": 0.0002,
"step": 1960
},
{
"epoch": 4.189765458422174,
"grad_norm": 0.0014712359989061952,
"learning_rate": 8.102345415778252e-06,
"loss": 0.0002,
"step": 1965
},
{
"epoch": 4.20042643923241,
"grad_norm": 0.0014201990561559796,
"learning_rate": 7.995735607675907e-06,
"loss": 0.0002,
"step": 1970
},
{
"epoch": 4.211087420042644,
"grad_norm": 0.0017560258274897933,
"learning_rate": 7.889125799573561e-06,
"loss": 0.0002,
"step": 1975
},
{
"epoch": 4.221748400852879,
"grad_norm": 0.0014262365875765681,
"learning_rate": 7.782515991471216e-06,
"loss": 0.0002,
"step": 1980
},
{
"epoch": 4.232409381663113,
"grad_norm": 0.17100705206394196,
"learning_rate": 7.67590618336887e-06,
"loss": 0.0061,
"step": 1985
},
{
"epoch": 4.243070362473348,
"grad_norm": 0.001415537903085351,
"learning_rate": 7.5692963752665255e-06,
"loss": 0.0002,
"step": 1990
},
{
"epoch": 4.253731343283582,
"grad_norm": 0.0016385321505367756,
"learning_rate": 7.4626865671641785e-06,
"loss": 0.0002,
"step": 1995
},
{
"epoch": 4.264392324093817,
"grad_norm": 0.0015488544013351202,
"learning_rate": 7.356076759061833e-06,
"loss": 0.0002,
"step": 2000
},
{
"epoch": 4.275053304904051,
"grad_norm": 0.0014028094010427594,
"learning_rate": 7.249466950959488e-06,
"loss": 0.0002,
"step": 2005
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.004617854952812195,
"learning_rate": 7.142857142857143e-06,
"loss": 0.0002,
"step": 2010
},
{
"epoch": 4.29637526652452,
"grad_norm": 0.0016957065090537071,
"learning_rate": 7.0362473347547975e-06,
"loss": 0.0002,
"step": 2015
},
{
"epoch": 4.3070362473347545,
"grad_norm": 0.001407737610861659,
"learning_rate": 6.929637526652452e-06,
"loss": 0.0002,
"step": 2020
},
{
"epoch": 4.317697228144989,
"grad_norm": 0.10060031712055206,
"learning_rate": 6.823027718550107e-06,
"loss": 0.0026,
"step": 2025
},
{
"epoch": 4.3283582089552235,
"grad_norm": 0.0014457283541560173,
"learning_rate": 6.716417910447762e-06,
"loss": 0.0002,
"step": 2030
},
{
"epoch": 4.339019189765459,
"grad_norm": 0.001550312270410359,
"learning_rate": 6.609808102345416e-06,
"loss": 0.0002,
"step": 2035
},
{
"epoch": 4.349680170575693,
"grad_norm": 0.003339284099638462,
"learning_rate": 6.503198294243071e-06,
"loss": 0.0002,
"step": 2040
},
{
"epoch": 4.360341151385928,
"grad_norm": 0.010866936296224594,
"learning_rate": 6.396588486140726e-06,
"loss": 0.0002,
"step": 2045
},
{
"epoch": 4.371002132196162,
"grad_norm": 0.0014470528112724423,
"learning_rate": 6.28997867803838e-06,
"loss": 0.0002,
"step": 2050
},
{
"epoch": 4.381663113006397,
"grad_norm": 0.0014920171815901995,
"learning_rate": 6.1833688699360345e-06,
"loss": 0.0002,
"step": 2055
},
{
"epoch": 4.392324093816631,
"grad_norm": 0.003572584129869938,
"learning_rate": 6.076759061833688e-06,
"loss": 0.0002,
"step": 2060
},
{
"epoch": 4.402985074626866,
"grad_norm": 0.0014895932981744409,
"learning_rate": 5.970149253731343e-06,
"loss": 0.0002,
"step": 2065
},
{
"epoch": 4.4136460554371,
"grad_norm": 0.0016164244152605534,
"learning_rate": 5.863539445628998e-06,
"loss": 0.0002,
"step": 2070
},
{
"epoch": 4.424307036247335,
"grad_norm": 0.005721047054976225,
"learning_rate": 5.756929637526653e-06,
"loss": 0.0002,
"step": 2075
},
{
"epoch": 4.434968017057569,
"grad_norm": 0.0014151050709187984,
"learning_rate": 5.650319829424307e-06,
"loss": 0.0002,
"step": 2080
},
{
"epoch": 4.445628997867804,
"grad_norm": 0.002815955551341176,
"learning_rate": 5.543710021321962e-06,
"loss": 0.0002,
"step": 2085
},
{
"epoch": 4.456289978678038,
"grad_norm": 0.0015904013998806477,
"learning_rate": 5.437100213219617e-06,
"loss": 0.0002,
"step": 2090
},
{
"epoch": 4.466950959488273,
"grad_norm": 0.0016160622471943498,
"learning_rate": 5.330490405117271e-06,
"loss": 0.0002,
"step": 2095
},
{
"epoch": 4.477611940298507,
"grad_norm": 0.00136861065402627,
"learning_rate": 5.2238805970149255e-06,
"loss": 0.0002,
"step": 2100
},
{
"epoch": 4.4882729211087415,
"grad_norm": 0.0015304243424907327,
"learning_rate": 5.11727078891258e-06,
"loss": 0.0002,
"step": 2105
},
{
"epoch": 4.498933901918977,
"grad_norm": 0.0014139491831883788,
"learning_rate": 5.010660980810235e-06,
"loss": 0.0002,
"step": 2110
},
{
"epoch": 4.509594882729211,
"grad_norm": 0.001407725503668189,
"learning_rate": 4.90405117270789e-06,
"loss": 0.0002,
"step": 2115
},
{
"epoch": 4.520255863539446,
"grad_norm": 0.001434042351320386,
"learning_rate": 4.797441364605544e-06,
"loss": 0.0002,
"step": 2120
},
{
"epoch": 4.53091684434968,
"grad_norm": 0.3172968327999115,
"learning_rate": 4.690831556503199e-06,
"loss": 0.0002,
"step": 2125
},
{
"epoch": 4.541577825159915,
"grad_norm": 0.0014782707439735532,
"learning_rate": 4.584221748400853e-06,
"loss": 0.0002,
"step": 2130
},
{
"epoch": 4.552238805970149,
"grad_norm": 0.001482070074416697,
"learning_rate": 4.477611940298508e-06,
"loss": 0.0002,
"step": 2135
},
{
"epoch": 4.562899786780384,
"grad_norm": 0.0013925688108429313,
"learning_rate": 4.371002132196162e-06,
"loss": 0.0002,
"step": 2140
},
{
"epoch": 4.573560767590618,
"grad_norm": 0.0014818840427324176,
"learning_rate": 4.264392324093816e-06,
"loss": 0.0002,
"step": 2145
},
{
"epoch": 4.584221748400853,
"grad_norm": 0.011090172454714775,
"learning_rate": 4.157782515991471e-06,
"loss": 0.0002,
"step": 2150
},
{
"epoch": 4.594882729211087,
"grad_norm": 0.0013689486077055335,
"learning_rate": 4.051172707889126e-06,
"loss": 0.0002,
"step": 2155
},
{
"epoch": 4.605543710021322,
"grad_norm": 0.0015006845351308584,
"learning_rate": 3.944562899786781e-06,
"loss": 0.0002,
"step": 2160
},
{
"epoch": 4.616204690831556,
"grad_norm": 0.0014300316106528044,
"learning_rate": 3.837953091684435e-06,
"loss": 0.0002,
"step": 2165
},
{
"epoch": 4.6268656716417915,
"grad_norm": 0.0014382352819666266,
"learning_rate": 3.7313432835820893e-06,
"loss": 0.0002,
"step": 2170
},
{
"epoch": 4.637526652452026,
"grad_norm": 0.0013684292789548635,
"learning_rate": 3.624733475479744e-06,
"loss": 0.0002,
"step": 2175
},
{
"epoch": 4.6481876332622605,
"grad_norm": 0.0014151682844385505,
"learning_rate": 3.5181236673773987e-06,
"loss": 0.0002,
"step": 2180
},
{
"epoch": 4.658848614072495,
"grad_norm": 0.001544486964121461,
"learning_rate": 3.4115138592750535e-06,
"loss": 0.0002,
"step": 2185
},
{
"epoch": 4.669509594882729,
"grad_norm": 0.0015028889756649733,
"learning_rate": 3.304904051172708e-06,
"loss": 0.0002,
"step": 2190
},
{
"epoch": 4.680170575692964,
"grad_norm": 0.0015417990507557988,
"learning_rate": 3.198294243070363e-06,
"loss": 0.0002,
"step": 2195
},
{
"epoch": 4.690831556503198,
"grad_norm": 0.001461653271690011,
"learning_rate": 3.0916844349680173e-06,
"loss": 0.0002,
"step": 2200
},
{
"epoch": 4.701492537313433,
"grad_norm": 0.0032877279445528984,
"learning_rate": 2.9850746268656716e-06,
"loss": 0.0002,
"step": 2205
},
{
"epoch": 4.712153518123667,
"grad_norm": 0.0013766046613454819,
"learning_rate": 2.8784648187633263e-06,
"loss": 0.0002,
"step": 2210
},
{
"epoch": 4.722814498933902,
"grad_norm": 0.18013963103294373,
"learning_rate": 2.771855010660981e-06,
"loss": 0.006,
"step": 2215
},
{
"epoch": 4.733475479744136,
"grad_norm": 0.0014056072104722261,
"learning_rate": 2.6652452025586354e-06,
"loss": 0.0002,
"step": 2220
},
{
"epoch": 4.744136460554371,
"grad_norm": 0.0013763911556452513,
"learning_rate": 2.55863539445629e-06,
"loss": 0.0002,
"step": 2225
},
{
"epoch": 4.754797441364605,
"grad_norm": 0.0015414757654070854,
"learning_rate": 2.452025586353945e-06,
"loss": 0.0002,
"step": 2230
},
{
"epoch": 4.76545842217484,
"grad_norm": 0.0015764967538416386,
"learning_rate": 2.3454157782515996e-06,
"loss": 0.0002,
"step": 2235
},
{
"epoch": 4.776119402985074,
"grad_norm": 0.0013990583829581738,
"learning_rate": 2.238805970149254e-06,
"loss": 0.0002,
"step": 2240
},
{
"epoch": 4.786780383795309,
"grad_norm": 0.0014663510955870152,
"learning_rate": 2.132196162046908e-06,
"loss": 0.0002,
"step": 2245
},
{
"epoch": 4.797441364605544,
"grad_norm": 0.001371467486023903,
"learning_rate": 2.025586353944563e-06,
"loss": 0.0002,
"step": 2250
},
{
"epoch": 4.8081023454157785,
"grad_norm": 0.004159616772085428,
"learning_rate": 1.9189765458422177e-06,
"loss": 0.0002,
"step": 2255
},
{
"epoch": 4.818763326226013,
"grad_norm": 0.0014570379862561822,
"learning_rate": 1.812366737739872e-06,
"loss": 0.0002,
"step": 2260
},
{
"epoch": 4.8294243070362475,
"grad_norm": 0.0014461831888183951,
"learning_rate": 1.7057569296375267e-06,
"loss": 0.0002,
"step": 2265
},
{
"epoch": 4.840085287846482,
"grad_norm": 0.001420230371877551,
"learning_rate": 1.5991471215351815e-06,
"loss": 0.0002,
"step": 2270
},
{
"epoch": 4.850746268656716,
"grad_norm": 0.0015151703264564276,
"learning_rate": 1.4925373134328358e-06,
"loss": 0.0002,
"step": 2275
},
{
"epoch": 4.861407249466951,
"grad_norm": 0.0013555221958085895,
"learning_rate": 1.3859275053304905e-06,
"loss": 0.0002,
"step": 2280
},
{
"epoch": 4.872068230277185,
"grad_norm": 0.001343977521173656,
"learning_rate": 1.279317697228145e-06,
"loss": 0.0002,
"step": 2285
},
{
"epoch": 4.88272921108742,
"grad_norm": 0.001965308329090476,
"learning_rate": 1.1727078891257998e-06,
"loss": 0.0002,
"step": 2290
},
{
"epoch": 4.893390191897654,
"grad_norm": 0.0018944249022752047,
"learning_rate": 1.066098081023454e-06,
"loss": 0.0024,
"step": 2295
},
{
"epoch": 4.904051172707889,
"grad_norm": 0.001487111789174378,
"learning_rate": 9.594882729211088e-07,
"loss": 0.0002,
"step": 2300
},
{
"epoch": 4.914712153518123,
"grad_norm": 0.009423498995602131,
"learning_rate": 8.528784648187634e-07,
"loss": 0.0002,
"step": 2305
},
{
"epoch": 4.925373134328359,
"grad_norm": 0.0014211320085451007,
"learning_rate": 7.462686567164179e-07,
"loss": 0.0002,
"step": 2310
},
{
"epoch": 4.936034115138593,
"grad_norm": 0.0017777611501514912,
"learning_rate": 6.396588486140725e-07,
"loss": 0.0002,
"step": 2315
},
{
"epoch": 4.946695095948828,
"grad_norm": 0.0014010198647156358,
"learning_rate": 5.33049040511727e-07,
"loss": 0.0002,
"step": 2320
},
{
"epoch": 4.957356076759062,
"grad_norm": 0.0013863181229680777,
"learning_rate": 4.264392324093817e-07,
"loss": 0.0002,
"step": 2325
},
{
"epoch": 4.968017057569297,
"grad_norm": 0.0015228184638544917,
"learning_rate": 3.1982942430703626e-07,
"loss": 0.0002,
"step": 2330
},
{
"epoch": 4.978678038379531,
"grad_norm": 0.003958679269999266,
"learning_rate": 2.1321961620469084e-07,
"loss": 0.0002,
"step": 2335
},
{
"epoch": 4.9893390191897655,
"grad_norm": 0.0014096760423853993,
"learning_rate": 1.0660980810234542e-07,
"loss": 0.0002,
"step": 2340
},
{
"epoch": 5.0,
"grad_norm": 0.001597987487912178,
"learning_rate": 0.0,
"loss": 0.0002,
"step": 2345
},
{
"epoch": 5.0,
"eval_accuracy": 0.992,
"eval_loss": 0.04217932000756264,
"eval_runtime": 17.3493,
"eval_samples_per_second": 216.147,
"eval_steps_per_second": 6.801,
"step": 2345
},
{
"epoch": 5.0,
"step": 2345,
"total_flos": 5.8118992210944e+18,
"train_loss": 0.00785012186263134,
"train_runtime": 818.19,
"train_samples_per_second": 91.666,
"train_steps_per_second": 2.866
}
],
"logging_steps": 5,
"max_steps": 2345,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.8118992210944e+18,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}