{
"best_metric": 0.059450652450323105,
"best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles3_seed1_q1\\checkpoint-938",
"epoch": 5.0,
"eval_steps": 500,
"global_step": 2345,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.010660980810234541,
"grad_norm": 7.523022174835205,
"learning_rate": 4.989339019189766e-05,
"loss": 0.0624,
"step": 5
},
{
"epoch": 0.021321961620469083,
"grad_norm": 0.07260505110025406,
"learning_rate": 4.978678038379531e-05,
"loss": 0.0251,
"step": 10
},
{
"epoch": 0.031982942430703626,
"grad_norm": 12.401585578918457,
"learning_rate": 4.9680170575692967e-05,
"loss": 0.0088,
"step": 15
},
{
"epoch": 0.042643923240938165,
"grad_norm": 13.994823455810547,
"learning_rate": 4.957356076759062e-05,
"loss": 0.0352,
"step": 20
},
{
"epoch": 0.053304904051172705,
"grad_norm": 6.336690425872803,
"learning_rate": 4.9466950959488276e-05,
"loss": 0.0758,
"step": 25
},
{
"epoch": 0.06396588486140725,
"grad_norm": 8.967635154724121,
"learning_rate": 4.936034115138593e-05,
"loss": 0.1398,
"step": 30
},
{
"epoch": 0.07462686567164178,
"grad_norm": 1.0860027074813843,
"learning_rate": 4.9253731343283586e-05,
"loss": 0.1171,
"step": 35
},
{
"epoch": 0.08528784648187633,
"grad_norm": 10.433364868164062,
"learning_rate": 4.914712153518124e-05,
"loss": 0.1258,
"step": 40
},
{
"epoch": 0.09594882729211088,
"grad_norm": 12.0237398147583,
"learning_rate": 4.904051172707889e-05,
"loss": 0.1628,
"step": 45
},
{
"epoch": 0.10660980810234541,
"grad_norm": 0.047775354236364365,
"learning_rate": 4.893390191897655e-05,
"loss": 0.0825,
"step": 50
},
{
"epoch": 0.11727078891257996,
"grad_norm": 0.9382216334342957,
"learning_rate": 4.88272921108742e-05,
"loss": 0.0328,
"step": 55
},
{
"epoch": 0.1279317697228145,
"grad_norm": 4.386844635009766,
"learning_rate": 4.872068230277186e-05,
"loss": 0.0104,
"step": 60
},
{
"epoch": 0.13859275053304904,
"grad_norm": 0.0282029677182436,
"learning_rate": 4.861407249466951e-05,
"loss": 0.0909,
"step": 65
},
{
"epoch": 0.14925373134328357,
"grad_norm": 0.10068627446889877,
"learning_rate": 4.850746268656717e-05,
"loss": 0.0533,
"step": 70
},
{
"epoch": 0.15991471215351813,
"grad_norm": 0.6938403844833374,
"learning_rate": 4.840085287846482e-05,
"loss": 0.0468,
"step": 75
},
{
"epoch": 0.17057569296375266,
"grad_norm": 2.18245530128479,
"learning_rate": 4.829424307036248e-05,
"loss": 0.0537,
"step": 80
},
{
"epoch": 0.1812366737739872,
"grad_norm": 0.6952275037765503,
"learning_rate": 4.8187633262260126e-05,
"loss": 0.0502,
"step": 85
},
{
"epoch": 0.19189765458422176,
"grad_norm": 0.10946524888277054,
"learning_rate": 4.808102345415779e-05,
"loss": 0.0098,
"step": 90
},
{
"epoch": 0.2025586353944563,
"grad_norm": 0.023197470232844353,
"learning_rate": 4.7974413646055436e-05,
"loss": 0.0131,
"step": 95
},
{
"epoch": 0.21321961620469082,
"grad_norm": 0.6925906538963318,
"learning_rate": 4.78678038379531e-05,
"loss": 0.0141,
"step": 100
},
{
"epoch": 0.22388059701492538,
"grad_norm": 3.7687132358551025,
"learning_rate": 4.7761194029850745e-05,
"loss": 0.0293,
"step": 105
},
{
"epoch": 0.2345415778251599,
"grad_norm": 0.34725797176361084,
"learning_rate": 4.765458422174841e-05,
"loss": 0.0026,
"step": 110
},
{
"epoch": 0.24520255863539445,
"grad_norm": 8.361950874328613,
"learning_rate": 4.7547974413646055e-05,
"loss": 0.1031,
"step": 115
},
{
"epoch": 0.255863539445629,
"grad_norm": 3.192587375640869,
"learning_rate": 4.7441364605543716e-05,
"loss": 0.0303,
"step": 120
},
{
"epoch": 0.26652452025586354,
"grad_norm": 0.13406366109848022,
"learning_rate": 4.7334754797441364e-05,
"loss": 0.0921,
"step": 125
},
{
"epoch": 0.2771855010660981,
"grad_norm": 6.989262104034424,
"learning_rate": 4.7228144989339026e-05,
"loss": 0.0903,
"step": 130
},
{
"epoch": 0.2878464818763326,
"grad_norm": 0.008396037854254246,
"learning_rate": 4.7121535181236674e-05,
"loss": 0.0009,
"step": 135
},
{
"epoch": 0.29850746268656714,
"grad_norm": 13.521650314331055,
"learning_rate": 4.7014925373134335e-05,
"loss": 0.1023,
"step": 140
},
{
"epoch": 0.3091684434968017,
"grad_norm": 0.11828146874904633,
"learning_rate": 4.690831556503198e-05,
"loss": 0.045,
"step": 145
},
{
"epoch": 0.31982942430703626,
"grad_norm": 0.23041707277297974,
"learning_rate": 4.6801705756929645e-05,
"loss": 0.0303,
"step": 150
},
{
"epoch": 0.3304904051172708,
"grad_norm": 1.239055871963501,
"learning_rate": 4.669509594882729e-05,
"loss": 0.0026,
"step": 155
},
{
"epoch": 0.3411513859275053,
"grad_norm": 4.681065082550049,
"learning_rate": 4.658848614072495e-05,
"loss": 0.0364,
"step": 160
},
{
"epoch": 0.35181236673773986,
"grad_norm": 5.469776153564453,
"learning_rate": 4.64818763326226e-05,
"loss": 0.0121,
"step": 165
},
{
"epoch": 0.3624733475479744,
"grad_norm": 2.5136218070983887,
"learning_rate": 4.637526652452026e-05,
"loss": 0.0625,
"step": 170
},
{
"epoch": 0.373134328358209,
"grad_norm": 5.015021324157715,
"learning_rate": 4.626865671641791e-05,
"loss": 0.0346,
"step": 175
},
{
"epoch": 0.3837953091684435,
"grad_norm": 5.595672607421875,
"learning_rate": 4.6162046908315566e-05,
"loss": 0.1033,
"step": 180
},
{
"epoch": 0.39445628997867804,
"grad_norm": 1.053650140762329,
"learning_rate": 4.605543710021322e-05,
"loss": 0.0572,
"step": 185
},
{
"epoch": 0.4051172707889126,
"grad_norm": 0.6538744568824768,
"learning_rate": 4.5948827292110876e-05,
"loss": 0.018,
"step": 190
},
{
"epoch": 0.4157782515991471,
"grad_norm": 1.1336654424667358,
"learning_rate": 4.584221748400853e-05,
"loss": 0.0264,
"step": 195
},
{
"epoch": 0.42643923240938164,
"grad_norm": 0.10380210727453232,
"learning_rate": 4.5735607675906185e-05,
"loss": 0.049,
"step": 200
},
{
"epoch": 0.43710021321961623,
"grad_norm": 0.2752641439437866,
"learning_rate": 4.562899786780384e-05,
"loss": 0.0372,
"step": 205
},
{
"epoch": 0.44776119402985076,
"grad_norm": 0.018331853672862053,
"learning_rate": 4.5522388059701495e-05,
"loss": 0.0784,
"step": 210
},
{
"epoch": 0.4584221748400853,
"grad_norm": 4.259432315826416,
"learning_rate": 4.541577825159915e-05,
"loss": 0.1846,
"step": 215
},
{
"epoch": 0.4690831556503198,
"grad_norm": 2.1121721267700195,
"learning_rate": 4.5309168443496804e-05,
"loss": 0.089,
"step": 220
},
{
"epoch": 0.47974413646055436,
"grad_norm": 0.08846390247344971,
"learning_rate": 4.520255863539446e-05,
"loss": 0.0203,
"step": 225
},
{
"epoch": 0.4904051172707889,
"grad_norm": 3.529513359069824,
"learning_rate": 4.5095948827292114e-05,
"loss": 0.0619,
"step": 230
},
{
"epoch": 0.5010660980810234,
"grad_norm": 2.0754308700561523,
"learning_rate": 4.498933901918977e-05,
"loss": 0.0643,
"step": 235
},
{
"epoch": 0.511727078891258,
"grad_norm": 0.41601184010505676,
"learning_rate": 4.488272921108742e-05,
"loss": 0.0792,
"step": 240
},
{
"epoch": 0.5223880597014925,
"grad_norm": 0.21950437128543854,
"learning_rate": 4.477611940298508e-05,
"loss": 0.0295,
"step": 245
},
{
"epoch": 0.5330490405117271,
"grad_norm": 0.38914090394973755,
"learning_rate": 4.466950959488273e-05,
"loss": 0.0392,
"step": 250
},
{
"epoch": 0.5437100213219617,
"grad_norm": 1.4780489206314087,
"learning_rate": 4.456289978678039e-05,
"loss": 0.0068,
"step": 255
},
{
"epoch": 0.5543710021321961,
"grad_norm": 1.410050630569458,
"learning_rate": 4.445628997867804e-05,
"loss": 0.0052,
"step": 260
},
{
"epoch": 0.5650319829424307,
"grad_norm": 1.8445395231246948,
"learning_rate": 4.43496801705757e-05,
"loss": 0.0472,
"step": 265
},
{
"epoch": 0.5756929637526652,
"grad_norm": 1.1135804653167725,
"learning_rate": 4.424307036247335e-05,
"loss": 0.0457,
"step": 270
},
{
"epoch": 0.5863539445628998,
"grad_norm": 0.1754743605852127,
"learning_rate": 4.4136460554371006e-05,
"loss": 0.0249,
"step": 275
},
{
"epoch": 0.5970149253731343,
"grad_norm": 5.77360200881958,
"learning_rate": 4.402985074626866e-05,
"loss": 0.0621,
"step": 280
},
{
"epoch": 0.6076759061833689,
"grad_norm": 0.1517520397901535,
"learning_rate": 4.3923240938166316e-05,
"loss": 0.0742,
"step": 285
},
{
"epoch": 0.6183368869936035,
"grad_norm": 6.860696315765381,
"learning_rate": 4.381663113006397e-05,
"loss": 0.0445,
"step": 290
},
{
"epoch": 0.6289978678038379,
"grad_norm": 9.31261920928955,
"learning_rate": 4.3710021321961625e-05,
"loss": 0.0586,
"step": 295
},
{
"epoch": 0.6396588486140725,
"grad_norm": 0.022177331149578094,
"learning_rate": 4.360341151385928e-05,
"loss": 0.1295,
"step": 300
},
{
"epoch": 0.650319829424307,
"grad_norm": 2.3088161945343018,
"learning_rate": 4.3496801705756935e-05,
"loss": 0.0358,
"step": 305
},
{
"epoch": 0.6609808102345416,
"grad_norm": 0.9260404109954834,
"learning_rate": 4.339019189765459e-05,
"loss": 0.0124,
"step": 310
},
{
"epoch": 0.6716417910447762,
"grad_norm": 0.6644256114959717,
"learning_rate": 4.328358208955224e-05,
"loss": 0.0386,
"step": 315
},
{
"epoch": 0.6823027718550106,
"grad_norm": 0.13432615995407104,
"learning_rate": 4.31769722814499e-05,
"loss": 0.0253,
"step": 320
},
{
"epoch": 0.6929637526652452,
"grad_norm": 0.011860758997499943,
"learning_rate": 4.307036247334755e-05,
"loss": 0.0418,
"step": 325
},
{
"epoch": 0.7036247334754797,
"grad_norm": 0.03158905729651451,
"learning_rate": 4.29637526652452e-05,
"loss": 0.0185,
"step": 330
},
{
"epoch": 0.7142857142857143,
"grad_norm": 6.030971527099609,
"learning_rate": 4.2857142857142856e-05,
"loss": 0.0248,
"step": 335
},
{
"epoch": 0.7249466950959488,
"grad_norm": 0.25387847423553467,
"learning_rate": 4.275053304904051e-05,
"loss": 0.0046,
"step": 340
},
{
"epoch": 0.7356076759061834,
"grad_norm": 6.103703498840332,
"learning_rate": 4.2643923240938166e-05,
"loss": 0.0573,
"step": 345
},
{
"epoch": 0.746268656716418,
"grad_norm": 4.099830150604248,
"learning_rate": 4.253731343283582e-05,
"loss": 0.0132,
"step": 350
},
{
"epoch": 0.7569296375266524,
"grad_norm": 8.336865425109863,
"learning_rate": 4.2430703624733475e-05,
"loss": 0.0711,
"step": 355
},
{
"epoch": 0.767590618336887,
"grad_norm": 0.04767255485057831,
"learning_rate": 4.232409381663113e-05,
"loss": 0.0543,
"step": 360
},
{
"epoch": 0.7782515991471215,
"grad_norm": 2.82381534576416,
"learning_rate": 4.2217484008528785e-05,
"loss": 0.0295,
"step": 365
},
{
"epoch": 0.7889125799573561,
"grad_norm": 3.60748553276062,
"learning_rate": 4.211087420042644e-05,
"loss": 0.028,
"step": 370
},
{
"epoch": 0.7995735607675906,
"grad_norm": 0.21805140376091003,
"learning_rate": 4.2004264392324094e-05,
"loss": 0.0682,
"step": 375
},
{
"epoch": 0.8102345415778252,
"grad_norm": 2.768376588821411,
"learning_rate": 4.189765458422175e-05,
"loss": 0.1194,
"step": 380
},
{
"epoch": 0.8208955223880597,
"grad_norm": 0.9290714263916016,
"learning_rate": 4.1791044776119404e-05,
"loss": 0.0444,
"step": 385
},
{
"epoch": 0.8315565031982942,
"grad_norm": 3.394944667816162,
"learning_rate": 4.168443496801706e-05,
"loss": 0.0494,
"step": 390
},
{
"epoch": 0.8422174840085288,
"grad_norm": 0.16371995210647583,
"learning_rate": 4.157782515991471e-05,
"loss": 0.0337,
"step": 395
},
{
"epoch": 0.8528784648187633,
"grad_norm": 7.573339462280273,
"learning_rate": 4.147121535181237e-05,
"loss": 0.0512,
"step": 400
},
{
"epoch": 0.8635394456289979,
"grad_norm": 0.3734154999256134,
"learning_rate": 4.136460554371002e-05,
"loss": 0.1004,
"step": 405
},
{
"epoch": 0.8742004264392325,
"grad_norm": 0.3404262661933899,
"learning_rate": 4.125799573560768e-05,
"loss": 0.0386,
"step": 410
},
{
"epoch": 0.8848614072494669,
"grad_norm": 3.594224214553833,
"learning_rate": 4.115138592750533e-05,
"loss": 0.0347,
"step": 415
},
{
"epoch": 0.8955223880597015,
"grad_norm": 0.39725932478904724,
"learning_rate": 4.104477611940299e-05,
"loss": 0.0246,
"step": 420
},
{
"epoch": 0.906183368869936,
"grad_norm": 0.10839869827032089,
"learning_rate": 4.093816631130064e-05,
"loss": 0.056,
"step": 425
},
{
"epoch": 0.9168443496801706,
"grad_norm": 0.05952761694788933,
"learning_rate": 4.0831556503198296e-05,
"loss": 0.058,
"step": 430
},
{
"epoch": 0.9275053304904051,
"grad_norm": 1.0791202783584595,
"learning_rate": 4.072494669509595e-05,
"loss": 0.006,
"step": 435
},
{
"epoch": 0.9381663113006397,
"grad_norm": 0.011694577522575855,
"learning_rate": 4.0618336886993606e-05,
"loss": 0.0078,
"step": 440
},
{
"epoch": 0.9488272921108742,
"grad_norm": 0.2500549852848053,
"learning_rate": 4.051172707889126e-05,
"loss": 0.0076,
"step": 445
},
{
"epoch": 0.9594882729211087,
"grad_norm": 4.215610504150391,
"learning_rate": 4.0405117270788915e-05,
"loss": 0.0377,
"step": 450
},
{
"epoch": 0.9701492537313433,
"grad_norm": 2.921285390853882,
"learning_rate": 4.029850746268657e-05,
"loss": 0.0395,
"step": 455
},
{
"epoch": 0.9808102345415778,
"grad_norm": 6.361176013946533,
"learning_rate": 4.0191897654584225e-05,
"loss": 0.0361,
"step": 460
},
{
"epoch": 0.9914712153518124,
"grad_norm": 4.146510601043701,
"learning_rate": 4.008528784648188e-05,
"loss": 0.0888,
"step": 465
},
{
"epoch": 1.0,
"eval_accuracy": 0.9682666666666667,
"eval_loss": 0.12556278705596924,
"eval_runtime": 18.1564,
"eval_samples_per_second": 206.538,
"eval_steps_per_second": 6.499,
"step": 469
},
{
"epoch": 1.0021321961620469,
"grad_norm": 0.01982228457927704,
"learning_rate": 3.997867803837953e-05,
"loss": 0.0171,
"step": 470
},
{
"epoch": 1.0127931769722816,
"grad_norm": 0.10318928211927414,
"learning_rate": 3.987206823027719e-05,
"loss": 0.086,
"step": 475
},
{
"epoch": 1.023454157782516,
"grad_norm": 0.0037166441325098276,
"learning_rate": 3.976545842217484e-05,
"loss": 0.0152,
"step": 480
},
{
"epoch": 1.0341151385927505,
"grad_norm": 0.2977741062641144,
"learning_rate": 3.96588486140725e-05,
"loss": 0.0264,
"step": 485
},
{
"epoch": 1.044776119402985,
"grad_norm": 0.01882185786962509,
"learning_rate": 3.9552238805970146e-05,
"loss": 0.022,
"step": 490
},
{
"epoch": 1.0554371002132197,
"grad_norm": 0.3763905167579651,
"learning_rate": 3.944562899786781e-05,
"loss": 0.025,
"step": 495
},
{
"epoch": 1.0660980810234542,
"grad_norm": 0.05068141222000122,
"learning_rate": 3.9339019189765456e-05,
"loss": 0.0025,
"step": 500
},
{
"epoch": 1.0767590618336886,
"grad_norm": 0.014066731557250023,
"learning_rate": 3.923240938166312e-05,
"loss": 0.0067,
"step": 505
},
{
"epoch": 1.0874200426439233,
"grad_norm": 0.05760305002331734,
"learning_rate": 3.9125799573560765e-05,
"loss": 0.014,
"step": 510
},
{
"epoch": 1.0980810234541578,
"grad_norm": 0.48546120524406433,
"learning_rate": 3.901918976545843e-05,
"loss": 0.0422,
"step": 515
},
{
"epoch": 1.1087420042643923,
"grad_norm": 0.12980687618255615,
"learning_rate": 3.8912579957356075e-05,
"loss": 0.004,
"step": 520
},
{
"epoch": 1.1194029850746268,
"grad_norm": 0.019840959459543228,
"learning_rate": 3.8805970149253736e-05,
"loss": 0.0018,
"step": 525
},
{
"epoch": 1.1300639658848615,
"grad_norm": 0.03178727626800537,
"learning_rate": 3.8699360341151384e-05,
"loss": 0.0012,
"step": 530
},
{
"epoch": 1.140724946695096,
"grad_norm": 0.024738993495702744,
"learning_rate": 3.8592750533049046e-05,
"loss": 0.0007,
"step": 535
},
{
"epoch": 1.1513859275053304,
"grad_norm": 0.04438888281583786,
"learning_rate": 3.8486140724946694e-05,
"loss": 0.0385,
"step": 540
},
{
"epoch": 1.1620469083155651,
"grad_norm": 0.08628378063440323,
"learning_rate": 3.8379530916844355e-05,
"loss": 0.0037,
"step": 545
},
{
"epoch": 1.1727078891257996,
"grad_norm": 0.03334299102425575,
"learning_rate": 3.8272921108742e-05,
"loss": 0.0265,
"step": 550
},
{
"epoch": 1.183368869936034,
"grad_norm": 0.16531717777252197,
"learning_rate": 3.8166311300639665e-05,
"loss": 0.0075,
"step": 555
},
{
"epoch": 1.1940298507462686,
"grad_norm": 0.006571681704372168,
"learning_rate": 3.805970149253731e-05,
"loss": 0.0006,
"step": 560
},
{
"epoch": 1.2046908315565032,
"grad_norm": 4.12578010559082,
"learning_rate": 3.7953091684434974e-05,
"loss": 0.0178,
"step": 565
},
{
"epoch": 1.2153518123667377,
"grad_norm": 0.0815979465842247,
"learning_rate": 3.784648187633262e-05,
"loss": 0.0037,
"step": 570
},
{
"epoch": 1.2260127931769722,
"grad_norm": 3.699103593826294,
"learning_rate": 3.7739872068230284e-05,
"loss": 0.0055,
"step": 575
},
{
"epoch": 1.236673773987207,
"grad_norm": 0.012649956159293652,
"learning_rate": 3.763326226012793e-05,
"loss": 0.0021,
"step": 580
},
{
"epoch": 1.2473347547974414,
"grad_norm": 4.331559658050537,
"learning_rate": 3.752665245202559e-05,
"loss": 0.0329,
"step": 585
},
{
"epoch": 1.2579957356076759,
"grad_norm": 0.42186281085014343,
"learning_rate": 3.742004264392324e-05,
"loss": 0.0013,
"step": 590
},
{
"epoch": 1.2686567164179103,
"grad_norm": 0.011500491760671139,
"learning_rate": 3.73134328358209e-05,
"loss": 0.001,
"step": 595
},
{
"epoch": 1.279317697228145,
"grad_norm": 0.042148228734731674,
"learning_rate": 3.720682302771855e-05,
"loss": 0.0414,
"step": 600
},
{
"epoch": 1.2899786780383795,
"grad_norm": 0.026529042050242424,
"learning_rate": 3.710021321961621e-05,
"loss": 0.0092,
"step": 605
},
{
"epoch": 1.3006396588486142,
"grad_norm": 2.9121885299682617,
"learning_rate": 3.699360341151386e-05,
"loss": 0.0701,
"step": 610
},
{
"epoch": 1.3113006396588487,
"grad_norm": 0.00463957991451025,
"learning_rate": 3.6886993603411515e-05,
"loss": 0.0016,
"step": 615
},
{
"epoch": 1.3219616204690832,
"grad_norm": 0.010834756307303905,
"learning_rate": 3.678038379530917e-05,
"loss": 0.0024,
"step": 620
},
{
"epoch": 1.3326226012793176,
"grad_norm": 0.03318125382065773,
"learning_rate": 3.6673773987206824e-05,
"loss": 0.0016,
"step": 625
},
{
"epoch": 1.3432835820895521,
"grad_norm": 0.02642156556248665,
"learning_rate": 3.656716417910448e-05,
"loss": 0.0232,
"step": 630
},
{
"epoch": 1.3539445628997868,
"grad_norm": 0.0203594621270895,
"learning_rate": 3.6460554371002134e-05,
"loss": 0.019,
"step": 635
},
{
"epoch": 1.3646055437100213,
"grad_norm": 0.1975228190422058,
"learning_rate": 3.635394456289979e-05,
"loss": 0.0345,
"step": 640
},
{
"epoch": 1.375266524520256,
"grad_norm": 1.1461132764816284,
"learning_rate": 3.624733475479744e-05,
"loss": 0.0025,
"step": 645
},
{
"epoch": 1.3859275053304905,
"grad_norm": 3.93400502204895,
"learning_rate": 3.61407249466951e-05,
"loss": 0.028,
"step": 650
},
{
"epoch": 1.396588486140725,
"grad_norm": 0.2767792344093323,
"learning_rate": 3.603411513859275e-05,
"loss": 0.0029,
"step": 655
},
{
"epoch": 1.4072494669509594,
"grad_norm": 0.02454341948032379,
"learning_rate": 3.592750533049041e-05,
"loss": 0.0079,
"step": 660
},
{
"epoch": 1.417910447761194,
"grad_norm": 0.5461854338645935,
"learning_rate": 3.582089552238806e-05,
"loss": 0.0021,
"step": 665
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.007815384306013584,
"learning_rate": 3.571428571428572e-05,
"loss": 0.0167,
"step": 670
},
{
"epoch": 1.439232409381663,
"grad_norm": 0.030073419213294983,
"learning_rate": 3.560767590618337e-05,
"loss": 0.0174,
"step": 675
},
{
"epoch": 1.4498933901918978,
"grad_norm": 0.04773944616317749,
"learning_rate": 3.5501066098081026e-05,
"loss": 0.007,
"step": 680
},
{
"epoch": 1.4605543710021323,
"grad_norm": 0.012186390347778797,
"learning_rate": 3.539445628997868e-05,
"loss": 0.0007,
"step": 685
},
{
"epoch": 1.4712153518123667,
"grad_norm": 0.02012632042169571,
"learning_rate": 3.5287846481876336e-05,
"loss": 0.0005,
"step": 690
},
{
"epoch": 1.4818763326226012,
"grad_norm": 0.006190048530697823,
"learning_rate": 3.518123667377399e-05,
"loss": 0.0037,
"step": 695
},
{
"epoch": 1.4925373134328357,
"grad_norm": 0.00781849306076765,
"learning_rate": 3.5074626865671645e-05,
"loss": 0.0006,
"step": 700
},
{
"epoch": 1.5031982942430704,
"grad_norm": 0.012595439329743385,
"learning_rate": 3.496801705756929e-05,
"loss": 0.0007,
"step": 705
},
{
"epoch": 1.5138592750533049,
"grad_norm": 5.092950820922852,
"learning_rate": 3.4861407249466955e-05,
"loss": 0.0254,
"step": 710
},
{
"epoch": 1.5245202558635396,
"grad_norm": 0.013176420703530312,
"learning_rate": 3.47547974413646e-05,
"loss": 0.0005,
"step": 715
},
{
"epoch": 1.535181236673774,
"grad_norm": 0.002772344509139657,
"learning_rate": 3.4648187633262264e-05,
"loss": 0.0011,
"step": 720
},
{
"epoch": 1.5458422174840085,
"grad_norm": 2.556558847427368,
"learning_rate": 3.454157782515991e-05,
"loss": 0.0056,
"step": 725
},
{
"epoch": 1.556503198294243,
"grad_norm": 0.0028016751166433096,
"learning_rate": 3.4434968017057574e-05,
"loss": 0.0006,
"step": 730
},
{
"epoch": 1.5671641791044775,
"grad_norm": 0.002541080117225647,
"learning_rate": 3.432835820895522e-05,
"loss": 0.0005,
"step": 735
},
{
"epoch": 1.5778251599147122,
"grad_norm": 0.01701749488711357,
"learning_rate": 3.422174840085288e-05,
"loss": 0.0005,
"step": 740
},
{
"epoch": 1.5884861407249466,
"grad_norm": 0.0032772503327578306,
"learning_rate": 3.411513859275053e-05,
"loss": 0.0015,
"step": 745
},
{
"epoch": 1.5991471215351813,
"grad_norm": 0.4739973247051239,
"learning_rate": 3.400852878464819e-05,
"loss": 0.0255,
"step": 750
},
{
"epoch": 1.6098081023454158,
"grad_norm": 0.031100528314709663,
"learning_rate": 3.390191897654584e-05,
"loss": 0.0094,
"step": 755
},
{
"epoch": 1.6204690831556503,
"grad_norm": 0.016540905460715294,
"learning_rate": 3.37953091684435e-05,
"loss": 0.0103,
"step": 760
},
{
"epoch": 1.6311300639658848,
"grad_norm": 0.0029734622221440077,
"learning_rate": 3.368869936034115e-05,
"loss": 0.0003,
"step": 765
},
{
"epoch": 1.6417910447761193,
"grad_norm": 3.637174367904663,
"learning_rate": 3.358208955223881e-05,
"loss": 0.0155,
"step": 770
},
{
"epoch": 1.652452025586354,
"grad_norm": 0.002992335008457303,
"learning_rate": 3.347547974413646e-05,
"loss": 0.0082,
"step": 775
},
{
"epoch": 1.6631130063965884,
"grad_norm": 0.0026899639051407576,
"learning_rate": 3.336886993603412e-05,
"loss": 0.0015,
"step": 780
},
{
"epoch": 1.6737739872068231,
"grad_norm": 0.02699490822851658,
"learning_rate": 3.326226012793177e-05,
"loss": 0.0007,
"step": 785
},
{
"epoch": 1.6844349680170576,
"grad_norm": 0.007621058728545904,
"learning_rate": 3.3155650319829424e-05,
"loss": 0.0246,
"step": 790
},
{
"epoch": 1.695095948827292,
"grad_norm": 0.012562038376927376,
"learning_rate": 3.304904051172708e-05,
"loss": 0.0031,
"step": 795
},
{
"epoch": 1.7057569296375266,
"grad_norm": 0.002219436690211296,
"learning_rate": 3.294243070362473e-05,
"loss": 0.0005,
"step": 800
},
{
"epoch": 1.716417910447761,
"grad_norm": 0.09879221022129059,
"learning_rate": 3.283582089552239e-05,
"loss": 0.0015,
"step": 805
},
{
"epoch": 1.7270788912579957,
"grad_norm": 0.07470264285802841,
"learning_rate": 3.272921108742004e-05,
"loss": 0.0136,
"step": 810
},
{
"epoch": 1.7377398720682304,
"grad_norm": 0.002869713120162487,
"learning_rate": 3.26226012793177e-05,
"loss": 0.0006,
"step": 815
},
{
"epoch": 1.748400852878465,
"grad_norm": 3.1059861183166504,
"learning_rate": 3.251599147121535e-05,
"loss": 0.0351,
"step": 820
},
{
"epoch": 1.7590618336886994,
"grad_norm": 2.2680935859680176,
"learning_rate": 3.240938166311301e-05,
"loss": 0.0479,
"step": 825
},
{
"epoch": 1.7697228144989339,
"grad_norm": 0.0024357482325285673,
"learning_rate": 3.230277185501066e-05,
"loss": 0.0373,
"step": 830
},
{
"epoch": 1.7803837953091683,
"grad_norm": 0.010801059193909168,
"learning_rate": 3.2196162046908317e-05,
"loss": 0.005,
"step": 835
},
{
"epoch": 1.7910447761194028,
"grad_norm": 0.0023505063727498055,
"learning_rate": 3.208955223880597e-05,
"loss": 0.0068,
"step": 840
},
{
"epoch": 1.8017057569296375,
"grad_norm": 0.004954994190484285,
"learning_rate": 3.1982942430703626e-05,
"loss": 0.0006,
"step": 845
},
{
"epoch": 1.8123667377398722,
"grad_norm": 0.005325252655893564,
"learning_rate": 3.187633262260128e-05,
"loss": 0.0013,
"step": 850
},
{
"epoch": 1.8230277185501067,
"grad_norm": 0.0022678172681480646,
"learning_rate": 3.1769722814498935e-05,
"loss": 0.0451,
"step": 855
},
{
"epoch": 1.8336886993603412,
"grad_norm": 0.05244056135416031,
"learning_rate": 3.166311300639659e-05,
"loss": 0.0004,
"step": 860
},
{
"epoch": 1.8443496801705757,
"grad_norm": 6.828389644622803,
"learning_rate": 3.1556503198294245e-05,
"loss": 0.0543,
"step": 865
},
{
"epoch": 1.8550106609808101,
"grad_norm": 0.0025951359421014786,
"learning_rate": 3.14498933901919e-05,
"loss": 0.0009,
"step": 870
},
{
"epoch": 1.8656716417910446,
"grad_norm": 0.025431588292121887,
"learning_rate": 3.1343283582089554e-05,
"loss": 0.0005,
"step": 875
},
{
"epoch": 1.8763326226012793,
"grad_norm": 0.5160558819770813,
"learning_rate": 3.123667377398721e-05,
"loss": 0.0451,
"step": 880
},
{
"epoch": 1.886993603411514,
"grad_norm": 0.005602386314421892,
"learning_rate": 3.1130063965884864e-05,
"loss": 0.0013,
"step": 885
},
{
"epoch": 1.8976545842217485,
"grad_norm": 0.008902816101908684,
"learning_rate": 3.102345415778252e-05,
"loss": 0.0005,
"step": 890
},
{
"epoch": 1.908315565031983,
"grad_norm": 0.008681601844727993,
"learning_rate": 3.0916844349680173e-05,
"loss": 0.0163,
"step": 895
},
{
"epoch": 1.9189765458422174,
"grad_norm": 0.0028283647261559963,
"learning_rate": 3.081023454157783e-05,
"loss": 0.0006,
"step": 900
},
{
"epoch": 1.929637526652452,
"grad_norm": 0.013242118060588837,
"learning_rate": 3.070362473347548e-05,
"loss": 0.0009,
"step": 905
},
{
"epoch": 1.9402985074626866,
"grad_norm": 3.295297145843506,
"learning_rate": 3.059701492537314e-05,
"loss": 0.0144,
"step": 910
},
{
"epoch": 1.950959488272921,
"grad_norm": 0.005470735486596823,
"learning_rate": 3.0490405117270792e-05,
"loss": 0.0003,
"step": 915
},
{
"epoch": 1.9616204690831558,
"grad_norm": 0.0032625121530145407,
"learning_rate": 3.0383795309168444e-05,
"loss": 0.0003,
"step": 920
},
{
"epoch": 1.9722814498933903,
"grad_norm": 0.003790609771385789,
"learning_rate": 3.0277185501066102e-05,
"loss": 0.0666,
"step": 925
},
{
"epoch": 1.9829424307036247,
"grad_norm": 0.0024929724168032408,
"learning_rate": 3.0170575692963753e-05,
"loss": 0.0003,
"step": 930
},
{
"epoch": 1.9936034115138592,
"grad_norm": 0.05972645431756973,
"learning_rate": 3.006396588486141e-05,
"loss": 0.0005,
"step": 935
},
{
"epoch": 2.0,
"eval_accuracy": 0.9829333333333333,
"eval_loss": 0.059450652450323105,
"eval_runtime": 17.4029,
"eval_samples_per_second": 215.482,
"eval_steps_per_second": 6.78,
"step": 938
},
{
"epoch": 2.0042643923240937,
"grad_norm": 0.011405990459024906,
"learning_rate": 2.9957356076759063e-05,
"loss": 0.0008,
"step": 940
},
{
"epoch": 2.014925373134328,
"grad_norm": 0.008791730739176273,
"learning_rate": 2.9850746268656714e-05,
"loss": 0.0005,
"step": 945
},
{
"epoch": 2.025586353944563,
"grad_norm": 0.21560530364513397,
"learning_rate": 2.9744136460554372e-05,
"loss": 0.0009,
"step": 950
},
{
"epoch": 2.0362473347547976,
"grad_norm": 0.011877737939357758,
"learning_rate": 2.9637526652452023e-05,
"loss": 0.0004,
"step": 955
},
{
"epoch": 2.046908315565032,
"grad_norm": 0.0031844661571085453,
"learning_rate": 2.953091684434968e-05,
"loss": 0.0004,
"step": 960
},
{
"epoch": 2.0575692963752665,
"grad_norm": 0.01256948709487915,
"learning_rate": 2.9424307036247333e-05,
"loss": 0.0004,
"step": 965
},
{
"epoch": 2.068230277185501,
"grad_norm": 0.003664059331640601,
"learning_rate": 2.931769722814499e-05,
"loss": 0.0005,
"step": 970
},
{
"epoch": 2.0788912579957355,
"grad_norm": 0.009233221411705017,
"learning_rate": 2.9211087420042642e-05,
"loss": 0.0005,
"step": 975
},
{
"epoch": 2.08955223880597,
"grad_norm": 0.002802032744511962,
"learning_rate": 2.91044776119403e-05,
"loss": 0.0004,
"step": 980
},
{
"epoch": 2.100213219616205,
"grad_norm": 0.0022706035524606705,
"learning_rate": 2.8997867803837952e-05,
"loss": 0.0007,
"step": 985
},
{
"epoch": 2.1108742004264394,
"grad_norm": 0.002773319836705923,
"learning_rate": 2.889125799573561e-05,
"loss": 0.0006,
"step": 990
},
{
"epoch": 2.121535181236674,
"grad_norm": 0.0067685265094041824,
"learning_rate": 2.878464818763326e-05,
"loss": 0.0003,
"step": 995
},
{
"epoch": 2.1321961620469083,
"grad_norm": 0.014644935727119446,
"learning_rate": 2.867803837953092e-05,
"loss": 0.0005,
"step": 1000
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.003831750713288784,
"learning_rate": 2.857142857142857e-05,
"loss": 0.0004,
"step": 1005
},
{
"epoch": 2.1535181236673773,
"grad_norm": 0.013705959543585777,
"learning_rate": 2.846481876332623e-05,
"loss": 0.0026,
"step": 1010
},
{
"epoch": 2.1641791044776117,
"grad_norm": 0.0025688319001346827,
"learning_rate": 2.835820895522388e-05,
"loss": 0.0004,
"step": 1015
},
{
"epoch": 2.1748400852878467,
"grad_norm": 0.2528219223022461,
"learning_rate": 2.825159914712154e-05,
"loss": 0.0014,
"step": 1020
},
{
"epoch": 2.185501066098081,
"grad_norm": 0.0019451375119388103,
"learning_rate": 2.814498933901919e-05,
"loss": 0.0005,
"step": 1025
},
{
"epoch": 2.1961620469083156,
"grad_norm": 0.0054586827754974365,
"learning_rate": 2.8038379530916848e-05,
"loss": 0.0003,
"step": 1030
},
{
"epoch": 2.20682302771855,
"grad_norm": 0.0018325847340747714,
"learning_rate": 2.79317697228145e-05,
"loss": 0.0031,
"step": 1035
},
{
"epoch": 2.2174840085287846,
"grad_norm": 0.030601533129811287,
"learning_rate": 2.7825159914712157e-05,
"loss": 0.0004,
"step": 1040
},
{
"epoch": 2.228144989339019,
"grad_norm": 0.002786671044304967,
"learning_rate": 2.771855010660981e-05,
"loss": 0.0065,
"step": 1045
},
{
"epoch": 2.2388059701492535,
"grad_norm": 0.0025517321191728115,
"learning_rate": 2.7611940298507467e-05,
"loss": 0.0002,
"step": 1050
},
{
"epoch": 2.2494669509594885,
"grad_norm": 0.0022363627795130014,
"learning_rate": 2.7505330490405118e-05,
"loss": 0.0003,
"step": 1055
},
{
"epoch": 2.260127931769723,
"grad_norm": 0.002015487290918827,
"learning_rate": 2.7398720682302776e-05,
"loss": 0.0002,
"step": 1060
},
{
"epoch": 2.2707889125799574,
"grad_norm": 0.002033746801316738,
"learning_rate": 2.7292110874200428e-05,
"loss": 0.0003,
"step": 1065
},
{
"epoch": 2.281449893390192,
"grad_norm": 0.23982590436935425,
"learning_rate": 2.7185501066098086e-05,
"loss": 0.0007,
"step": 1070
},
{
"epoch": 2.2921108742004264,
"grad_norm": 0.0019197396468371153,
"learning_rate": 2.7078891257995737e-05,
"loss": 0.0111,
"step": 1075
},
{
"epoch": 2.302771855010661,
"grad_norm": 0.001743512460961938,
"learning_rate": 2.6972281449893395e-05,
"loss": 0.0004,
"step": 1080
},
{
"epoch": 2.3134328358208958,
"grad_norm": 0.011543733067810535,
"learning_rate": 2.6865671641791047e-05,
"loss": 0.0002,
"step": 1085
},
{
"epoch": 2.3240938166311302,
"grad_norm": 0.001960090361535549,
"learning_rate": 2.6759061833688705e-05,
"loss": 0.0002,
"step": 1090
},
{
"epoch": 2.3347547974413647,
"grad_norm": 0.0021510159131139517,
"learning_rate": 2.6652452025586356e-05,
"loss": 0.0003,
"step": 1095
},
{
"epoch": 2.345415778251599,
"grad_norm": 0.001994698541238904,
"learning_rate": 2.6545842217484007e-05,
"loss": 0.0051,
"step": 1100
},
{
"epoch": 2.3560767590618337,
"grad_norm": 0.0020854491740465164,
"learning_rate": 2.6439232409381666e-05,
"loss": 0.0002,
"step": 1105
},
{
"epoch": 2.366737739872068,
"grad_norm": 0.04646117985248566,
"learning_rate": 2.6332622601279317e-05,
"loss": 0.0033,
"step": 1110
},
{
"epoch": 2.3773987206823026,
"grad_norm": 0.0025137723423540592,
"learning_rate": 2.6226012793176975e-05,
"loss": 0.0002,
"step": 1115
},
{
"epoch": 2.388059701492537,
"grad_norm": 2.028430461883545,
"learning_rate": 2.6119402985074626e-05,
"loss": 0.0008,
"step": 1120
},
{
"epoch": 2.398720682302772,
"grad_norm": 0.00535277696326375,
"learning_rate": 2.6012793176972285e-05,
"loss": 0.002,
"step": 1125
},
{
"epoch": 2.4093816631130065,
"grad_norm": 15.36055850982666,
"learning_rate": 2.5906183368869936e-05,
"loss": 0.0089,
"step": 1130
},
{
"epoch": 2.420042643923241,
"grad_norm": 0.001888532075099647,
"learning_rate": 2.5799573560767594e-05,
"loss": 0.0005,
"step": 1135
},
{
"epoch": 2.4307036247334755,
"grad_norm": 0.002072590170428157,
"learning_rate": 2.5692963752665245e-05,
"loss": 0.0017,
"step": 1140
},
{
"epoch": 2.44136460554371,
"grad_norm": 0.006232311949133873,
"learning_rate": 2.5586353944562904e-05,
"loss": 0.0004,
"step": 1145
},
{
"epoch": 2.4520255863539444,
"grad_norm": 0.03075665421783924,
"learning_rate": 2.5479744136460555e-05,
"loss": 0.0005,
"step": 1150
},
{
"epoch": 2.4626865671641793,
"grad_norm": 0.0019579094368964434,
"learning_rate": 2.537313432835821e-05,
"loss": 0.0002,
"step": 1155
},
{
"epoch": 2.473347547974414,
"grad_norm": 0.00516589917242527,
"learning_rate": 2.5266524520255864e-05,
"loss": 0.0129,
"step": 1160
},
{
"epoch": 2.4840085287846483,
"grad_norm": 0.0019142766250297427,
"learning_rate": 2.515991471215352e-05,
"loss": 0.002,
"step": 1165
},
{
"epoch": 2.4946695095948828,
"grad_norm": 0.001729660900309682,
"learning_rate": 2.5053304904051174e-05,
"loss": 0.0002,
"step": 1170
},
{
"epoch": 2.5053304904051172,
"grad_norm": 0.004812856670469046,
"learning_rate": 2.494669509594883e-05,
"loss": 0.0124,
"step": 1175
},
{
"epoch": 2.5159914712153517,
"grad_norm": 0.002255210420116782,
"learning_rate": 2.4840085287846483e-05,
"loss": 0.0018,
"step": 1180
},
{
"epoch": 2.526652452025586,
"grad_norm": 0.0042625595815479755,
"learning_rate": 2.4733475479744138e-05,
"loss": 0.0022,
"step": 1185
},
{
"epoch": 2.5373134328358207,
"grad_norm": 0.003317923052236438,
"learning_rate": 2.4626865671641793e-05,
"loss": 0.0002,
"step": 1190
},
{
"epoch": 2.5479744136460556,
"grad_norm": 0.0021533577237278223,
"learning_rate": 2.4520255863539444e-05,
"loss": 0.0006,
"step": 1195
},
{
"epoch": 2.55863539445629,
"grad_norm": 0.0016170875169336796,
"learning_rate": 2.44136460554371e-05,
"loss": 0.0002,
"step": 1200
},
{
"epoch": 2.5692963752665245,
"grad_norm": 0.0032187297474592924,
"learning_rate": 2.4307036247334754e-05,
"loss": 0.0003,
"step": 1205
},
{
"epoch": 2.579957356076759,
"grad_norm": 0.0017038070363923907,
"learning_rate": 2.420042643923241e-05,
"loss": 0.0007,
"step": 1210
},
{
"epoch": 2.5906183368869935,
"grad_norm": 0.0022185908164829016,
"learning_rate": 2.4093816631130063e-05,
"loss": 0.0003,
"step": 1215
},
{
"epoch": 2.6012793176972284,
"grad_norm": 0.011953674256801605,
"learning_rate": 2.3987206823027718e-05,
"loss": 0.0004,
"step": 1220
},
{
"epoch": 2.611940298507463,
"grad_norm": 0.0017118758987635374,
"learning_rate": 2.3880597014925373e-05,
"loss": 0.0002,
"step": 1225
},
{
"epoch": 2.6226012793176974,
"grad_norm": 0.002300629625096917,
"learning_rate": 2.3773987206823027e-05,
"loss": 0.0002,
"step": 1230
},
{
"epoch": 2.633262260127932,
"grad_norm": 0.0016920581692829728,
"learning_rate": 2.3667377398720682e-05,
"loss": 0.0062,
"step": 1235
},
{
"epoch": 2.6439232409381663,
"grad_norm": 0.24134622514247894,
"learning_rate": 2.3560767590618337e-05,
"loss": 0.0056,
"step": 1240
},
{
"epoch": 2.654584221748401,
"grad_norm": 0.0015582832274958491,
"learning_rate": 2.345415778251599e-05,
"loss": 0.0012,
"step": 1245
},
{
"epoch": 2.6652452025586353,
"grad_norm": 0.022430991753935814,
"learning_rate": 2.3347547974413646e-05,
"loss": 0.0003,
"step": 1250
},
{
"epoch": 2.6759061833688698,
"grad_norm": 0.13407553732395172,
"learning_rate": 2.32409381663113e-05,
"loss": 0.0003,
"step": 1255
},
{
"epoch": 2.6865671641791042,
"grad_norm": 0.001633663778193295,
"learning_rate": 2.3134328358208956e-05,
"loss": 0.0003,
"step": 1260
},
{
"epoch": 2.697228144989339,
"grad_norm": 0.002987906336784363,
"learning_rate": 2.302771855010661e-05,
"loss": 0.0002,
"step": 1265
},
{
"epoch": 2.7078891257995736,
"grad_norm": 0.001594817265868187,
"learning_rate": 2.2921108742004265e-05,
"loss": 0.0002,
"step": 1270
},
{
"epoch": 2.718550106609808,
"grad_norm": 0.07639887928962708,
"learning_rate": 2.281449893390192e-05,
"loss": 0.0003,
"step": 1275
},
{
"epoch": 2.7292110874200426,
"grad_norm": 0.002753237960860133,
"learning_rate": 2.2707889125799575e-05,
"loss": 0.0003,
"step": 1280
},
{
"epoch": 2.739872068230277,
"grad_norm": 0.00843032356351614,
"learning_rate": 2.260127931769723e-05,
"loss": 0.0003,
"step": 1285
},
{
"epoch": 2.750533049040512,
"grad_norm": 0.0017697943840175867,
"learning_rate": 2.2494669509594884e-05,
"loss": 0.0005,
"step": 1290
},
{
"epoch": 2.7611940298507465,
"grad_norm": 0.012765592895448208,
"learning_rate": 2.238805970149254e-05,
"loss": 0.0003,
"step": 1295
},
{
"epoch": 2.771855010660981,
"grad_norm": 0.001516074757091701,
"learning_rate": 2.2281449893390194e-05,
"loss": 0.0002,
"step": 1300
},
{
"epoch": 2.7825159914712154,
"grad_norm": 0.0016697804676368833,
"learning_rate": 2.217484008528785e-05,
"loss": 0.0002,
"step": 1305
},
{
"epoch": 2.79317697228145,
"grad_norm": 0.0018960988381877542,
"learning_rate": 2.2068230277185503e-05,
"loss": 0.0002,
"step": 1310
},
{
"epoch": 2.8038379530916844,
"grad_norm": 0.002706008730456233,
"learning_rate": 2.1961620469083158e-05,
"loss": 0.0002,
"step": 1315
},
{
"epoch": 2.814498933901919,
"grad_norm": 0.0015761232934892178,
"learning_rate": 2.1855010660980813e-05,
"loss": 0.0002,
"step": 1320
},
{
"epoch": 2.8251599147121533,
"grad_norm": 0.04365153983235359,
"learning_rate": 2.1748400852878467e-05,
"loss": 0.0054,
"step": 1325
},
{
"epoch": 2.835820895522388,
"grad_norm": 0.0030039746779948473,
"learning_rate": 2.164179104477612e-05,
"loss": 0.0003,
"step": 1330
},
{
"epoch": 2.8464818763326227,
"grad_norm": 0.0018323756521567702,
"learning_rate": 2.1535181236673773e-05,
"loss": 0.0007,
"step": 1335
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.0018739423248916864,
"learning_rate": 2.1428571428571428e-05,
"loss": 0.0002,
"step": 1340
},
{
"epoch": 2.8678038379530917,
"grad_norm": 0.0016587284626439214,
"learning_rate": 2.1321961620469083e-05,
"loss": 0.0002,
"step": 1345
},
{
"epoch": 2.878464818763326,
"grad_norm": 0.0021149320527911186,
"learning_rate": 2.1215351812366738e-05,
"loss": 0.0002,
"step": 1350
},
{
"epoch": 2.8891257995735606,
"grad_norm": 0.001595294801518321,
"learning_rate": 2.1108742004264392e-05,
"loss": 0.0002,
"step": 1355
},
{
"epoch": 2.8997867803837956,
"grad_norm": 0.003828309243544936,
"learning_rate": 2.1002132196162047e-05,
"loss": 0.0089,
"step": 1360
},
{
"epoch": 2.91044776119403,
"grad_norm": 0.004440088756382465,
"learning_rate": 2.0895522388059702e-05,
"loss": 0.0003,
"step": 1365
},
{
"epoch": 2.9211087420042645,
"grad_norm": 0.0017082487465813756,
"learning_rate": 2.0788912579957357e-05,
"loss": 0.0089,
"step": 1370
},
{
"epoch": 2.931769722814499,
"grad_norm": 0.17494313418865204,
"learning_rate": 2.068230277185501e-05,
"loss": 0.0079,
"step": 1375
},
{
"epoch": 2.9424307036247335,
"grad_norm": 0.001594117609784007,
"learning_rate": 2.0575692963752666e-05,
"loss": 0.0002,
"step": 1380
},
{
"epoch": 2.953091684434968,
"grad_norm": 0.008118892088532448,
"learning_rate": 2.046908315565032e-05,
"loss": 0.0004,
"step": 1385
},
{
"epoch": 2.9637526652452024,
"grad_norm": 0.0024735559709370136,
"learning_rate": 2.0362473347547976e-05,
"loss": 0.0002,
"step": 1390
},
{
"epoch": 2.974413646055437,
"grad_norm": 0.0014542067656293511,
"learning_rate": 2.025586353944563e-05,
"loss": 0.0002,
"step": 1395
},
{
"epoch": 2.9850746268656714,
"grad_norm": 0.0031547469552606344,
"learning_rate": 2.0149253731343285e-05,
"loss": 0.0002,
"step": 1400
},
{
"epoch": 2.9957356076759063,
"grad_norm": 0.0015797675587236881,
"learning_rate": 2.004264392324094e-05,
"loss": 0.0002,
"step": 1405
},
{
"epoch": 3.0,
"eval_accuracy": 0.9861333333333333,
"eval_loss": 0.06201761215925217,
"eval_runtime": 17.375,
"eval_samples_per_second": 215.827,
"eval_steps_per_second": 6.791,
"step": 1407
},
{
"epoch": 3.0063965884861408,
"grad_norm": 1.461633563041687,
"learning_rate": 1.9936034115138594e-05,
"loss": 0.0007,
"step": 1410
},
{
"epoch": 3.0170575692963753,
"grad_norm": 0.008623131550848484,
"learning_rate": 1.982942430703625e-05,
"loss": 0.0002,
"step": 1415
},
{
"epoch": 3.0277185501066097,
"grad_norm": 0.0022685376461595297,
"learning_rate": 1.9722814498933904e-05,
"loss": 0.0002,
"step": 1420
},
{
"epoch": 3.038379530916844,
"grad_norm": 0.0021228427067399025,
"learning_rate": 1.961620469083156e-05,
"loss": 0.0002,
"step": 1425
},
{
"epoch": 3.0490405117270787,
"grad_norm": 0.0026628009509295225,
"learning_rate": 1.9509594882729213e-05,
"loss": 0.0002,
"step": 1430
},
{
"epoch": 3.0597014925373136,
"grad_norm": 0.0015997429145500064,
"learning_rate": 1.9402985074626868e-05,
"loss": 0.0065,
"step": 1435
},
{
"epoch": 3.070362473347548,
"grad_norm": 0.0015492772217839956,
"learning_rate": 1.9296375266524523e-05,
"loss": 0.0002,
"step": 1440
},
{
"epoch": 3.0810234541577826,
"grad_norm": 0.0014074740465730429,
"learning_rate": 1.9189765458422178e-05,
"loss": 0.0002,
"step": 1445
},
{
"epoch": 3.091684434968017,
"grad_norm": 0.0014764177612960339,
"learning_rate": 1.9083155650319832e-05,
"loss": 0.0002,
"step": 1450
},
{
"epoch": 3.1023454157782515,
"grad_norm": 0.0015944272745400667,
"learning_rate": 1.8976545842217487e-05,
"loss": 0.0002,
"step": 1455
},
{
"epoch": 3.113006396588486,
"grad_norm": 0.002632107585668564,
"learning_rate": 1.8869936034115142e-05,
"loss": 0.0002,
"step": 1460
},
{
"epoch": 3.1236673773987205,
"grad_norm": 0.0015126124490052462,
"learning_rate": 1.8763326226012797e-05,
"loss": 0.0002,
"step": 1465
},
{
"epoch": 3.1343283582089554,
"grad_norm": 0.024501264095306396,
"learning_rate": 1.865671641791045e-05,
"loss": 0.0002,
"step": 1470
},
{
"epoch": 3.14498933901919,
"grad_norm": 0.014305677264928818,
"learning_rate": 1.8550106609808106e-05,
"loss": 0.0002,
"step": 1475
},
{
"epoch": 3.1556503198294243,
"grad_norm": 0.0020725333597511053,
"learning_rate": 1.8443496801705757e-05,
"loss": 0.0032,
"step": 1480
},
{
"epoch": 3.166311300639659,
"grad_norm": 0.0038152721244841814,
"learning_rate": 1.8336886993603412e-05,
"loss": 0.0002,
"step": 1485
},
{
"epoch": 3.1769722814498933,
"grad_norm": 0.001414337893947959,
"learning_rate": 1.8230277185501067e-05,
"loss": 0.0002,
"step": 1490
},
{
"epoch": 3.1876332622601278,
"grad_norm": 0.002374679781496525,
"learning_rate": 1.812366737739872e-05,
"loss": 0.0002,
"step": 1495
},
{
"epoch": 3.1982942430703627,
"grad_norm": 0.002602379070594907,
"learning_rate": 1.8017057569296376e-05,
"loss": 0.0002,
"step": 1500
},
{
"epoch": 3.208955223880597,
"grad_norm": 0.001584042445756495,
"learning_rate": 1.791044776119403e-05,
"loss": 0.0003,
"step": 1505
},
{
"epoch": 3.2196162046908317,
"grad_norm": 0.0019072901923209429,
"learning_rate": 1.7803837953091686e-05,
"loss": 0.0002,
"step": 1510
},
{
"epoch": 3.230277185501066,
"grad_norm": 0.0014707124792039394,
"learning_rate": 1.769722814498934e-05,
"loss": 0.0002,
"step": 1515
},
{
"epoch": 3.2409381663113006,
"grad_norm": 0.0016866616206243634,
"learning_rate": 1.7590618336886995e-05,
"loss": 0.0002,
"step": 1520
},
{
"epoch": 3.251599147121535,
"grad_norm": 0.0013746339827775955,
"learning_rate": 1.7484008528784647e-05,
"loss": 0.0002,
"step": 1525
},
{
"epoch": 3.2622601279317696,
"grad_norm": 0.0014366420218721032,
"learning_rate": 1.73773987206823e-05,
"loss": 0.0002,
"step": 1530
},
{
"epoch": 3.272921108742004,
"grad_norm": 21.536617279052734,
"learning_rate": 1.7270788912579956e-05,
"loss": 0.0113,
"step": 1535
},
{
"epoch": 3.283582089552239,
"grad_norm": 0.030154358595609665,
"learning_rate": 1.716417910447761e-05,
"loss": 0.0002,
"step": 1540
},
{
"epoch": 3.2942430703624734,
"grad_norm": 0.0018184144282713532,
"learning_rate": 1.7057569296375266e-05,
"loss": 0.0004,
"step": 1545
},
{
"epoch": 3.304904051172708,
"grad_norm": 0.0014812969602644444,
"learning_rate": 1.695095948827292e-05,
"loss": 0.0002,
"step": 1550
},
{
"epoch": 3.3155650319829424,
"grad_norm": 0.0013438789173960686,
"learning_rate": 1.6844349680170575e-05,
"loss": 0.0005,
"step": 1555
},
{
"epoch": 3.326226012793177,
"grad_norm": 0.003677763743326068,
"learning_rate": 1.673773987206823e-05,
"loss": 0.0008,
"step": 1560
},
{
"epoch": 3.3368869936034113,
"grad_norm": 0.10813038796186447,
"learning_rate": 1.6631130063965885e-05,
"loss": 0.0003,
"step": 1565
},
{
"epoch": 3.3475479744136463,
"grad_norm": 0.007127894088625908,
"learning_rate": 1.652452025586354e-05,
"loss": 0.0002,
"step": 1570
},
{
"epoch": 3.3582089552238807,
"grad_norm": 0.001401298213750124,
"learning_rate": 1.6417910447761194e-05,
"loss": 0.0002,
"step": 1575
},
{
"epoch": 3.368869936034115,
"grad_norm": 0.0013577801873907447,
"learning_rate": 1.631130063965885e-05,
"loss": 0.0002,
"step": 1580
},
{
"epoch": 3.3795309168443497,
"grad_norm": 0.0017308747628703713,
"learning_rate": 1.6204690831556504e-05,
"loss": 0.0002,
"step": 1585
},
{
"epoch": 3.390191897654584,
"grad_norm": 0.0013733214000239968,
"learning_rate": 1.6098081023454158e-05,
"loss": 0.0002,
"step": 1590
},
{
"epoch": 3.4008528784648187,
"grad_norm": 0.011999400332570076,
"learning_rate": 1.5991471215351813e-05,
"loss": 0.0002,
"step": 1595
},
{
"epoch": 3.411513859275053,
"grad_norm": 0.0013399006566032767,
"learning_rate": 1.5884861407249468e-05,
"loss": 0.0003,
"step": 1600
},
{
"epoch": 3.4221748400852876,
"grad_norm": 0.0014706465881317854,
"learning_rate": 1.5778251599147122e-05,
"loss": 0.0002,
"step": 1605
},
{
"epoch": 3.4328358208955225,
"grad_norm": 0.0013743231538683176,
"learning_rate": 1.5671641791044777e-05,
"loss": 0.0002,
"step": 1610
},
{
"epoch": 3.443496801705757,
"grad_norm": 0.0013075482565909624,
"learning_rate": 1.5565031982942432e-05,
"loss": 0.0002,
"step": 1615
},
{
"epoch": 3.4541577825159915,
"grad_norm": 0.0014712231932207942,
"learning_rate": 1.5458422174840087e-05,
"loss": 0.0002,
"step": 1620
},
{
"epoch": 3.464818763326226,
"grad_norm": 0.0013278430560603738,
"learning_rate": 1.535181236673774e-05,
"loss": 0.0009,
"step": 1625
},
{
"epoch": 3.4754797441364604,
"grad_norm": 0.0014948764583095908,
"learning_rate": 1.5245202558635396e-05,
"loss": 0.0002,
"step": 1630
},
{
"epoch": 3.486140724946695,
"grad_norm": 0.0016076472820714116,
"learning_rate": 1.5138592750533051e-05,
"loss": 0.0002,
"step": 1635
},
{
"epoch": 3.49680170575693,
"grad_norm": 0.046660102903842926,
"learning_rate": 1.5031982942430706e-05,
"loss": 0.0003,
"step": 1640
},
{
"epoch": 3.5074626865671643,
"grad_norm": 0.001787811517715454,
"learning_rate": 1.4925373134328357e-05,
"loss": 0.0002,
"step": 1645
},
{
"epoch": 3.518123667377399,
"grad_norm": 0.019754258915781975,
"learning_rate": 1.4818763326226012e-05,
"loss": 0.0002,
"step": 1650
},
{
"epoch": 3.5287846481876333,
"grad_norm": 0.0013567762216553092,
"learning_rate": 1.4712153518123666e-05,
"loss": 0.0002,
"step": 1655
},
{
"epoch": 3.5394456289978677,
"grad_norm": 0.0017151799984276295,
"learning_rate": 1.4605543710021321e-05,
"loss": 0.0002,
"step": 1660
},
{
"epoch": 3.550106609808102,
"grad_norm": 0.0014122892171144485,
"learning_rate": 1.4498933901918976e-05,
"loss": 0.0001,
"step": 1665
},
{
"epoch": 3.5607675906183367,
"grad_norm": 0.001381227164529264,
"learning_rate": 1.439232409381663e-05,
"loss": 0.0002,
"step": 1670
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.001501563936471939,
"learning_rate": 1.4285714285714285e-05,
"loss": 0.0002,
"step": 1675
},
{
"epoch": 3.582089552238806,
"grad_norm": 10.50568962097168,
"learning_rate": 1.417910447761194e-05,
"loss": 0.0155,
"step": 1680
},
{
"epoch": 3.5927505330490406,
"grad_norm": 0.0035982050467282534,
"learning_rate": 1.4072494669509595e-05,
"loss": 0.004,
"step": 1685
},
{
"epoch": 3.603411513859275,
"grad_norm": 0.0030845210421830416,
"learning_rate": 1.396588486140725e-05,
"loss": 0.0002,
"step": 1690
},
{
"epoch": 3.6140724946695095,
"grad_norm": 0.0024272853042930365,
"learning_rate": 1.3859275053304904e-05,
"loss": 0.0002,
"step": 1695
},
{
"epoch": 3.624733475479744,
"grad_norm": 0.001277498435229063,
"learning_rate": 1.3752665245202559e-05,
"loss": 0.0001,
"step": 1700
},
{
"epoch": 3.635394456289979,
"grad_norm": 0.001351995742879808,
"learning_rate": 1.3646055437100214e-05,
"loss": 0.0002,
"step": 1705
},
{
"epoch": 3.6460554371002134,
"grad_norm": 0.0013289814814925194,
"learning_rate": 1.3539445628997869e-05,
"loss": 0.0001,
"step": 1710
},
{
"epoch": 3.656716417910448,
"grad_norm": 0.004144403617829084,
"learning_rate": 1.3432835820895523e-05,
"loss": 0.0002,
"step": 1715
},
{
"epoch": 3.6673773987206824,
"grad_norm": 0.0013239245163276792,
"learning_rate": 1.3326226012793178e-05,
"loss": 0.0002,
"step": 1720
},
{
"epoch": 3.678038379530917,
"grad_norm": 0.003802697407081723,
"learning_rate": 1.3219616204690833e-05,
"loss": 0.0002,
"step": 1725
},
{
"epoch": 3.6886993603411513,
"grad_norm": 0.0013056733878329396,
"learning_rate": 1.3113006396588488e-05,
"loss": 0.0001,
"step": 1730
},
{
"epoch": 3.699360341151386,
"grad_norm": 0.0013066122774034739,
"learning_rate": 1.3006396588486142e-05,
"loss": 0.0068,
"step": 1735
},
{
"epoch": 3.7100213219616203,
"grad_norm": 0.001307385740801692,
"learning_rate": 1.2899786780383797e-05,
"loss": 0.0002,
"step": 1740
},
{
"epoch": 3.7206823027718547,
"grad_norm": 0.0012692222371697426,
"learning_rate": 1.2793176972281452e-05,
"loss": 0.0001,
"step": 1745
},
{
"epoch": 3.7313432835820897,
"grad_norm": 0.0012794553767889738,
"learning_rate": 1.2686567164179105e-05,
"loss": 0.0002,
"step": 1750
},
{
"epoch": 3.742004264392324,
"grad_norm": 0.0016664585564285517,
"learning_rate": 1.257995735607676e-05,
"loss": 0.0001,
"step": 1755
},
{
"epoch": 3.7526652452025586,
"grad_norm": 0.0013751591322943568,
"learning_rate": 1.2473347547974414e-05,
"loss": 0.0002,
"step": 1760
},
{
"epoch": 3.763326226012793,
"grad_norm": 0.0017565148882567883,
"learning_rate": 1.2366737739872069e-05,
"loss": 0.0002,
"step": 1765
},
{
"epoch": 3.7739872068230276,
"grad_norm": 0.001302393269725144,
"learning_rate": 1.2260127931769722e-05,
"loss": 0.0001,
"step": 1770
},
{
"epoch": 3.7846481876332625,
"grad_norm": 0.0031294100917875767,
"learning_rate": 1.2153518123667377e-05,
"loss": 0.0002,
"step": 1775
},
{
"epoch": 3.795309168443497,
"grad_norm": 0.002873555989935994,
"learning_rate": 1.2046908315565032e-05,
"loss": 0.0002,
"step": 1780
},
{
"epoch": 3.8059701492537314,
"grad_norm": 0.0014639534056186676,
"learning_rate": 1.1940298507462686e-05,
"loss": 0.0001,
"step": 1785
},
{
"epoch": 3.816631130063966,
"grad_norm": 0.0024766058195382357,
"learning_rate": 1.1833688699360341e-05,
"loss": 0.0001,
"step": 1790
},
{
"epoch": 3.8272921108742004,
"grad_norm": 0.0016069738194346428,
"learning_rate": 1.1727078891257996e-05,
"loss": 0.0001,
"step": 1795
},
{
"epoch": 3.837953091684435,
"grad_norm": 0.0014199650613591075,
"learning_rate": 1.162046908315565e-05,
"loss": 0.0001,
"step": 1800
},
{
"epoch": 3.8486140724946694,
"grad_norm": 0.002102483995258808,
"learning_rate": 1.1513859275053305e-05,
"loss": 0.0039,
"step": 1805
},
{
"epoch": 3.859275053304904,
"grad_norm": 0.0018299691146239638,
"learning_rate": 1.140724946695096e-05,
"loss": 0.0002,
"step": 1810
},
{
"epoch": 3.8699360341151388,
"grad_norm": 0.00693817762658,
"learning_rate": 1.1300639658848615e-05,
"loss": 0.0002,
"step": 1815
},
{
"epoch": 3.8805970149253732,
"grad_norm": 0.005339950788766146,
"learning_rate": 1.119402985074627e-05,
"loss": 0.0002,
"step": 1820
},
{
"epoch": 3.8912579957356077,
"grad_norm": 0.001799010206013918,
"learning_rate": 1.1087420042643924e-05,
"loss": 0.0002,
"step": 1825
},
{
"epoch": 3.901918976545842,
"grad_norm": 0.0032261726446449757,
"learning_rate": 1.0980810234541579e-05,
"loss": 0.0002,
"step": 1830
},
{
"epoch": 3.9125799573560767,
"grad_norm": 0.01785244606435299,
"learning_rate": 1.0874200426439234e-05,
"loss": 0.0002,
"step": 1835
},
{
"epoch": 3.923240938166311,
"grad_norm": 0.0012020582798868418,
"learning_rate": 1.0767590618336887e-05,
"loss": 0.0002,
"step": 1840
},
{
"epoch": 3.933901918976546,
"grad_norm": 0.0012705688131973147,
"learning_rate": 1.0660980810234541e-05,
"loss": 0.0001,
"step": 1845
},
{
"epoch": 3.9445628997867805,
"grad_norm": 0.0014653359539806843,
"learning_rate": 1.0554371002132196e-05,
"loss": 0.0002,
"step": 1850
},
{
"epoch": 3.955223880597015,
"grad_norm": 0.0015560160391032696,
"learning_rate": 1.0447761194029851e-05,
"loss": 0.0002,
"step": 1855
},
{
"epoch": 3.9658848614072495,
"grad_norm": 0.0036310593131929636,
"learning_rate": 1.0341151385927506e-05,
"loss": 0.0001,
"step": 1860
},
{
"epoch": 3.976545842217484,
"grad_norm": 0.0012366551673039794,
"learning_rate": 1.023454157782516e-05,
"loss": 0.0001,
"step": 1865
},
{
"epoch": 3.9872068230277184,
"grad_norm": 0.0017698772717267275,
"learning_rate": 1.0127931769722815e-05,
"loss": 0.0002,
"step": 1870
},
{
"epoch": 3.997867803837953,
"grad_norm": 0.0013641887344419956,
"learning_rate": 1.002132196162047e-05,
"loss": 0.0001,
"step": 1875
},
{
"epoch": 4.0,
"eval_accuracy": 0.9861333333333333,
"eval_loss": 0.06454310566186905,
"eval_runtime": 17.2675,
"eval_samples_per_second": 217.171,
"eval_steps_per_second": 6.834,
"step": 1876
},
{
"epoch": 4.008528784648187,
"grad_norm": 0.0012424176093190908,
"learning_rate": 9.914712153518125e-06,
"loss": 0.0001,
"step": 1880
},
{
"epoch": 4.019189765458422,
"grad_norm": 0.0013428545789793134,
"learning_rate": 9.80810234541578e-06,
"loss": 0.0002,
"step": 1885
},
{
"epoch": 4.029850746268656,
"grad_norm": 0.002022024942561984,
"learning_rate": 9.701492537313434e-06,
"loss": 0.0001,
"step": 1890
},
{
"epoch": 4.040511727078891,
"grad_norm": 0.004463700577616692,
"learning_rate": 9.594882729211089e-06,
"loss": 0.0001,
"step": 1895
},
{
"epoch": 4.051172707889126,
"grad_norm": 0.0013927206164225936,
"learning_rate": 9.488272921108744e-06,
"loss": 0.0002,
"step": 1900
},
{
"epoch": 4.061833688699361,
"grad_norm": 0.001286355429328978,
"learning_rate": 9.381663113006398e-06,
"loss": 0.0002,
"step": 1905
},
{
"epoch": 4.072494669509595,
"grad_norm": 0.004472631961107254,
"learning_rate": 9.275053304904053e-06,
"loss": 0.0002,
"step": 1910
},
{
"epoch": 4.08315565031983,
"grad_norm": 0.001554334769025445,
"learning_rate": 9.168443496801706e-06,
"loss": 0.0001,
"step": 1915
},
{
"epoch": 4.093816631130064,
"grad_norm": 0.0013858502497896552,
"learning_rate": 9.06183368869936e-06,
"loss": 0.0002,
"step": 1920
},
{
"epoch": 4.104477611940299,
"grad_norm": 0.0013580528320744634,
"learning_rate": 8.955223880597016e-06,
"loss": 0.0001,
"step": 1925
},
{
"epoch": 4.115138592750533,
"grad_norm": 0.0011945017613470554,
"learning_rate": 8.84861407249467e-06,
"loss": 0.0002,
"step": 1930
},
{
"epoch": 4.1257995735607675,
"grad_norm": 0.0015058082062751055,
"learning_rate": 8.742004264392323e-06,
"loss": 0.0001,
"step": 1935
},
{
"epoch": 4.136460554371002,
"grad_norm": 0.0018504057079553604,
"learning_rate": 8.635394456289978e-06,
"loss": 0.0001,
"step": 1940
},
{
"epoch": 4.1471215351812365,
"grad_norm": 0.001987944357097149,
"learning_rate": 8.528784648187633e-06,
"loss": 0.0002,
"step": 1945
},
{
"epoch": 4.157782515991471,
"grad_norm": 0.001231278758496046,
"learning_rate": 8.422174840085288e-06,
"loss": 0.0001,
"step": 1950
},
{
"epoch": 4.1684434968017055,
"grad_norm": 0.0014915940118953586,
"learning_rate": 8.315565031982942e-06,
"loss": 0.0029,
"step": 1955
},
{
"epoch": 4.17910447761194,
"grad_norm": 0.0017193792155012488,
"learning_rate": 8.208955223880597e-06,
"loss": 0.0001,
"step": 1960
},
{
"epoch": 4.189765458422174,
"grad_norm": 0.0013483568327501416,
"learning_rate": 8.102345415778252e-06,
"loss": 0.0001,
"step": 1965
},
{
"epoch": 4.20042643923241,
"grad_norm": 0.0011997171677649021,
"learning_rate": 7.995735607675907e-06,
"loss": 0.0001,
"step": 1970
},
{
"epoch": 4.211087420042644,
"grad_norm": 0.0017619481077417731,
"learning_rate": 7.889125799573561e-06,
"loss": 0.0002,
"step": 1975
},
{
"epoch": 4.221748400852879,
"grad_norm": 0.001219351775944233,
"learning_rate": 7.782515991471216e-06,
"loss": 0.0001,
"step": 1980
},
{
"epoch": 4.232409381663113,
"grad_norm": 0.16717660427093506,
"learning_rate": 7.67590618336887e-06,
"loss": 0.0063,
"step": 1985
},
{
"epoch": 4.243070362473348,
"grad_norm": 0.0017397510819137096,
"learning_rate": 7.5692963752665255e-06,
"loss": 0.0001,
"step": 1990
},
{
"epoch": 4.253731343283582,
"grad_norm": 0.0011504489229992032,
"learning_rate": 7.4626865671641785e-06,
"loss": 0.0001,
"step": 1995
},
{
"epoch": 4.264392324093817,
"grad_norm": 0.0012177824974060059,
"learning_rate": 7.356076759061833e-06,
"loss": 0.0001,
"step": 2000
},
{
"epoch": 4.275053304904051,
"grad_norm": 0.001127021387219429,
"learning_rate": 7.249466950959488e-06,
"loss": 0.0002,
"step": 2005
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.004080480430275202,
"learning_rate": 7.142857142857143e-06,
"loss": 0.0001,
"step": 2010
},
{
"epoch": 4.29637526652452,
"grad_norm": 0.0011779138585552573,
"learning_rate": 7.0362473347547975e-06,
"loss": 0.0002,
"step": 2015
},
{
"epoch": 4.3070362473347545,
"grad_norm": 0.0012129482347518206,
"learning_rate": 6.929637526652452e-06,
"loss": 0.0002,
"step": 2020
},
{
"epoch": 4.317697228144989,
"grad_norm": 0.08944092690944672,
"learning_rate": 6.823027718550107e-06,
"loss": 0.0025,
"step": 2025
},
{
"epoch": 4.3283582089552235,
"grad_norm": 0.0011878637596964836,
"learning_rate": 6.716417910447762e-06,
"loss": 0.0001,
"step": 2030
},
{
"epoch": 4.339019189765459,
"grad_norm": 0.007837583310902119,
"learning_rate": 6.609808102345416e-06,
"loss": 0.0002,
"step": 2035
},
{
"epoch": 4.349680170575693,
"grad_norm": 0.0012950020609423518,
"learning_rate": 6.503198294243071e-06,
"loss": 0.0001,
"step": 2040
},
{
"epoch": 4.360341151385928,
"grad_norm": 0.0011917536612600088,
"learning_rate": 6.396588486140726e-06,
"loss": 0.0001,
"step": 2045
},
{
"epoch": 4.371002132196162,
"grad_norm": 0.0013600134989246726,
"learning_rate": 6.28997867803838e-06,
"loss": 0.0001,
"step": 2050
},
{
"epoch": 4.381663113006397,
"grad_norm": 0.0013914266601204872,
"learning_rate": 6.1833688699360345e-06,
"loss": 0.0001,
"step": 2055
},
{
"epoch": 4.392324093816631,
"grad_norm": 0.0011868373258039355,
"learning_rate": 6.076759061833688e-06,
"loss": 0.0001,
"step": 2060
},
{
"epoch": 4.402985074626866,
"grad_norm": 0.009749215096235275,
"learning_rate": 5.970149253731343e-06,
"loss": 0.0002,
"step": 2065
},
{
"epoch": 4.4136460554371,
"grad_norm": 0.0011316449381411076,
"learning_rate": 5.863539445628998e-06,
"loss": 0.0001,
"step": 2070
},
{
"epoch": 4.424307036247335,
"grad_norm": 0.0033109618816524744,
"learning_rate": 5.756929637526653e-06,
"loss": 0.0002,
"step": 2075
},
{
"epoch": 4.434968017057569,
"grad_norm": 0.0012717369245365262,
"learning_rate": 5.650319829424307e-06,
"loss": 0.0001,
"step": 2080
},
{
"epoch": 4.445628997867804,
"grad_norm": 0.0011261467589065433,
"learning_rate": 5.543710021321962e-06,
"loss": 0.0001,
"step": 2085
},
{
"epoch": 4.456289978678038,
"grad_norm": 0.0015071145026013255,
"learning_rate": 5.437100213219617e-06,
"loss": 0.0001,
"step": 2090
},
{
"epoch": 4.466950959488273,
"grad_norm": 0.0013715195236727595,
"learning_rate": 5.330490405117271e-06,
"loss": 0.0001,
"step": 2095
},
{
"epoch": 4.477611940298507,
"grad_norm": 0.0011400962248444557,
"learning_rate": 5.2238805970149255e-06,
"loss": 0.0001,
"step": 2100
},
{
"epoch": 4.4882729211087415,
"grad_norm": 0.001285375445149839,
"learning_rate": 5.11727078891258e-06,
"loss": 0.0001,
"step": 2105
},
{
"epoch": 4.498933901918977,
"grad_norm": 0.0013375984271988273,
"learning_rate": 5.010660980810235e-06,
"loss": 0.0001,
"step": 2110
},
{
"epoch": 4.509594882729211,
"grad_norm": 0.0012032234808430076,
"learning_rate": 4.90405117270789e-06,
"loss": 0.0001,
"step": 2115
},
{
"epoch": 4.520255863539446,
"grad_norm": 0.0013014287687838078,
"learning_rate": 4.797441364605544e-06,
"loss": 0.0002,
"step": 2120
},
{
"epoch": 4.53091684434968,
"grad_norm": 0.0020497296936810017,
"learning_rate": 4.690831556503199e-06,
"loss": 0.0001,
"step": 2125
},
{
"epoch": 4.541577825159915,
"grad_norm": 0.004726668354123831,
"learning_rate": 4.584221748400853e-06,
"loss": 0.0001,
"step": 2130
},
{
"epoch": 4.552238805970149,
"grad_norm": 0.0030003113206475973,
"learning_rate": 4.477611940298508e-06,
"loss": 0.0002,
"step": 2135
},
{
"epoch": 4.562899786780384,
"grad_norm": 0.0013835778227075934,
"learning_rate": 4.371002132196162e-06,
"loss": 0.0001,
"step": 2140
},
{
"epoch": 4.573560767590618,
"grad_norm": 0.0012324253330007195,
"learning_rate": 4.264392324093816e-06,
"loss": 0.0001,
"step": 2145
},
{
"epoch": 4.584221748400853,
"grad_norm": 0.001273977686651051,
"learning_rate": 4.157782515991471e-06,
"loss": 0.0001,
"step": 2150
},
{
"epoch": 4.594882729211087,
"grad_norm": 0.0011130374623462558,
"learning_rate": 4.051172707889126e-06,
"loss": 0.0001,
"step": 2155
},
{
"epoch": 4.605543710021322,
"grad_norm": 0.001868380350060761,
"learning_rate": 3.944562899786781e-06,
"loss": 0.0002,
"step": 2160
},
{
"epoch": 4.616204690831556,
"grad_norm": 0.0011330582201480865,
"learning_rate": 3.837953091684435e-06,
"loss": 0.0001,
"step": 2165
},
{
"epoch": 4.6268656716417915,
"grad_norm": 0.0012200495693832636,
"learning_rate": 3.7313432835820893e-06,
"loss": 0.0001,
"step": 2170
},
{
"epoch": 4.637526652452026,
"grad_norm": 0.001197567442432046,
"learning_rate": 3.624733475479744e-06,
"loss": 0.0001,
"step": 2175
},
{
"epoch": 4.6481876332622605,
"grad_norm": 0.0012370103504508734,
"learning_rate": 3.5181236673773987e-06,
"loss": 0.0001,
"step": 2180
},
{
"epoch": 4.658848614072495,
"grad_norm": 0.001441104570403695,
"learning_rate": 3.4115138592750535e-06,
"loss": 0.0004,
"step": 2185
},
{
"epoch": 4.669509594882729,
"grad_norm": 0.005181392654776573,
"learning_rate": 3.304904051172708e-06,
"loss": 0.0002,
"step": 2190
},
{
"epoch": 4.680170575692964,
"grad_norm": 0.0030894805677235126,
"learning_rate": 3.198294243070363e-06,
"loss": 0.0001,
"step": 2195
},
{
"epoch": 4.690831556503198,
"grad_norm": 0.0029679632280021906,
"learning_rate": 3.0916844349680173e-06,
"loss": 0.0001,
"step": 2200
},
{
"epoch": 4.701492537313433,
"grad_norm": 0.016101934015750885,
"learning_rate": 2.9850746268656716e-06,
"loss": 0.0001,
"step": 2205
},
{
"epoch": 4.712153518123667,
"grad_norm": 0.0016056873137131333,
"learning_rate": 2.8784648187633263e-06,
"loss": 0.0001,
"step": 2210
},
{
"epoch": 4.722814498933902,
"grad_norm": 0.3704182505607605,
"learning_rate": 2.771855010660981e-06,
"loss": 0.0054,
"step": 2215
},
{
"epoch": 4.733475479744136,
"grad_norm": 0.0011979964328929782,
"learning_rate": 2.6652452025586354e-06,
"loss": 0.0001,
"step": 2220
},
{
"epoch": 4.744136460554371,
"grad_norm": 0.0012237406335771084,
"learning_rate": 2.55863539445629e-06,
"loss": 0.0001,
"step": 2225
},
{
"epoch": 4.754797441364605,
"grad_norm": 0.0016670930199325085,
"learning_rate": 2.452025586353945e-06,
"loss": 0.0001,
"step": 2230
},
{
"epoch": 4.76545842217484,
"grad_norm": 0.0016099626664072275,
"learning_rate": 2.3454157782515996e-06,
"loss": 0.0001,
"step": 2235
},
{
"epoch": 4.776119402985074,
"grad_norm": 0.0012452139053493738,
"learning_rate": 2.238805970149254e-06,
"loss": 0.0001,
"step": 2240
},
{
"epoch": 4.786780383795309,
"grad_norm": 0.0012234573950991035,
"learning_rate": 2.132196162046908e-06,
"loss": 0.0001,
"step": 2245
},
{
"epoch": 4.797441364605544,
"grad_norm": 0.001381521113216877,
"learning_rate": 2.025586353944563e-06,
"loss": 0.0001,
"step": 2250
},
{
"epoch": 4.8081023454157785,
"grad_norm": 0.0011367109837010503,
"learning_rate": 1.9189765458422177e-06,
"loss": 0.0001,
"step": 2255
},
{
"epoch": 4.818763326226013,
"grad_norm": 0.0013733627274632454,
"learning_rate": 1.812366737739872e-06,
"loss": 0.0001,
"step": 2260
},
{
"epoch": 4.8294243070362475,
"grad_norm": 0.0011499322718009353,
"learning_rate": 1.7057569296375267e-06,
"loss": 0.0001,
"step": 2265
},
{
"epoch": 4.840085287846482,
"grad_norm": 0.0013391554821282625,
"learning_rate": 1.5991471215351815e-06,
"loss": 0.0001,
"step": 2270
},
{
"epoch": 4.850746268656716,
"grad_norm": 0.0013704358134418726,
"learning_rate": 1.4925373134328358e-06,
"loss": 0.0001,
"step": 2275
},
{
"epoch": 4.861407249466951,
"grad_norm": 0.0012855156091973186,
"learning_rate": 1.3859275053304905e-06,
"loss": 0.0001,
"step": 2280
},
{
"epoch": 4.872068230277185,
"grad_norm": 0.0011203851317986846,
"learning_rate": 1.279317697228145e-06,
"loss": 0.0001,
"step": 2285
},
{
"epoch": 4.88272921108742,
"grad_norm": 0.0011925158323720098,
"learning_rate": 1.1727078891257998e-06,
"loss": 0.0001,
"step": 2290
},
{
"epoch": 4.893390191897654,
"grad_norm": 0.0013797224964946508,
"learning_rate": 1.066098081023454e-06,
"loss": 0.0024,
"step": 2295
},
{
"epoch": 4.904051172707889,
"grad_norm": 0.001322761527262628,
"learning_rate": 9.594882729211088e-07,
"loss": 0.0001,
"step": 2300
},
{
"epoch": 4.914712153518123,
"grad_norm": 0.0011309119872748852,
"learning_rate": 8.528784648187634e-07,
"loss": 0.0001,
"step": 2305
},
{
"epoch": 4.925373134328359,
"grad_norm": 0.0011366803664714098,
"learning_rate": 7.462686567164179e-07,
"loss": 0.0001,
"step": 2310
},
{
"epoch": 4.936034115138593,
"grad_norm": 0.0017129272455349565,
"learning_rate": 6.396588486140725e-07,
"loss": 0.0002,
"step": 2315
},
{
"epoch": 4.946695095948828,
"grad_norm": 0.002514494815841317,
"learning_rate": 5.33049040511727e-07,
"loss": 0.0001,
"step": 2320
},
{
"epoch": 4.957356076759062,
"grad_norm": 0.0012437364785000682,
"learning_rate": 4.264392324093817e-07,
"loss": 0.0001,
"step": 2325
},
{
"epoch": 4.968017057569297,
"grad_norm": 0.0011664883932098746,
"learning_rate": 3.1982942430703626e-07,
"loss": 0.0001,
"step": 2330
},
{
"epoch": 4.978678038379531,
"grad_norm": 0.005681448150426149,
"learning_rate": 2.1321961620469084e-07,
"loss": 0.0002,
"step": 2335
},
{
"epoch": 4.9893390191897655,
"grad_norm": 0.0011803240049630404,
"learning_rate": 1.0660980810234542e-07,
"loss": 0.0001,
"step": 2340
},
{
"epoch": 5.0,
"grad_norm": 0.0018254140159115195,
"learning_rate": 0.0,
"loss": 0.0001,
"step": 2345
},
{
"epoch": 5.0,
"eval_accuracy": 0.9874666666666667,
"eval_loss": 0.06246929615736008,
"eval_runtime": 17.414,
"eval_samples_per_second": 215.344,
"eval_steps_per_second": 6.776,
"step": 2345
},
{
"epoch": 5.0,
"step": 2345,
"total_flos": 5.8118992210944e+18,
"train_loss": 0.013133438363490598,
"train_runtime": 809.9301,
"train_samples_per_second": 92.601,
"train_steps_per_second": 2.895
}
],
"logging_steps": 5,
"max_steps": 2345,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.8118992210944e+18,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}