{
  "best_metric": 0.024724164977669716,
  "best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles1_seed1_classic_image_classification\\checkpoint-1876",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 2345,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.010660980810234541, "grad_norm": 1.2567126750946045, "learning_rate": 4.989339019189766e-05, "loss": 0.642, "step": 5},
    {"epoch": 0.021321961620469083, "grad_norm": 1.3538587093353271, "learning_rate": 4.978678038379531e-05, "loss": 0.4891, "step": 10},
    {"epoch": 0.031982942430703626, "grad_norm": 1.1701405048370361, "learning_rate": 4.9680170575692967e-05, "loss": 0.3426, "step": 15},
    {"epoch": 0.042643923240938165, "grad_norm": 1.3944813013076782, "learning_rate": 4.957356076759062e-05, "loss": 0.236, "step": 20},
    {"epoch": 0.053304904051172705, "grad_norm": 2.115678548812866, "learning_rate": 4.9466950959488276e-05, "loss": 0.1691, "step": 25},
    {"epoch": 0.06396588486140725, "grad_norm": 1.9631171226501465, "learning_rate": 4.936034115138593e-05, "loss": 0.1418, "step": 30},
    {"epoch": 0.07462686567164178, "grad_norm": 0.4107034206390381, "learning_rate": 4.9253731343283586e-05, "loss": 0.0867, "step": 35},
    {"epoch": 0.08528784648187633, "grad_norm": 1.5991907119750977, "learning_rate": 4.914712153518124e-05, "loss": 0.1, "step": 40},
    {"epoch": 0.09594882729211088, "grad_norm": 1.274415373802185, "learning_rate": 4.904051172707889e-05, "loss": 0.1098, "step": 45},
    {"epoch": 0.10660980810234541, "grad_norm": 1.7743926048278809, "learning_rate": 4.893390191897655e-05, "loss": 0.0664, "step": 50},
    {"epoch": 0.11727078891257996, "grad_norm": 0.32264184951782227, "learning_rate": 4.88272921108742e-05, "loss": 0.0683, "step": 55},
    {"epoch": 0.1279317697228145, "grad_norm": 1.0625122785568237, "learning_rate": 4.872068230277186e-05, "loss": 0.0945, "step": 60},
    {"epoch": 0.13859275053304904, "grad_norm": 0.5553732514381409, "learning_rate": 4.861407249466951e-05, "loss": 0.0851, "step": 65},
    {"epoch": 0.14925373134328357, "grad_norm": 0.5356264114379883, "learning_rate": 4.850746268656717e-05, "loss": 0.0773, "step": 70},
    {"epoch": 0.15991471215351813, "grad_norm": 1.28850257396698, "learning_rate": 4.840085287846482e-05, "loss": 0.0431, "step": 75},
    {"epoch": 0.17057569296375266, "grad_norm": 0.18133750557899475, "learning_rate": 4.829424307036248e-05, "loss": 0.0413, "step": 80},
    {"epoch": 0.1812366737739872, "grad_norm": 0.17871356010437012, "learning_rate": 4.8187633262260126e-05, "loss": 0.0268, "step": 85},
    {"epoch": 0.19189765458422176, "grad_norm": 0.15951958298683167, "learning_rate": 4.808102345415779e-05, "loss": 0.0268, "step": 90},
    {"epoch": 0.2025586353944563, "grad_norm": 0.4204452335834503, "learning_rate": 4.7974413646055436e-05, "loss": 0.0441, "step": 95},
    {"epoch": 0.21321961620469082, "grad_norm": 0.15377482771873474, "learning_rate": 4.78678038379531e-05, "loss": 0.0458, "step": 100},
    {"epoch": 0.22388059701492538, "grad_norm": 0.16754835844039917, "learning_rate": 4.7761194029850745e-05, "loss": 0.0223, "step": 105},
    {"epoch": 0.2345415778251599, "grad_norm": 2.464712619781494, "learning_rate": 4.765458422174841e-05, "loss": 0.0697, "step": 110},
    {"epoch": 0.24520255863539445, "grad_norm": 1.1218258142471313, "learning_rate": 4.7547974413646055e-05, "loss": 0.0235, "step": 115},
    {"epoch": 0.255863539445629, "grad_norm": 0.5324303507804871, "learning_rate": 4.7441364605543716e-05, "loss": 0.0404, "step": 120},
    {"epoch": 0.26652452025586354, "grad_norm": 0.15958300232887268, "learning_rate": 4.7334754797441364e-05, "loss": 0.0674, "step": 125},
    {"epoch": 0.2771855010660981, "grad_norm": 0.14252367615699768, "learning_rate": 4.7228144989339026e-05, "loss": 0.016, "step": 130},
    {"epoch": 0.2878464818763326, "grad_norm": 0.10775002092123032, "learning_rate": 4.7121535181236674e-05, "loss": 0.0394, "step": 135},
    {"epoch": 0.29850746268656714, "grad_norm": 0.0994935929775238, "learning_rate": 4.7014925373134335e-05, "loss": 0.0221, "step": 140},
    {"epoch": 0.3091684434968017, "grad_norm": 0.40200895071029663, "learning_rate": 4.690831556503198e-05, "loss": 0.0446, "step": 145},
    {"epoch": 0.31982942430703626, "grad_norm": 0.0959523469209671, "learning_rate": 4.6801705756929645e-05, "loss": 0.0876, "step": 150},
    {"epoch": 0.3304904051172708, "grad_norm": 1.6825580596923828, "learning_rate": 4.669509594882729e-05, "loss": 0.0709, "step": 155},
    {"epoch": 0.3411513859275053, "grad_norm": 0.09323113411664963, "learning_rate": 4.658848614072495e-05, "loss": 0.031, "step": 160},
    {"epoch": 0.35181236673773986, "grad_norm": 3.48555588722229, "learning_rate": 4.64818763326226e-05, "loss": 0.0495, "step": 165},
    {"epoch": 0.3624733475479744, "grad_norm": 0.11460929363965988, "learning_rate": 4.637526652452026e-05, "loss": 0.0216, "step": 170},
    {"epoch": 0.373134328358209, "grad_norm": 0.5878208875656128, "learning_rate": 4.626865671641791e-05, "loss": 0.0665, "step": 175},
    {"epoch": 0.3837953091684435, "grad_norm": 0.49647751450538635, "learning_rate": 4.6162046908315566e-05, "loss": 0.0207, "step": 180},
    {"epoch": 0.39445628997867804, "grad_norm": 1.6696819067001343, "learning_rate": 4.605543710021322e-05, "loss": 0.0667, "step": 185},
    {"epoch": 0.4051172707889126, "grad_norm": 0.13710583746433258, "learning_rate": 4.5948827292110876e-05, "loss": 0.0156, "step": 190},
    {"epoch": 0.4157782515991471, "grad_norm": 0.22347423434257507, "learning_rate": 4.584221748400853e-05, "loss": 0.0212, "step": 195},
    {"epoch": 0.42643923240938164, "grad_norm": 3.933608293533325, "learning_rate": 4.5735607675906185e-05, "loss": 0.0454, "step": 200},
    {"epoch": 0.43710021321961623, "grad_norm": 0.0807705745100975, "learning_rate": 4.562899786780384e-05, "loss": 0.0119, "step": 205},
    {"epoch": 0.44776119402985076, "grad_norm": 0.07300090044736862, "learning_rate": 4.5522388059701495e-05, "loss": 0.0128, "step": 210},
    {"epoch": 0.4584221748400853, "grad_norm": 0.8413541913032532, "learning_rate": 4.541577825159915e-05, "loss": 0.0129, "step": 215},
    {"epoch": 0.4690831556503198, "grad_norm": 0.06723010540008545, "learning_rate": 4.5309168443496804e-05, "loss": 0.0113, "step": 220},
    {"epoch": 0.47974413646055436, "grad_norm": 0.4990728795528412, "learning_rate": 4.520255863539446e-05, "loss": 0.0522, "step": 225},
    {"epoch": 0.4904051172707889, "grad_norm": 2.1194300651550293, "learning_rate": 4.5095948827292114e-05, "loss": 0.0349, "step": 230},
    {"epoch": 0.5010660980810234, "grad_norm": 1.9606651067733765, "learning_rate": 4.498933901918977e-05, "loss": 0.0108, "step": 235},
    {"epoch": 0.511727078891258, "grad_norm": 9.19564151763916, "learning_rate": 4.488272921108742e-05, "loss": 0.038, "step": 240},
    {"epoch": 0.5223880597014925, "grad_norm": 0.06613659113645554, "learning_rate": 4.477611940298508e-05, "loss": 0.0281, "step": 245},
    {"epoch": 0.5330490405117271, "grad_norm": 4.227926254272461, "learning_rate": 4.466950959488273e-05, "loss": 0.0399, "step": 250},
    {"epoch": 0.5437100213219617, "grad_norm": 0.21188561618328094, "learning_rate": 4.456289978678039e-05, "loss": 0.0096, "step": 255},
    {"epoch": 0.5543710021321961, "grad_norm": 3.619363307952881, "learning_rate": 4.445628997867804e-05, "loss": 0.0709, "step": 260},
    {"epoch": 0.5650319829424307, "grad_norm": 0.06145023927092552, "learning_rate": 4.43496801705757e-05, "loss": 0.0368, "step": 265},
    {"epoch": 0.5756929637526652, "grad_norm": 0.0769047662615776, "learning_rate": 4.424307036247335e-05, "loss": 0.0418, "step": 270},
    {"epoch": 0.5863539445628998, "grad_norm": 0.5211371779441833, "learning_rate": 4.4136460554371006e-05, "loss": 0.0243, "step": 275},
    {"epoch": 0.5970149253731343, "grad_norm": 6.135162830352783, "learning_rate": 4.402985074626866e-05, "loss": 0.0365, "step": 280},
    {"epoch": 0.6076759061833689, "grad_norm": 0.081791453063488, "learning_rate": 4.3923240938166316e-05, "loss": 0.0611, "step": 285},
    {"epoch": 0.6183368869936035, "grad_norm": 0.07080066949129105, "learning_rate": 4.381663113006397e-05, "loss": 0.0506, "step": 290},
    {"epoch": 0.6289978678038379, "grad_norm": 5.437733173370361, "learning_rate": 4.3710021321961625e-05, "loss": 0.0302, "step": 295},
    {"epoch": 0.6396588486140725, "grad_norm": 0.05375446379184723, "learning_rate": 4.360341151385928e-05, "loss": 0.0137, "step": 300},
    {"epoch": 0.650319829424307, "grad_norm": 0.4377392828464508, "learning_rate": 4.3496801705756935e-05, "loss": 0.0621, "step": 305},
    {"epoch": 0.6609808102345416, "grad_norm": 0.05853692814707756, "learning_rate": 4.339019189765459e-05, "loss": 0.0095, "step": 310},
    {"epoch": 0.6716417910447762, "grad_norm": 1.8842341899871826, "learning_rate": 4.328358208955224e-05, "loss": 0.1015, "step": 315},
    {"epoch": 0.6823027718550106, "grad_norm": 0.055179413408041, "learning_rate": 4.31769722814499e-05, "loss": 0.0075, "step": 320},
    {"epoch": 0.6929637526652452, "grad_norm": 0.054512593895196915, "learning_rate": 4.307036247334755e-05, "loss": 0.0531, "step": 325},
    {"epoch": 0.7036247334754797, "grad_norm": 0.0497431717813015, "learning_rate": 4.29637526652452e-05, "loss": 0.0362, "step": 330},
    {"epoch": 0.7142857142857143, "grad_norm": 0.15405867993831635, "learning_rate": 4.2857142857142856e-05, "loss": 0.0481, "step": 335},
    {"epoch": 0.7249466950959488, "grad_norm": 0.04826023057103157, "learning_rate": 4.275053304904051e-05, "loss": 0.0199, "step": 340},
    {"epoch": 0.7356076759061834, "grad_norm": 0.05482298508286476, "learning_rate": 4.2643923240938166e-05, "loss": 0.0581, "step": 345},
    {"epoch": 0.746268656716418, "grad_norm": 0.0809388980269432, "learning_rate": 4.253731343283582e-05, "loss": 0.0264, "step": 350},
    {"epoch": 0.7569296375266524, "grad_norm": 0.05720314383506775, "learning_rate": 4.2430703624733475e-05, "loss": 0.0102, "step": 355},
    {"epoch": 0.767590618336887, "grad_norm": 1.6452670097351074, "learning_rate": 4.232409381663113e-05, "loss": 0.0335, "step": 360},
    {"epoch": 0.7782515991471215, "grad_norm": 2.9225914478302, "learning_rate": 4.2217484008528785e-05, "loss": 0.0105, "step": 365},
    {"epoch": 0.7889125799573561, "grad_norm": 0.04976259917020798, "learning_rate": 4.211087420042644e-05, "loss": 0.0305, "step": 370},
    {"epoch": 0.7995735607675906, "grad_norm": 0.068101666867733, "learning_rate": 4.2004264392324094e-05, "loss": 0.033, "step": 375},
    {"epoch": 0.8102345415778252, "grad_norm": 3.587697982788086, "learning_rate": 4.189765458422175e-05, "loss": 0.012, "step": 380},
    {"epoch": 0.8208955223880597, "grad_norm": 2.2964396476745605, "learning_rate": 4.1791044776119404e-05, "loss": 0.0381, "step": 385},
    {"epoch": 0.8315565031982942, "grad_norm": 0.20035025477409363, "learning_rate": 4.168443496801706e-05, "loss": 0.0916, "step": 390},
    {"epoch": 0.8422174840085288, "grad_norm": 0.08300536870956421, "learning_rate": 4.157782515991471e-05, "loss": 0.0304, "step": 395},
    {"epoch": 0.8528784648187633, "grad_norm": 0.0842461958527565, "learning_rate": 4.147121535181237e-05, "loss": 0.0555, "step": 400},
    {"epoch": 0.8635394456289979, "grad_norm": 1.019338846206665, "learning_rate": 4.136460554371002e-05, "loss": 0.0907, "step": 405},
    {"epoch": 0.8742004264392325, "grad_norm": 1.417564034461975, "learning_rate": 4.125799573560768e-05, "loss": 0.0751, "step": 410},
    {"epoch": 0.8848614072494669, "grad_norm": 0.34951287508010864, "learning_rate": 4.115138592750533e-05, "loss": 0.0099, "step": 415},
    {"epoch": 0.8955223880597015, "grad_norm": 0.12608490884304047, "learning_rate": 4.104477611940299e-05, "loss": 0.0247, "step": 420},
    {"epoch": 0.906183368869936, "grad_norm": 0.5821333527565002, "learning_rate": 4.093816631130064e-05, "loss": 0.0395, "step": 425},
    {"epoch": 0.9168443496801706, "grad_norm": 1.0737979412078857, "learning_rate": 4.0831556503198296e-05, "loss": 0.0375, "step": 430},
    {"epoch": 0.9275053304904051, "grad_norm": 3.866457939147949, "learning_rate": 4.072494669509595e-05, "loss": 0.0189, "step": 435},
    {"epoch": 0.9381663113006397, "grad_norm": 0.06562425196170807, "learning_rate": 4.0618336886993606e-05, "loss": 0.0373, "step": 440},
    {"epoch": 0.9488272921108742, "grad_norm": 0.2278442084789276, "learning_rate": 4.051172707889126e-05, "loss": 0.047, "step": 445},
    {"epoch": 0.9594882729211087, "grad_norm": 0.05468602105975151, "learning_rate": 4.0405117270788915e-05, "loss": 0.0061, "step": 450},
    {"epoch": 0.9701492537313433, "grad_norm": 0.8784050941467285, "learning_rate": 4.029850746268657e-05, "loss": 0.0243, "step": 455},
    {"epoch": 0.9808102345415778, "grad_norm": 3.179516553878784, "learning_rate": 4.0191897654584225e-05, "loss": 0.0569, "step": 460},
    {"epoch": 0.9914712153518124, "grad_norm": 1.1745928525924683, "learning_rate": 4.008528784648188e-05, "loss": 0.0533, "step": 465},
    {"epoch": 1.0, "eval_accuracy": 0.9733333333333334, "eval_loss": 0.0818064734339714, "eval_runtime": 16.5375, "eval_samples_per_second": 226.757, "eval_steps_per_second": 7.135, "step": 469},
    {"epoch": 1.0021321961620469, "grad_norm": 0.06827237457036972, "learning_rate": 3.997867803837953e-05, "loss": 0.0325, "step": 470},
    {"epoch": 1.0127931769722816, "grad_norm": 0.0652860701084137, "learning_rate": 3.987206823027719e-05, "loss": 0.0438, "step": 475},
    {"epoch": 1.023454157782516, "grad_norm": 0.03856111317873001, "learning_rate": 3.976545842217484e-05, "loss": 0.028, "step": 480},
    {"epoch": 1.0341151385927505, "grad_norm": 0.2875308096408844, "learning_rate": 3.96588486140725e-05, "loss": 0.0124, "step": 485},
    {"epoch": 1.044776119402985, "grad_norm": 0.09098055213689804, "learning_rate": 3.9552238805970146e-05, "loss": 0.038, "step": 490},
    {"epoch": 1.0554371002132197, "grad_norm": 0.11972873657941818, "learning_rate": 3.944562899786781e-05, "loss": 0.0176, "step": 495},
    {"epoch": 1.0660980810234542, "grad_norm": 0.05295296385884285, "learning_rate": 3.9339019189765456e-05, "loss": 0.0059, "step": 500},
    {"epoch": 1.0767590618336886, "grad_norm": 0.0477861650288105, "learning_rate": 3.923240938166312e-05, "loss": 0.0049, "step": 505},
    {"epoch": 1.0874200426439233, "grad_norm": 3.127377986907959, "learning_rate": 3.9125799573560765e-05, "loss": 0.0202, "step": 510},
    {"epoch": 1.0980810234541578, "grad_norm": 0.044001467525959015, "learning_rate": 3.901918976545843e-05, "loss": 0.0083, "step": 515},
    {"epoch": 1.1087420042643923, "grad_norm": 0.05000412091612816, "learning_rate": 3.8912579957356075e-05, "loss": 0.0058, "step": 520},
    {"epoch": 1.1194029850746268, "grad_norm": 0.09975217282772064, "learning_rate": 3.8805970149253736e-05, "loss": 0.0051, "step": 525},
    {"epoch": 1.1300639658848615, "grad_norm": 0.06310124695301056, "learning_rate": 3.8699360341151384e-05, "loss": 0.0167, "step": 530},
    {"epoch": 1.140724946695096, "grad_norm": 0.036271832883358, "learning_rate": 3.8592750533049046e-05, "loss": 0.005, "step": 535},
    {"epoch": 1.1513859275053304, "grad_norm": 0.6132103800773621, "learning_rate": 3.8486140724946694e-05, "loss": 0.0378, "step": 540},
    {"epoch": 1.1620469083155651, "grad_norm": 0.18139950931072235, "learning_rate": 3.8379530916844355e-05, "loss": 0.0202, "step": 545},
    {"epoch": 1.1727078891257996, "grad_norm": 0.039762232452631, "learning_rate": 3.8272921108742e-05, "loss": 0.0043, "step": 550},
    {"epoch": 1.183368869936034, "grad_norm": 2.904137134552002, "learning_rate": 3.8166311300639665e-05, "loss": 0.0175, "step": 555},
    {"epoch": 1.1940298507462686, "grad_norm": 0.032053444534540176, "learning_rate": 3.805970149253731e-05, "loss": 0.0048, "step": 560},
    {"epoch": 1.2046908315565032, "grad_norm": 0.05431621894240379, "learning_rate": 3.7953091684434974e-05, "loss": 0.0059, "step": 565},
    {"epoch": 1.2153518123667377, "grad_norm": 0.031247926875948906, "learning_rate": 3.784648187633262e-05, "loss": 0.0038, "step": 570},
    {"epoch": 1.2260127931769722, "grad_norm": 0.14448019862174988, "learning_rate": 3.7739872068230284e-05, "loss": 0.0039, "step": 575},
    {"epoch": 1.236673773987207, "grad_norm": 0.04965840280056, "learning_rate": 3.763326226012793e-05, "loss": 0.0038, "step": 580},
    {"epoch": 1.2473347547974414, "grad_norm": 0.03658933565020561, "learning_rate": 3.752665245202559e-05, "loss": 0.0049, "step": 585},
    {"epoch": 1.2579957356076759, "grad_norm": 0.0635569840669632, "learning_rate": 3.742004264392324e-05, "loss": 0.004, "step": 590},
    {"epoch": 1.2686567164179103, "grad_norm": 0.027837136760354042, "learning_rate": 3.73134328358209e-05, "loss": 0.0035, "step": 595},
    {"epoch": 1.279317697228145, "grad_norm": 1.39699387550354, "learning_rate": 3.720682302771855e-05, "loss": 0.0055, "step": 600},
    {"epoch": 1.2899786780383795, "grad_norm": 3.0259928703308105, "learning_rate": 3.710021321961621e-05, "loss": 0.0062, "step": 605},
    {"epoch": 1.3006396588486142, "grad_norm": 0.026535384356975555, "learning_rate": 3.699360341151386e-05, "loss": 0.0033, "step": 610},
    {"epoch": 1.3113006396588487, "grad_norm": 0.025836490094661713, "learning_rate": 3.6886993603411515e-05, "loss": 0.0376, "step": 615},
    {"epoch": 1.3219616204690832, "grad_norm": 0.027299480512738228, "learning_rate": 3.678038379530917e-05, "loss": 0.0033, "step": 620},
    {"epoch": 1.3326226012793176, "grad_norm": 0.06287551671266556, "learning_rate": 3.6673773987206824e-05, "loss": 0.0101, "step": 625},
    {"epoch": 1.3432835820895521, "grad_norm": 0.026839163154363632, "learning_rate": 3.656716417910448e-05, "loss": 0.0038, "step": 630},
    {"epoch": 1.3539445628997868, "grad_norm": 0.02797815576195717, "learning_rate": 3.6460554371002134e-05, "loss": 0.0037, "step": 635},
    {"epoch": 1.3646055437100213, "grad_norm": 0.02635449357330799, "learning_rate": 3.635394456289979e-05, "loss": 0.0092, "step": 640},
    {"epoch": 1.375266524520256, "grad_norm": 0.11475779861211777, "learning_rate": 3.624733475479744e-05, "loss": 0.0034, "step": 645},
    {"epoch": 1.3859275053304905, "grad_norm": 0.02528528869152069, "learning_rate": 3.61407249466951e-05, "loss": 0.0269, "step": 650},
    {"epoch": 1.396588486140725, "grad_norm": 0.02465369552373886, "learning_rate": 3.603411513859275e-05, "loss": 0.0039, "step": 655},
    {"epoch": 1.4072494669509594, "grad_norm": 0.025823432952165604, "learning_rate": 3.592750533049041e-05, "loss": 0.0189, "step": 660},
    {"epoch": 1.417910447761194, "grad_norm": 0.024902813136577606, "learning_rate": 3.582089552238806e-05, "loss": 0.0095, "step": 665},
    {"epoch": 1.4285714285714286, "grad_norm": 0.022751104086637497, "learning_rate": 3.571428571428572e-05, "loss": 0.0078, "step": 670},
    {"epoch": 1.439232409381663, "grad_norm": 0.024917876347899437, "learning_rate": 3.560767590618337e-05, "loss": 0.003, "step": 675},
    {"epoch": 1.4498933901918978, "grad_norm": 0.025312010198831558, "learning_rate": 3.5501066098081026e-05, "loss": 0.0639, "step": 680},
    {"epoch": 1.4605543710021323, "grad_norm": 0.21212908625602722, "learning_rate": 3.539445628997868e-05, "loss": 0.0049, "step": 685},
    {"epoch": 1.4712153518123667, "grad_norm": 0.33906319737434387, "learning_rate": 3.5287846481876336e-05, "loss": 0.0037, "step": 690},
    {"epoch": 1.4818763326226012, "grad_norm": 0.026129761710762978, "learning_rate": 3.518123667377399e-05, "loss": 0.0148, "step": 695},
    {"epoch": 1.4925373134328357, "grad_norm": 0.2674362361431122, "learning_rate": 3.5074626865671645e-05, "loss": 0.0125, "step": 700},
    {"epoch": 1.5031982942430704, "grad_norm": 0.02207585982978344, "learning_rate": 3.496801705756929e-05, "loss": 0.0034, "step": 705},
    {"epoch": 1.5138592750533049, "grad_norm": 0.022782066836953163, "learning_rate": 3.4861407249466955e-05, "loss": 0.0038, "step": 710},
    {"epoch": 1.5245202558635396, "grad_norm": 0.024613749235868454, "learning_rate": 3.47547974413646e-05, "loss": 0.0027, "step": 715},
    {"epoch": 1.535181236673774, "grad_norm": 0.025194577872753143, "learning_rate": 3.4648187633262264e-05, "loss": 0.004, "step": 720},
    {"epoch": 1.5458422174840085, "grad_norm": 0.023784196004271507, "learning_rate": 3.454157782515991e-05, "loss": 0.0055, "step": 725},
    {"epoch": 1.556503198294243, "grad_norm": 0.04763465374708176, "learning_rate": 3.4434968017057574e-05, "loss": 0.0036, "step": 730},
    {"epoch": 1.5671641791044775, "grad_norm": 0.02031880058348179, "learning_rate": 3.432835820895522e-05, "loss": 0.0303, "step": 735},
    {"epoch": 1.5778251599147122, "grad_norm": 0.021390356123447418, "learning_rate": 3.422174840085288e-05, "loss": 0.0039, "step": 740},
    {"epoch": 1.5884861407249466, "grad_norm": 0.02824382856488228, "learning_rate": 3.411513859275053e-05, "loss": 0.0084, "step": 745},
    {"epoch": 1.5991471215351813, "grad_norm": 0.02049950882792473, "learning_rate": 3.400852878464819e-05, "loss": 0.0557, "step": 750},
    {"epoch": 1.6098081023454158, "grad_norm": 0.02126970887184143, "learning_rate": 3.390191897654584e-05, "loss": 0.0034, "step": 755},
    {"epoch": 1.6204690831556503, "grad_norm": 0.019766928628087044, "learning_rate": 3.37953091684435e-05, "loss": 0.0059, "step": 760},
    {"epoch": 1.6311300639658848, "grad_norm": 0.0202566497027874, "learning_rate": 3.368869936034115e-05, "loss": 0.0025, "step": 765},
    {"epoch": 1.6417910447761193, "grad_norm": 0.019975759088993073, "learning_rate": 3.358208955223881e-05, "loss": 0.0064, "step": 770},
    {"epoch": 1.652452025586354, "grad_norm": 0.020405394956469536, "learning_rate": 3.347547974413646e-05, "loss": 0.0308, "step": 775},
    {"epoch": 1.6631130063965884, "grad_norm": 0.019549036398530006, "learning_rate": 3.336886993603412e-05, "loss": 0.0229, "step": 780},
    {"epoch": 1.6737739872068231, "grad_norm": 0.019896037876605988, "learning_rate": 3.326226012793177e-05, "loss": 0.0028, "step": 785},
    {"epoch": 1.6844349680170576, "grad_norm": 0.019387420266866684, "learning_rate": 3.3155650319829424e-05, "loss": 0.0098, "step": 790},
    {"epoch": 1.695095948827292, "grad_norm": 0.02730773575603962, "learning_rate": 3.304904051172708e-05, "loss": 0.0024, "step": 795},
    {"epoch": 1.7057569296375266, "grad_norm": 0.04167855903506279, "learning_rate": 3.294243070362473e-05, "loss": 0.0066, "step": 800},
    {"epoch": 1.716417910447761, "grad_norm": 0.021796202287077904, "learning_rate": 3.283582089552239e-05, "loss": 0.0023, "step": 805},
    {"epoch": 1.7270788912579957, "grad_norm": 1.0335674285888672, "learning_rate": 3.272921108742004e-05, "loss": 0.0388, "step": 810},
    {"epoch": 1.7377398720682304, "grad_norm": 3.0446865558624268, "learning_rate": 3.26226012793177e-05, "loss": 0.0409, "step": 815},
    {"epoch": 1.748400852878465, "grad_norm": 0.019520392641425133, "learning_rate": 3.251599147121535e-05, "loss": 0.0024, "step": 820},
    {"epoch": 1.7590618336886994, "grad_norm": 0.02665073238313198, "learning_rate": 3.240938166311301e-05, "loss": 0.0024, "step": 825},
    {"epoch": 1.7697228144989339, "grad_norm": 0.033884141594171524, "learning_rate": 3.230277185501066e-05, "loss": 0.0056, "step": 830},
    {"epoch": 1.7803837953091683, "grad_norm": 0.018473170697689056, "learning_rate": 3.2196162046908317e-05, "loss": 0.0026, "step": 835},
    {"epoch": 1.7910447761194028, "grad_norm": 0.018962884321808815, "learning_rate": 3.208955223880597e-05, "loss": 0.0026, "step": 840},
    {"epoch": 1.8017057569296375, "grad_norm": 0.020032551139593124, "learning_rate": 3.1982942430703626e-05, "loss": 0.0023, "step": 845},
    {"epoch": 1.8123667377398722, "grad_norm": 0.019089415669441223, "learning_rate": 3.187633262260128e-05, "loss": 0.0024, "step": 850},
    {"epoch": 1.8230277185501067, "grad_norm": 0.01713535375893116, "learning_rate": 3.1769722814498935e-05, "loss": 0.0059, "step": 855},
    {"epoch": 1.8336886993603412, "grad_norm": 0.019119979813694954, "learning_rate": 3.166311300639659e-05, "loss": 0.0022, "step": 860},
    {"epoch": 1.8443496801705757, "grad_norm": 5.530079364776611, "learning_rate": 3.1556503198294245e-05, "loss": 0.0241, "step": 865},
    {"epoch": 1.8550106609808101, "grad_norm": 0.025466658174991608, "learning_rate": 3.14498933901919e-05, "loss": 0.006, "step": 870},
    {"epoch": 1.8656716417910446, "grad_norm": 4.075272560119629, "learning_rate": 3.1343283582089554e-05, "loss": 0.01, "step": 875},
    {"epoch": 1.8763326226012793, "grad_norm": 0.022275226190686226, "learning_rate": 3.123667377398721e-05, "loss": 0.0022, "step": 880},
    {"epoch": 1.886993603411514, "grad_norm": 0.08238299190998077, "learning_rate": 3.1130063965884864e-05, "loss": 0.0023, "step": 885},
    {"epoch": 1.8976545842217485, "grad_norm": 0.017617972567677498, "learning_rate": 3.102345415778252e-05, "loss": 0.0139, "step": 890},
    {"epoch": 1.908315565031983, "grad_norm": 0.016991961747407913, "learning_rate": 3.0916844349680173e-05, "loss": 0.0026, "step": 895},
    {"epoch": 1.9189765458422174, "grad_norm": 0.016072068363428116, "learning_rate": 3.081023454157783e-05, "loss": 0.0021, "step": 900},
    {"epoch": 1.929637526652452, "grad_norm": 0.01626187562942505, "learning_rate": 3.070362473347548e-05, "loss": 0.0022, "step": 905},
    {"epoch": 1.9402985074626866, "grad_norm": 2.3906805515289307, "learning_rate": 3.059701492537314e-05, "loss": 0.033, "step": 910},
    {"epoch": 1.950959488272921, "grad_norm": 0.04245917871594429, "learning_rate": 3.0490405117270792e-05, "loss": 0.0021, "step": 915},
    {"epoch": 1.9616204690831558, "grad_norm": 0.019344056025147438, "learning_rate": 3.0383795309168444e-05, "loss": 0.0026, "step": 920},
    {"epoch": 1.9722814498933903, "grad_norm": 4.797880172729492, "learning_rate": 3.0277185501066102e-05, "loss": 0.0201, "step": 925},
    {"epoch": 1.9829424307036247, "grad_norm": 0.01574319414794445, "learning_rate": 3.0170575692963753e-05, "loss": 0.002, "step": 930},
    {"epoch": 1.9936034115138592, "grad_norm": 0.018245955929160118, "learning_rate": 3.006396588486141e-05, "loss": 0.0019, "step": 935},
    {"epoch": 2.0, "eval_accuracy": 0.9928, "eval_loss": 0.02574796974658966, "eval_runtime": 16.4989, "eval_samples_per_second": 227.288, "eval_steps_per_second": 7.152, "step": 938},
    {"epoch": 2.0042643923240937, "grad_norm": 0.016762305051088333, "learning_rate": 2.9957356076759063e-05, "loss": 0.0019, "step": 940},
    {"epoch": 2.014925373134328, "grad_norm": 0.017123844474554062, "learning_rate": 2.9850746268656714e-05, "loss": 0.0021, "step": 945},
    {"epoch": 2.025586353944563, "grad_norm": 0.017272012308239937, "learning_rate": 2.9744136460554372e-05, "loss": 0.002, "step": 950},
    {"epoch": 2.0362473347547976, "grad_norm": 0.015683073550462723, "learning_rate": 2.9637526652452023e-05, "loss": 0.002, "step": 955},
    {"epoch": 2.046908315565032, "grad_norm": 0.014545220881700516, "learning_rate": 2.953091684434968e-05, "loss": 0.002, "step": 960},
    {"epoch": 2.0575692963752665, "grad_norm": 0.1848621368408203, "learning_rate": 2.9424307036247333e-05, "loss": 0.0022, "step": 965},
    {"epoch": 2.068230277185501, "grad_norm": 0.015435046516358852, "learning_rate": 2.931769722814499e-05, "loss": 0.0018, "step": 970},
    {"epoch": 2.0788912579957355, "grad_norm": 0.016446534544229507, "learning_rate": 2.9211087420042642e-05, "loss": 0.0019, "step": 975},
    {"epoch": 2.08955223880597, "grad_norm": 0.014508787542581558, "learning_rate": 2.91044776119403e-05, "loss": 0.0021, "step": 980},
    {"epoch": 2.100213219616205, "grad_norm": 0.3053794503211975, "learning_rate": 2.8997867803837952e-05, "loss": 0.002, "step": 985},
    {"epoch": 2.1108742004264394, "grad_norm": 0.014296243898570538, "learning_rate": 2.889125799573561e-05, "loss": 0.0017, "step": 990},
    {"epoch": 2.121535181236674, "grad_norm": 0.014104560017585754, "learning_rate": 2.878464818763326e-05, "loss": 0.0018, "step": 995},
    {"epoch": 2.1321961620469083, "grad_norm": 0.013995345681905746, "learning_rate": 2.867803837953092e-05, "loss": 0.0022, "step": 1000},
    {"epoch": 2.142857142857143, "grad_norm": 0.014830300584435463, "learning_rate": 2.857142857142857e-05, "loss": 0.0016, "step": 1005},
    {"epoch": 2.1535181236673773, "grad_norm": 0.018911438062787056, "learning_rate": 2.846481876332623e-05, "loss": 0.0017, "step": 1010},
    {"epoch": 2.1641791044776117, "grad_norm": 0.014000472612679005, "learning_rate": 2.835820895522388e-05, "loss": 0.0017, "step": 1015},
    {"epoch": 2.1748400852878467, "grad_norm": 0.014963737688958645, "learning_rate": 2.825159914712154e-05, "loss": 0.0017, "step": 1020},
    {"epoch": 2.185501066098081, "grad_norm": 0.024099769070744514, "learning_rate": 2.814498933901919e-05, "loss": 0.0026, "step": 1025},
    {"epoch": 2.1961620469083156, "grad_norm": 0.01498680841177702, "learning_rate": 2.8038379530916848e-05, "loss": 0.0017, "step": 1030},
    {"epoch": 2.20682302771855, "grad_norm": 0.012932020239531994, "learning_rate": 2.79317697228145e-05, "loss": 0.0016, "step": 1035},
    {"epoch": 2.2174840085287846, "grad_norm": 0.013172201812267303, "learning_rate": 2.7825159914712157e-05, "loss": 0.0017, "step": 1040},
    {"epoch": 2.228144989339019, "grad_norm": 0.01667962037026882, "learning_rate": 2.771855010660981e-05, "loss": 0.002, "step": 1045},
    {"epoch": 2.2388059701492535, "grad_norm": 0.012727811001241207, "learning_rate": 2.7611940298507467e-05, "loss": 0.0016, "step": 1050},
    {"epoch": 2.2494669509594885, "grad_norm": 0.012849349528551102, "learning_rate": 2.7505330490405118e-05, "loss": 0.0016, "step": 1055},
    {"epoch": 2.260127931769723, "grad_norm": 0.0134620675817132, "learning_rate": 2.7398720682302776e-05, "loss": 0.0032, "step": 1060},
    {"epoch": 2.2707889125799574, "grad_norm": 0.0129215307533741, "learning_rate": 2.7292110874200428e-05, "loss": 0.0015, "step": 1065},
    {"epoch": 2.281449893390192, "grad_norm": 0.012851559557020664, "learning_rate": 2.7185501066098086e-05, "loss": 0.0015, "step": 1070},
    {"epoch": 2.2921108742004264, "grad_norm": 0.01229612622410059, "learning_rate": 2.7078891257995737e-05, "loss": 0.0016, "step": 1075},
    {"epoch": 2.302771855010661, "grad_norm": 0.012455361895263195, "learning_rate": 2.6972281449893395e-05, "loss": 0.0015, "step": 1080},
    {"epoch": 2.3134328358208958, "grad_norm": 0.012231805361807346, "learning_rate": 2.6865671641791047e-05, "loss": 0.0015, "step": 1085},
    {"epoch": 2.3240938166311302, "grad_norm": 0.01212611235678196, "learning_rate": 2.6759061833688705e-05, "loss": 0.0014, "step": 1090},
    {"epoch": 2.3347547974413647, "grad_norm": 0.012929637916386127, "learning_rate": 2.6652452025586356e-05, "loss": 0.0022, "step": 1095},
    {"epoch": 2.345415778251599, "grad_norm": 0.012110981158912182, "learning_rate": 2.6545842217484007e-05, "loss": 0.0101, "step": 1100},
    {"epoch": 2.3560767590618337, "grad_norm": 0.012155153788626194, "learning_rate": 2.6439232409381666e-05, "loss": 0.0014, "step": 1105},
    {"epoch": 2.366737739872068, "grad_norm": 0.01241675391793251, "learning_rate": 2.6332622601279317e-05, "loss": 0.0041, "step": 1110},
    {"epoch": 2.3773987206823026, "grad_norm": 0.012072499841451645, "learning_rate": 2.6226012793176975e-05, "loss": 0.0018, "step": 1115},
    {"epoch": 2.388059701492537, "grad_norm": 0.01313025876879692, "learning_rate": 2.6119402985074626e-05, "loss": 0.0015, "step": 1120},
    {"epoch": 2.398720682302772, "grad_norm": 0.012141906656324863, "learning_rate": 2.6012793176972285e-05, "loss": 0.0015, "step": 1125},
    {"epoch": 2.4093816631130065, "grad_norm": 0.012537244707345963, "learning_rate": 2.5906183368869936e-05, "loss": 0.0014, "step": 1130},
    {"epoch": 2.420042643923241, "grad_norm": 0.011667958460748196, "learning_rate": 2.5799573560767594e-05, "loss": 0.0014, "step": 1135},
    {"epoch": 2.4307036247334755, "grad_norm": 0.0581309050321579, "learning_rate": 2.5692963752665245e-05, "loss": 0.0014, "step": 1140},
    {"epoch": 2.44136460554371, "grad_norm": 0.01184321939945221, "learning_rate": 2.5586353944562904e-05, "loss": 0.0014, "step": 1145},
    {"epoch": 2.4520255863539444, "grad_norm": 0.011883780360221863, "learning_rate": 2.5479744136460555e-05, "loss": 0.0014, "step": 1150},
    {"epoch": 2.4626865671641793, "grad_norm": 0.011959915980696678, "learning_rate": 2.537313432835821e-05, "loss": 0.0014, "step": 1155},
    {"epoch": 2.473347547974414, "grad_norm": 0.011394010856747627, "learning_rate": 2.5266524520255864e-05, "loss": 0.008, "step": 1160},
    {"epoch": 2.4840085287846483, "grad_norm": 0.012583576142787933, "learning_rate": 2.515991471215352e-05, "loss": 0.0013, "step": 1165},
    {"epoch": 2.4946695095948828, "grad_norm": 0.01106097362935543, "learning_rate": 2.5053304904051174e-05, "loss": 0.0024, "step": 1170},
    {"epoch": 2.5053304904051172, "grad_norm": 0.0174319539219141, "learning_rate": 2.494669509594883e-05, "loss": 0.0014, "step": 1175},
    {"epoch": 2.5159914712153517, "grad_norm": 0.04720267280936241, "learning_rate": 2.4840085287846483e-05, "loss": 0.0016, "step": 1180},
    {"epoch": 2.526652452025586, "grad_norm": 0.010901020839810371, "learning_rate": 2.4733475479744138e-05, "loss": 0.0028, "step": 1185},
    {"epoch": 2.5373134328358207, "grad_norm": 8.997549057006836, "learning_rate": 2.4626865671641793e-05, "loss": 0.0187, "step": 1190},
    {"epoch": 2.5479744136460556, "grad_norm": 0.0113821467384696, "learning_rate": 2.4520255863539444e-05, "loss": 0.0013, "step": 1195},
    {"epoch": 2.55863539445629, "grad_norm": 0.010736453346908092, "learning_rate": 2.44136460554371e-05, "loss": 0.0013, "step": 1200},
    {"epoch": 2.5692963752665245, "grad_norm": 0.01081756316125393, "learning_rate": 2.4307036247334754e-05, "loss": 0.0013, "step": 1205},
    {"epoch": 2.579957356076759, "grad_norm": 0.01089539285749197, "learning_rate": 2.420042643923241e-05, "loss": 0.0013, "step": 1210},
    {"epoch": 2.5906183368869935, "grad_norm": 0.010750004090368748, "learning_rate": 2.4093816631130063e-05, "loss": 0.0014, "step": 1215},
    {"epoch": 2.6012793176972284, "grad_norm": 0.010633810423314571, "learning_rate": 2.3987206823027718e-05, "loss": 0.0045, "step": 1220},
    {"epoch": 2.611940298507463, "grad_norm": 0.010548440739512444, "learning_rate": 2.3880597014925373e-05, "loss": 0.0013, "step": 1225},
    {"epoch": 2.6226012793176974, "grad_norm": 0.010777181945741177, "learning_rate": 2.3773987206823027e-05, "loss": 0.0016, "step": 1230},
    {"epoch": 2.633262260127932, "grad_norm": 0.010288032703101635, "learning_rate": 2.3667377398720682e-05, "loss": 0.0012, "step": 1235},
    {"epoch": 2.6439232409381663, "grad_norm": 0.29548385739326477, "learning_rate": 2.3560767590618337e-05, "loss": 0.0059, "step": 1240},
    {"epoch": 2.654584221748401, "grad_norm": 0.04246773570775986, "learning_rate": 2.345415778251599e-05, "loss": 0.0016, "step": 1245},
    {"epoch": 2.6652452025586353, "grad_norm": 0.035821229219436646, "learning_rate": 2.3347547974413646e-05, "loss": 0.0015, "step": 1250},
    {"epoch": 2.6759061833688698, "grad_norm": 0.010515713132917881, "learning_rate": 2.32409381663113e-05, "loss": 0.0012, "step": 1255},
    {"epoch": 2.6865671641791042, "grad_norm": 0.010527647100389004, "learning_rate": 2.3134328358208956e-05, "loss": 0.0154, "step": 1260},
    {"epoch": 2.697228144989339, "grad_norm": 0.011587481945753098, "learning_rate": 2.302771855010661e-05, "loss": 0.0013, "step": 1265},
    {"epoch": 2.7078891257995736, "grad_norm": 0.015431459993124008, "learning_rate": 2.2921108742004265e-05, "loss": 0.0014, "step": 1270},
    {"epoch": 2.718550106609808, "grad_norm": 0.010750065557658672, "learning_rate": 2.281449893390192e-05, "loss": 0.0012, "step": 1275},
    {"epoch": 2.7292110874200426, "grad_norm": 0.011150922626256943, "learning_rate": 2.2707889125799575e-05, "loss": 0.003, "step": 1280},
    {"epoch": 2.739872068230277, "grad_norm": 0.01143800001591444, "learning_rate": 2.260127931769723e-05, "loss": 0.0012, "step": 1285},
    {"epoch": 2.750533049040512, "grad_norm": 0.010583270341157913, "learning_rate": 2.2494669509594884e-05, "loss": 0.0012, "step": 1290},
    {"epoch": 2.7611940298507465, "grad_norm": 0.010834074579179287, "learning_rate": 2.238805970149254e-05, "loss": 0.024, "step": 1295},
    {"epoch": 2.771855010660981, "grad_norm": 0.009713114239275455, "learning_rate": 2.2281449893390194e-05, "loss": 0.0011, "step": 1300},
    {"epoch": 2.7825159914712154, "grad_norm": 0.009917298331856728, "learning_rate": 2.217484008528785e-05, "loss": 0.0013, "step": 1305},
    {"epoch": 2.79317697228145, "grad_norm": 0.009798646904528141, "learning_rate": 2.2068230277185503e-05, "loss": 0.0011, "step": 1310},
    {"epoch": 2.8038379530916844, "grad_norm": 4.101874351501465, "learning_rate": 2.1961620469083158e-05, "loss": 0.0263, "step": 1315},
    {"epoch": 2.814498933901919, "grad_norm": 0.01014296617358923, "learning_rate": 2.1855010660980813e-05, "loss": 0.0012, "step": 1320},
    {"epoch": 2.8251599147121533, "grad_norm": 0.009693730622529984, "learning_rate": 2.1748400852878467e-05, "loss": 0.0014, "step": 1325},
    {"epoch": 2.835820895522388, "grad_norm": 0.014164119027554989, "learning_rate": 2.164179104477612e-05, "loss": 0.0012, "step": 1330},
    {"epoch": 2.8464818763326227, "grad_norm": 0.009962335228919983, "learning_rate": 2.1535181236673773e-05, "loss": 0.0012, "step": 1335},
    {"epoch": 2.857142857142857, "grad_norm": 0.010051365941762924, "learning_rate": 2.1428571428571428e-05, "loss": 0.0012, "step": 1340},
    {"epoch": 2.8678038379530917, "grad_norm": 0.013271057978272438, "learning_rate": 2.1321961620469083e-05, "loss": 0.0012, "step": 1345},
    {"epoch": 2.878464818763326, "grad_norm": 0.010365043766796589, "learning_rate": 2.1215351812366738e-05, "loss": 0.0012, "step": 1350},
    {"epoch": 2.8891257995735606, "grad_norm": 0.009932233020663261, "learning_rate": 2.1108742004264392e-05, "loss": 0.0454, "step": 1355},
    {"epoch": 2.8997867803837956, "grad_norm": 0.01092168502509594, "learning_rate": 2.1002132196162047e-05, "loss": 0.0217, "step": 1360},
    {"epoch": 2.91044776119403, "grad_norm": 0.02956121787428856, "learning_rate": 2.0895522388059702e-05, "loss": 0.0014, "step": 1365},
    {"epoch": 2.9211087420042645, "grad_norm": 0.011923235841095448, "learning_rate": 2.0788912579957357e-05, "loss": 0.0014, "step": 1370},
    {"epoch": 2.931769722814499, "grad_norm": 0.012663044966757298, "learning_rate": 2.068230277185501e-05, "loss": 0.0027, "step": 1375},
    {"epoch": 2.9424307036247335, "grad_norm": 0.014775395393371582, "learning_rate": 2.0575692963752666e-05, "loss": 0.0016, "step": 1380},
    {"epoch": 2.953091684434968, "grad_norm": 0.010912032797932625, "learning_rate": 2.046908315565032e-05, "loss": 0.0013, "step": 1385},
    {"epoch": 2.9637526652452024, "grad_norm": 0.009865554980933666, "learning_rate": 2.0362473347547976e-05, "loss": 0.0012, "step": 1390},
    {"epoch": 2.974413646055437, "grad_norm": 0.010289540514349937, "learning_rate": 2.025586353944563e-05, "loss": 0.0014, "step": 1395},
    {"epoch": 2.9850746268656714, "grad_norm": 0.010022145695984364, "learning_rate": 2.0149253731343285e-05, "loss": 0.0011, "step": 1400},
    {"epoch": 2.9957356076759063, "grad_norm": 0.009564647451043129, "learning_rate": 2.004264392324094e-05, "loss": 0.0011, "step": 1405},
    {"epoch": 3.0, "eval_accuracy": 0.9941333333333333, "eval_loss": 0.02565709874033928, "eval_runtime": 17.1228, "eval_samples_per_second": 219.007, "eval_steps_per_second": 6.891, "step": 1407},
    {"epoch": 3.0063965884861408, "grad_norm": 0.009828134439885616, "learning_rate": 1.9936034115138594e-05, "loss": 0.0011, "step": 1410},
    {"epoch": 3.0170575692963753, "grad_norm": 0.010314960032701492, "learning_rate": 1.982942430703625e-05, "loss": 0.0011, "step": 1415},
    {"epoch": 3.0277185501066097, "grad_norm": 0.009500714018940926, "learning_rate": 1.9722814498933904e-05, "loss": 0.0014, "step": 1420},
    {"epoch": 3.038379530916844, "grad_norm": 0.009401720948517323, "learning_rate": 1.961620469083156e-05, "loss": 0.0011, "step": 1425},
    {"epoch": 3.0490405117270787, "grad_norm": 0.010178277269005775, "learning_rate": 1.9509594882729213e-05, "loss": 0.0011, "step": 1430},
    {"epoch": 3.0597014925373136, "grad_norm": 0.009113337844610214, "learning_rate": 1.9402985074626868e-05, "loss": 0.0099, "step": 1435},
    {"epoch": 3.070362473347548, "grad_norm": 0.009055834263563156, "learning_rate": 1.9296375266524523e-05, "loss": 0.0011, "step": 1440},
    {"epoch": 3.0810234541577826, "grad_norm": 0.009133790619671345, "learning_rate": 1.9189765458422178e-05, "loss": 0.0011, "step": 1445},
    {"epoch": 3.091684434968017, "grad_norm": 0.009086220525205135, "learning_rate": 1.9083155650319832e-05, "loss": 0.0011, "step": 1450},
    {"epoch": 3.1023454157782515, "grad_norm": 0.0094444639980793, "learning_rate": 1.8976545842217487e-05, "loss": 0.0011, "step": 1455},
    {"epoch": 3.113006396588486, "grad_norm": 0.009052727371454239, "learning_rate": 1.8869936034115142e-05, "loss": 0.0011, "step": 1460},
    {"epoch": 3.1236673773987205, "grad_norm": 0.01352308876812458, "learning_rate": 1.8763326226012797e-05, "loss": 0.0011, "step": 1465},
    {"epoch": 3.1343283582089554, "grad_norm": 0.008976473473012447, "learning_rate": 1.865671641791045e-05, "loss": 0.0011, "step": 1470},
    {"epoch": 3.14498933901919, "grad_norm": 0.008790930733084679, "learning_rate": 1.8550106609808106e-05, "loss": 0.0012, "step": 1475},
    {"epoch": 3.1556503198294243, "grad_norm": 0.009338583797216415, "learning_rate": 1.8443496801705757e-05, "loss": 0.0068, "step": 1480},
    {"epoch": 3.166311300639659, "grad_norm": 0.009441640228033066, "learning_rate": 1.8336886993603412e-05, "loss": 0.001, "step": 1485},
    {"epoch": 3.1769722814498933, "grad_norm": 0.008873417042195797, "learning_rate": 1.8230277185501067e-05, "loss": 0.001, "step": 1490},
    {"epoch": 3.1876332622601278, "grad_norm": 0.025134578347206116, "learning_rate": 1.812366737739872e-05, "loss": 0.0013, "step": 1495},
    {"epoch": 3.1982942430703627, "grad_norm": 0.00857387762516737, "learning_rate": 1.8017057569296376e-05, "loss": 0.001, "step": 1500},
    {"epoch": 3.208955223880597, "grad_norm": 0.009085839614272118, "learning_rate": 1.791044776119403e-05, "loss": 0.0011, "step": 1505},
    {"epoch": 3.2196162046908317, "grad_norm": 0.008526238612830639, "learning_rate": 1.7803837953091686e-05, "loss": 0.001, "step": 1510},
    {"epoch": 3.230277185501066, "grad_norm": 0.07096794992685318, "learning_rate": 1.769722814498934e-05, "loss": 0.0011, "step": 1515},
    {"epoch": 3.2409381663113006, "grad_norm": 0.00855859275907278, "learning_rate": 1.7590618336886995e-05, "loss": 0.001, "step": 1520},
    {"epoch": 3.251599147121535, "grad_norm": 0.008998198434710503, "learning_rate": 1.7484008528784647e-05, "loss": 0.0011, "step": 1525},
    {"epoch": 3.2622601279317696, "grad_norm": 0.008810482919216156, "learning_rate": 1.73773987206823e-05, "loss": 0.001, "step": 1530},
    {"epoch": 3.272921108742004, "grad_norm": 0.00840053241699934, "learning_rate": 1.7270788912579956e-05, "loss": 0.001, "step": 1535},
    {"epoch": 3.283582089552239, "grad_norm": 0.008688554167747498, "learning_rate": 1.716417910447761e-05, "loss": 0.001, "step": 1540},
    {"epoch": 3.2942430703624734, "grad_norm": 0.009259545244276524, "learning_rate": 1.7057569296375266e-05, "loss": 0.001, "step": 1545},
    {"epoch": 3.304904051172708, "grad_norm": 0.008386638015508652, "learning_rate": 1.695095948827292e-05, "loss": 0.001, "step": 1550},
    {"epoch": 3.3155650319829424, "grad_norm": 0.009084336459636688, "learning_rate": 1.6844349680170575e-05, "loss": 0.001, "step": 1555},
    {"epoch": 3.326226012793177, "grad_norm": 0.008589472621679306, "learning_rate": 1.673773987206823e-05, "loss": 0.001, "step": 1560},
    {"epoch": 3.3368869936034113, "grad_norm": 0.5658795237541199, "learning_rate": 1.6631130063965885e-05, "loss": 0.0012, "step": 1565},
    {"epoch": 3.3475479744136463, "grad_norm": 0.008506689220666885, "learning_rate": 1.652452025586354e-05, "loss": 0.0011, "step": 1570},
    {"epoch": 3.3582089552238807, "grad_norm": 0.009010609239339828, "learning_rate": 1.6417910447761194e-05, "loss": 0.001, "step": 1575},
    {"epoch": 3.368869936034115, "grad_norm": 0.008315706625580788, "learning_rate": 1.631130063965885e-05, "loss": 0.001, "step": 1580},
    {"epoch": 3.3795309168443497, "grad_norm": 0.008143344894051552, "learning_rate": 1.6204690831556504e-05, "loss": 0.001, "step": 1585},
    {"epoch": 3.390191897654584, "grad_norm": 0.008493823930621147, "learning_rate": 1.6098081023454158e-05, "loss": 0.0009, "step": 1590},
    {"epoch": 3.4008528784648187, "grad_norm": 0.008614439517259598, "learning_rate": 1.5991471215351813e-05, "loss": 0.001, "step": 1595},
    {"epoch": 3.411513859275053, "grad_norm": 0.008163101971149445, "learning_rate": 1.5884861407249468e-05, "loss": 0.0009, "step": 1600},
    {"epoch": 3.4221748400852876, "grad_norm": 0.008104193024337292, "learning_rate": 1.5778251599147122e-05, "loss": 0.0009, "step": 1605},
    {"epoch": 3.4328358208955225, "grad_norm": 0.0078506488353014, "learning_rate": 1.5671641791044777e-05, "loss": 0.001, "step": 1610},
    {"epoch": 3.443496801705757, "grad_norm": 0.00830670353025198, "learning_rate": 1.5565031982942432e-05, "loss": 0.0011, "step": 1615},
    {"epoch": 3.4541577825159915, "grad_norm": 0.008246001787483692, "learning_rate": 1.5458422174840087e-05, "loss": 0.0009, "step": 1620},
    {"epoch": 3.464818763326226, "grad_norm": 0.008115461096167564, "learning_rate": 1.535181236673774e-05, "loss": 0.0019, "step": 1625},
    {"epoch": 3.4754797441364604, "grad_norm": 0.008616996929049492, "learning_rate": 1.5245202558635396e-05, "loss": 0.0009, "step": 1630},
    {"epoch": 3.486140724946695, "grad_norm": 0.008351802825927734, "learning_rate": 1.5138592750533051e-05, "loss": 0.0009, "step": 1635},
    {"epoch": 3.49680170575693, "grad_norm": 0.008309202268719673, "learning_rate": 1.5031982942430706e-05, "loss": 0.0051, "step": 1640},
    {"epoch": 3.5074626865671643, "grad_norm": 0.007914500311017036, "learning_rate": 1.4925373134328357e-05, "loss": 0.0009, "step": 1645},
    {"epoch": 3.518123667377399, "grad_norm": 0.007788552902638912, "learning_rate": 1.4818763326226012e-05, "loss": 0.0009, "step": 1650},
    {"epoch": 3.5287846481876333, "grad_norm": 0.007906898856163025, "learning_rate": 1.4712153518123666e-05, "loss": 0.001, "step": 1655},
    {"epoch": 3.5394456289978677, "grad_norm": 0.007877010852098465, "learning_rate": 1.4605543710021321e-05, "loss": 0.0009, "step": 1660},
    {"epoch": 3.550106609808102, "grad_norm": 0.009270151145756245, "learning_rate": 1.4498933901918976e-05, "loss": 0.0009, "step": 1665},
    {"epoch": 3.5607675906183367, "grad_norm": 0.007788475602865219, "learning_rate": 1.439232409381663e-05, "loss": 0.0009, "step": 1670},
    {"epoch": 3.571428571428571, "grad_norm": 0.011030365712940693, "learning_rate": 1.4285714285714285e-05, "loss": 0.001, "step": 1675},
    {"epoch": 3.582089552238806, "grad_norm": 0.010477794334292412, "learning_rate": 1.417910447761194e-05, "loss": 0.0009, "step": 1680},
    {"epoch": 3.5927505330490406, "grad_norm": 0.007875391282141209, "learning_rate": 1.4072494669509595e-05, "loss": 0.0048, "step": 1685},
    {"epoch": 3.603411513859275, "grad_norm": 0.008347186259925365, "learning_rate": 1.396588486140725e-05, "loss": 0.0009, "step": 1690},
    {"epoch": 3.6140724946695095, "grad_norm": 0.007649347186088562, "learning_rate": 1.3859275053304904e-05, "loss": 0.0012, "step": 1695},
    {"epoch": 3.624733475479744, "grad_norm": 0.007521898951381445, "learning_rate": 1.3752665245202559e-05, "loss": 0.0009, "step": 1700},
    {"epoch": 3.635394456289979, "grad_norm": 0.007732005789875984, "learning_rate": 1.3646055437100214e-05, "loss": 0.0009, "step": 1705},
    {"epoch": 3.6460554371002134, "grad_norm": 0.00785753969103098, "learning_rate": 1.3539445628997869e-05, "loss": 0.0009, "step": 1710},
    {"epoch": 3.656716417910448, "grad_norm": 0.02039065957069397, "learning_rate": 1.3432835820895523e-05, "loss": 0.001, "step": 1715},
    {"epoch": 3.6673773987206824, "grad_norm": 0.007418110966682434, "learning_rate": 1.3326226012793178e-05, "loss": 0.0009, "step": 1720},
    {"epoch": 3.678038379530917, "grad_norm": 0.0076897707767784595, "learning_rate": 1.3219616204690833e-05, "loss": 0.0009, "step": 1725},
    {"epoch": 3.6886993603411513, "grad_norm": 0.007577942684292793, "learning_rate": 1.3113006396588488e-05, "loss": 0.0009, "step": 1730},
    {"epoch": 3.699360341151386, "grad_norm": 0.045052897185087204, "learning_rate": 1.3006396588486142e-05, "loss": 0.0087, "step": 1735},
    {"epoch": 3.7100213219616203, "grad_norm": 0.007579253055155277, "learning_rate": 1.2899786780383797e-05, "loss": 0.0009, "step": 1740},
    {"epoch": 3.7206823027718547, "grad_norm": 0.007551407441496849, "learning_rate": 1.2793176972281452e-05, "loss": 0.0009, "step": 1745},
    {"epoch": 3.7313432835820897, "grad_norm": 0.007589246146380901, "learning_rate": 1.2686567164179105e-05, "loss": 0.0009, "step": 1750},
    {"epoch": 3.742004264392324, "grad_norm": 0.007337269838899374, "learning_rate": 1.257995735607676e-05, "loss": 0.0009, "step": 1755},
    {"epoch": 3.7526652452025586, "grad_norm": 0.007588915526866913, "learning_rate": 1.2473347547974414e-05, "loss": 0.0011, "step": 1760},
    {"epoch": 3.763326226012793, "grad_norm": 0.0073504517786204815, "learning_rate": 1.2366737739872069e-05, "loss": 0.0009, "step": 1765},
    {"epoch": 3.7739872068230276, "grad_norm": 0.007319787982851267, "learning_rate": 1.2260127931769722e-05, "loss": 0.0009, "step": 1770},
    {"epoch": 3.7846481876332625, "grad_norm": 0.007190846838057041, "learning_rate": 1.2153518123667377e-05, "loss": 0.0008, "step": 1775},
    {"epoch": 3.795309168443497, "grad_norm": 0.007195090409368277, "learning_rate": 1.2046908315565032e-05, "loss": 0.0008, "step": 1780},
    {"epoch": 3.8059701492537314, "grad_norm": 0.00734932953491807, "learning_rate": 1.1940298507462686e-05, "loss": 0.0008, "step": 1785},
    {"epoch": 3.816631130063966, "grad_norm": 0.008201192133128643, "learning_rate": 1.1833688699360341e-05, "loss": 0.0009, "step": 1790},
    {"epoch": 3.8272921108742004, "grad_norm": 0.007222187705338001, "learning_rate": 1.1727078891257996e-05, "loss": 0.0008, "step": 1795},
    {"epoch": 3.837953091684435, "grad_norm": 0.0077033452689647675, "learning_rate": 1.162046908315565e-05, "loss": 0.0008, "step": 1800},
    {"epoch": 3.8486140724946694, "grad_norm": 0.007204404566437006, "learning_rate": 1.1513859275053305e-05, "loss": 0.0048,
|
|
"step": 1805
|
|
},
|
|
{
|
|
"epoch": 3.859275053304904,
|
|
"grad_norm": 0.009604867547750473,
|
|
"learning_rate": 1.140724946695096e-05,
|
|
"loss": 0.0009,
|
|
"step": 1810
|
|
},
|
|
{
|
|
"epoch": 3.8699360341151388,
|
|
"grad_norm": 0.017421873286366463,
|
|
"learning_rate": 1.1300639658848615e-05,
|
|
"loss": 0.0012,
|
|
"step": 1815
|
|
},
|
|
{
|
|
"epoch": 3.8805970149253732,
|
|
"grad_norm": 0.09896726161241531,
|
|
"learning_rate": 1.119402985074627e-05,
|
|
"loss": 0.0016,
|
|
"step": 1820
|
|
},
|
|
{
|
|
"epoch": 3.8912579957356077,
|
|
"grad_norm": 0.007400326896458864,
|
|
"learning_rate": 1.1087420042643924e-05,
|
|
"loss": 0.0009,
|
|
"step": 1825
|
|
},
|
|
{
|
|
"epoch": 3.901918976545842,
|
|
"grad_norm": 0.007371744140982628,
|
|
"learning_rate": 1.0980810234541579e-05,
|
|
"loss": 0.0008,
|
|
"step": 1830
|
|
},
|
|
{
|
|
"epoch": 3.9125799573560767,
|
|
"grad_norm": 0.009902180172502995,
|
|
"learning_rate": 1.0874200426439234e-05,
|
|
"loss": 0.0008,
|
|
"step": 1835
|
|
},
|
|
{
|
|
"epoch": 3.923240938166311,
|
|
"grad_norm": 0.006909618154168129,
|
|
"learning_rate": 1.0767590618336887e-05,
|
|
"loss": 0.0012,
|
|
"step": 1840
|
|
},
|
|
{
|
|
"epoch": 3.933901918976546,
|
|
"grad_norm": 0.007653252687305212,
|
|
"learning_rate": 1.0660980810234541e-05,
|
|
"loss": 0.0008,
|
|
"step": 1845
|
|
},
|
|
{
|
|
"epoch": 3.9445628997867805,
|
|
"grad_norm": 0.006854928098618984,
|
|
"learning_rate": 1.0554371002132196e-05,
|
|
"loss": 0.0008,
|
|
"step": 1850
|
|
},
|
|
{
|
|
"epoch": 3.955223880597015,
|
|
"grad_norm": 0.007255952339619398,
|
|
"learning_rate": 1.0447761194029851e-05,
|
|
"loss": 0.0008,
|
|
"step": 1855
|
|
},
|
|
{
|
|
"epoch": 3.9658848614072495,
|
|
"grad_norm": 0.007009653374552727,
|
|
"learning_rate": 1.0341151385927506e-05,
|
|
"loss": 0.0008,
|
|
"step": 1860
|
|
},
|
|
{
|
|
"epoch": 3.976545842217484,
|
|
"grad_norm": 0.006912988144904375,
|
|
"learning_rate": 1.023454157782516e-05,
|
|
"loss": 0.0008,
|
|
"step": 1865
|
|
},
|
|
{
|
|
"epoch": 3.9872068230277184,
|
|
"grad_norm": 0.006904042325913906,
|
|
"learning_rate": 1.0127931769722815e-05,
|
|
"loss": 0.0014,
|
|
"step": 1870
|
|
},
|
|
{
|
|
"epoch": 3.997867803837953,
|
|
"grad_norm": 0.006982207763940096,
|
|
"learning_rate": 1.002132196162047e-05,
|
|
"loss": 0.0008,
|
|
"step": 1875
|
|
},
|
|
{
|
|
"epoch": 4.0,
|
|
"eval_accuracy": 0.9936,
|
|
"eval_loss": 0.024724164977669716,
|
|
"eval_runtime": 17.1715,
|
|
"eval_samples_per_second": 218.385,
|
|
"eval_steps_per_second": 6.872,
|
|
"step": 1876
|
|
},
|
|
{
|
|
"epoch": 4.008528784648187,
|
|
"grad_norm": 0.006963193882256746,
|
|
"learning_rate": 9.914712153518125e-06,
|
|
"loss": 0.0008,
|
|
"step": 1880
|
|
},
|
|
{
|
|
"epoch": 4.019189765458422,
|
|
"grad_norm": 0.0072043538093566895,
|
|
"learning_rate": 9.80810234541578e-06,
|
|
"loss": 0.001,
|
|
"step": 1885
|
|
},
|
|
{
|
|
"epoch": 4.029850746268656,
|
|
"grad_norm": 0.007021595723927021,
|
|
"learning_rate": 9.701492537313434e-06,
|
|
"loss": 0.0008,
|
|
"step": 1890
|
|
},
|
|
{
|
|
"epoch": 4.040511727078891,
|
|
"grad_norm": 0.0073632909916341305,
|
|
"learning_rate": 9.594882729211089e-06,
|
|
"loss": 0.0008,
|
|
"step": 1895
|
|
},
|
|
{
|
|
"epoch": 4.051172707889126,
|
|
"grad_norm": 0.006964248139411211,
|
|
"learning_rate": 9.488272921108744e-06,
|
|
"loss": 0.0011,
|
|
"step": 1900
|
|
},
|
|
{
|
|
"epoch": 4.061833688699361,
|
|
"grad_norm": 0.006830501835793257,
|
|
"learning_rate": 9.381663113006398e-06,
|
|
"loss": 0.0011,
|
|
"step": 1905
|
|
},
|
|
{
|
|
"epoch": 4.072494669509595,
|
|
"grad_norm": 0.0067960359156131744,
|
|
"learning_rate": 9.275053304904053e-06,
|
|
"loss": 0.0008,
|
|
"step": 1910
|
|
},
|
|
{
|
|
"epoch": 4.08315565031983,
|
|
"grad_norm": 0.007004799321293831,
|
|
"learning_rate": 9.168443496801706e-06,
|
|
"loss": 0.0008,
|
|
"step": 1915
|
|
},
|
|
{
|
|
"epoch": 4.093816631130064,
|
|
"grad_norm": 0.006617402657866478,
|
|
"learning_rate": 9.06183368869936e-06,
|
|
"loss": 0.0011,
|
|
"step": 1920
|
|
},
|
|
{
|
|
"epoch": 4.104477611940299,
|
|
"grad_norm": 0.006631907541304827,
|
|
"learning_rate": 8.955223880597016e-06,
|
|
"loss": 0.0008,
|
|
"step": 1925
|
|
},
|
|
{
|
|
"epoch": 4.115138592750533,
|
|
"grad_norm": 0.00678821187466383,
|
|
"learning_rate": 8.84861407249467e-06,
|
|
"loss": 0.0009,
|
|
"step": 1930
|
|
},
|
|
{
|
|
"epoch": 4.1257995735607675,
|
|
"grad_norm": 0.007164095528423786,
|
|
"learning_rate": 8.742004264392323e-06,
|
|
"loss": 0.0008,
|
|
"step": 1935
|
|
},
|
|
{
|
|
"epoch": 4.136460554371002,
|
|
"grad_norm": 0.006931856274604797,
|
|
"learning_rate": 8.635394456289978e-06,
|
|
"loss": 0.0008,
|
|
"step": 1940
|
|
},
|
|
{
|
|
"epoch": 4.1471215351812365,
|
|
"grad_norm": 0.006960602011531591,
|
|
"learning_rate": 8.528784648187633e-06,
|
|
"loss": 0.0008,
|
|
"step": 1945
|
|
},
|
|
{
|
|
"epoch": 4.157782515991471,
|
|
"grad_norm": 0.006681125611066818,
|
|
"learning_rate": 8.422174840085288e-06,
|
|
"loss": 0.0008,
|
|
"step": 1950
|
|
},
|
|
{
|
|
"epoch": 4.1684434968017055,
|
|
"grad_norm": 0.007712583988904953,
|
|
"learning_rate": 8.315565031982942e-06,
|
|
"loss": 0.0035,
|
|
"step": 1955
|
|
},
|
|
{
|
|
"epoch": 4.17910447761194,
|
|
"grad_norm": 0.007660465780645609,
|
|
"learning_rate": 8.208955223880597e-06,
|
|
"loss": 0.0008,
|
|
"step": 1960
|
|
},
|
|
{
|
|
"epoch": 4.189765458422174,
|
|
"grad_norm": 0.006708270870149136,
|
|
"learning_rate": 8.102345415778252e-06,
|
|
"loss": 0.0008,
|
|
"step": 1965
|
|
},
|
|
{
|
|
"epoch": 4.20042643923241,
|
|
"grad_norm": 0.006539624184370041,
|
|
"learning_rate": 7.995735607675907e-06,
|
|
"loss": 0.0008,
|
|
"step": 1970
|
|
},
|
|
{
|
|
"epoch": 4.211087420042644,
|
|
"grad_norm": 0.007015930488705635,
|
|
"learning_rate": 7.889125799573561e-06,
|
|
"loss": 0.0008,
|
|
"step": 1975
|
|
},
|
|
{
|
|
"epoch": 4.221748400852879,
|
|
"grad_norm": 0.006601154338568449,
|
|
"learning_rate": 7.782515991471216e-06,
|
|
"loss": 0.0008,
|
|
"step": 1980
|
|
},
|
|
{
|
|
"epoch": 4.232409381663113,
|
|
"grad_norm": 0.2629002034664154,
|
|
"learning_rate": 7.67590618336887e-06,
|
|
"loss": 0.0081,
|
|
"step": 1985
|
|
},
|
|
{
|
|
"epoch": 4.243070362473348,
|
|
"grad_norm": 0.0067113423720002174,
|
|
"learning_rate": 7.5692963752665255e-06,
|
|
"loss": 0.0008,
|
|
"step": 1990
|
|
},
|
|
{
|
|
"epoch": 4.253731343283582,
|
|
"grad_norm": 0.0066283149644732475,
|
|
"learning_rate": 7.4626865671641785e-06,
|
|
"loss": 0.0008,
|
|
"step": 1995
|
|
},
|
|
{
|
|
"epoch": 4.264392324093817,
|
|
"grad_norm": 0.008227039128541946,
|
|
"learning_rate": 7.356076759061833e-06,
|
|
"loss": 0.0008,
|
|
"step": 2000
|
|
},
|
|
{
|
|
"epoch": 4.275053304904051,
|
|
"grad_norm": 0.006502778269350529,
|
|
"learning_rate": 7.249466950959488e-06,
|
|
"loss": 0.0015,
|
|
"step": 2005
|
|
},
|
|
{
|
|
"epoch": 4.285714285714286,
|
|
"grad_norm": 0.006729608867317438,
|
|
"learning_rate": 7.142857142857143e-06,
|
|
"loss": 0.0008,
|
|
"step": 2010
|
|
},
|
|
{
|
|
"epoch": 4.29637526652452,
|
|
"grad_norm": 0.007483046036213636,
|
|
"learning_rate": 7.0362473347547975e-06,
|
|
"loss": 0.0008,
|
|
"step": 2015
|
|
},
|
|
{
|
|
"epoch": 4.3070362473347545,
|
|
"grad_norm": 0.006529318168759346,
|
|
"learning_rate": 6.929637526652452e-06,
|
|
"loss": 0.0008,
|
|
"step": 2020
|
|
},
|
|
{
|
|
"epoch": 4.317697228144989,
|
|
"grad_norm": 0.12051839381456375,
|
|
"learning_rate": 6.823027718550107e-06,
|
|
"loss": 0.0037,
|
|
"step": 2025
|
|
},
|
|
{
|
|
"epoch": 4.3283582089552235,
|
|
"grad_norm": 0.006774805020540953,
|
|
"learning_rate": 6.716417910447762e-06,
|
|
"loss": 0.0008,
|
|
"step": 2030
|
|
},
|
|
{
|
|
"epoch": 4.339019189765459,
|
|
"grad_norm": 0.007289433386176825,
|
|
"learning_rate": 6.609808102345416e-06,
|
|
"loss": 0.0008,
|
|
"step": 2035
|
|
},
|
|
{
|
|
"epoch": 4.349680170575693,
|
|
"grad_norm": 0.0072594922967255116,
|
|
"learning_rate": 6.503198294243071e-06,
|
|
"loss": 0.0008,
|
|
"step": 2040
|
|
},
|
|
{
|
|
"epoch": 4.360341151385928,
|
|
"grad_norm": 0.007265009917318821,
|
|
"learning_rate": 6.396588486140726e-06,
|
|
"loss": 0.0008,
|
|
"step": 2045
|
|
},
|
|
{
|
|
"epoch": 4.371002132196162,
|
|
"grad_norm": 0.00652899919077754,
|
|
"learning_rate": 6.28997867803838e-06,
|
|
"loss": 0.0008,
|
|
"step": 2050
|
|
},
|
|
{
|
|
"epoch": 4.381663113006397,
|
|
"grad_norm": 0.0065870932303369045,
|
|
"learning_rate": 6.1833688699360345e-06,
|
|
"loss": 0.0008,
|
|
"step": 2055
|
|
},
|
|
{
|
|
"epoch": 4.392324093816631,
|
|
"grad_norm": 0.007370310835540295,
|
|
"learning_rate": 6.076759061833688e-06,
|
|
"loss": 0.0008,
|
|
"step": 2060
|
|
},
|
|
{
|
|
"epoch": 4.402985074626866,
|
|
"grad_norm": 0.006778411567211151,
|
|
"learning_rate": 5.970149253731343e-06,
|
|
"loss": 0.0008,
|
|
"step": 2065
|
|
},
|
|
{
|
|
"epoch": 4.4136460554371,
|
|
"grad_norm": 0.006409814581274986,
|
|
"learning_rate": 5.863539445628998e-06,
|
|
"loss": 0.0007,
|
|
"step": 2070
|
|
},
|
|
{
|
|
"epoch": 4.424307036247335,
|
|
"grad_norm": 0.025670163333415985,
|
|
"learning_rate": 5.756929637526653e-06,
|
|
"loss": 0.001,
|
|
"step": 2075
|
|
},
|
|
{
|
|
"epoch": 4.434968017057569,
|
|
"grad_norm": 0.006416319403797388,
|
|
"learning_rate": 5.650319829424307e-06,
|
|
"loss": 0.0007,
|
|
"step": 2080
|
|
},
|
|
{
|
|
"epoch": 4.445628997867804,
|
|
"grad_norm": 0.006596830673515797,
|
|
"learning_rate": 5.543710021321962e-06,
|
|
"loss": 0.0008,
|
|
"step": 2085
|
|
},
|
|
{
|
|
"epoch": 4.456289978678038,
|
|
"grad_norm": 0.007961906492710114,
|
|
"learning_rate": 5.437100213219617e-06,
|
|
"loss": 0.0008,
|
|
"step": 2090
|
|
},
|
|
{
|
|
"epoch": 4.466950959488273,
|
|
"grad_norm": 0.006419972516596317,
|
|
"learning_rate": 5.330490405117271e-06,
|
|
"loss": 0.0008,
|
|
"step": 2095
|
|
},
|
|
{
|
|
"epoch": 4.477611940298507,
|
|
"grad_norm": 0.006359638646245003,
|
|
"learning_rate": 5.2238805970149255e-06,
|
|
"loss": 0.0007,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 4.4882729211087415,
|
|
"grad_norm": 0.0067171575501561165,
|
|
"learning_rate": 5.11727078891258e-06,
|
|
"loss": 0.0007,
|
|
"step": 2105
|
|
},
|
|
{
|
|
"epoch": 4.498933901918977,
|
|
"grad_norm": 0.006516472902148962,
|
|
"learning_rate": 5.010660980810235e-06,
|
|
"loss": 0.0009,
|
|
"step": 2110
|
|
},
|
|
{
|
|
"epoch": 4.509594882729211,
|
|
"grad_norm": 0.006364680361002684,
|
|
"learning_rate": 4.90405117270789e-06,
|
|
"loss": 0.0007,
|
|
"step": 2115
|
|
},
|
|
{
|
|
"epoch": 4.520255863539446,
|
|
"grad_norm": 0.006779138930141926,
|
|
"learning_rate": 4.797441364605544e-06,
|
|
"loss": 0.0008,
|
|
"step": 2120
|
|
},
|
|
{
|
|
"epoch": 4.53091684434968,
|
|
"grad_norm": 0.007456774823367596,
|
|
"learning_rate": 4.690831556503199e-06,
|
|
"loss": 0.0007,
|
|
"step": 2125
|
|
},
|
|
{
|
|
"epoch": 4.541577825159915,
|
|
"grad_norm": 0.006384179461747408,
|
|
"learning_rate": 4.584221748400853e-06,
|
|
"loss": 0.0007,
|
|
"step": 2130
|
|
},
|
|
{
|
|
"epoch": 4.552238805970149,
|
|
"grad_norm": 0.006552668754011393,
|
|
"learning_rate": 4.477611940298508e-06,
|
|
"loss": 0.0007,
|
|
"step": 2135
|
|
},
|
|
{
|
|
"epoch": 4.562899786780384,
|
|
"grad_norm": 0.006376944482326508,
|
|
"learning_rate": 4.371002132196162e-06,
|
|
"loss": 0.0007,
|
|
"step": 2140
|
|
},
|
|
{
|
|
"epoch": 4.573560767590618,
|
|
"grad_norm": 0.006424845661967993,
|
|
"learning_rate": 4.264392324093816e-06,
|
|
"loss": 0.0008,
|
|
"step": 2145
|
|
},
|
|
{
|
|
"epoch": 4.584221748400853,
|
|
"grad_norm": 0.007834420539438725,
|
|
"learning_rate": 4.157782515991471e-06,
|
|
"loss": 0.0007,
|
|
"step": 2150
|
|
},
|
|
{
|
|
"epoch": 4.594882729211087,
|
|
"grad_norm": 0.00667730113491416,
|
|
"learning_rate": 4.051172707889126e-06,
|
|
"loss": 0.0007,
|
|
"step": 2155
|
|
},
|
|
{
|
|
"epoch": 4.605543710021322,
|
|
"grad_norm": 0.006556932348757982,
|
|
"learning_rate": 3.944562899786781e-06,
|
|
"loss": 0.0007,
|
|
"step": 2160
|
|
},
|
|
{
|
|
"epoch": 4.616204690831556,
|
|
"grad_norm": 0.006338969338685274,
|
|
"learning_rate": 3.837953091684435e-06,
|
|
"loss": 0.0007,
|
|
"step": 2165
|
|
},
|
|
{
|
|
"epoch": 4.6268656716417915,
|
|
"grad_norm": 0.006363058462738991,
|
|
"learning_rate": 3.7313432835820893e-06,
|
|
"loss": 0.0007,
|
|
"step": 2170
|
|
},
|
|
{
|
|
"epoch": 4.637526652452026,
|
|
"grad_norm": 0.006266034673899412,
|
|
"learning_rate": 3.624733475479744e-06,
|
|
"loss": 0.0008,
|
|
"step": 2175
|
|
},
|
|
{
|
|
"epoch": 4.6481876332622605,
|
|
"grad_norm": 0.006374978460371494,
|
|
"learning_rate": 3.5181236673773987e-06,
|
|
"loss": 0.0007,
|
|
"step": 2180
|
|
},
|
|
{
|
|
"epoch": 4.658848614072495,
|
|
"grad_norm": 0.006381528452038765,
|
|
"learning_rate": 3.4115138592750535e-06,
|
|
"loss": 0.0007,
|
|
"step": 2185
|
|
},
|
|
{
|
|
"epoch": 4.669509594882729,
|
|
"grad_norm": 0.00640105502679944,
|
|
"learning_rate": 3.304904051172708e-06,
|
|
"loss": 0.0007,
|
|
"step": 2190
|
|
},
|
|
{
|
|
"epoch": 4.680170575692964,
|
|
"grad_norm": 0.006519278045743704,
|
|
"learning_rate": 3.198294243070363e-06,
|
|
"loss": 0.0007,
|
|
"step": 2195
|
|
},
|
|
{
|
|
"epoch": 4.690831556503198,
|
|
"grad_norm": 0.0064932783134281635,
|
|
"learning_rate": 3.0916844349680173e-06,
|
|
"loss": 0.0007,
|
|
"step": 2200
|
|
},
|
|
{
|
|
"epoch": 4.701492537313433,
|
|
"grad_norm": 0.006555797997862101,
|
|
"learning_rate": 2.9850746268656716e-06,
|
|
"loss": 0.0007,
|
|
"step": 2205
|
|
},
|
|
{
|
|
"epoch": 4.712153518123667,
|
|
"grad_norm": 0.006301179528236389,
|
|
"learning_rate": 2.8784648187633263e-06,
|
|
"loss": 0.0007,
|
|
"step": 2210
|
|
},
|
|
{
|
|
"epoch": 4.722814498933902,
|
|
"grad_norm": 0.20231878757476807,
|
|
"learning_rate": 2.771855010660981e-06,
|
|
"loss": 0.0069,
|
|
"step": 2215
|
|
},
|
|
{
|
|
"epoch": 4.733475479744136,
|
|
"grad_norm": 0.00633511645719409,
|
|
"learning_rate": 2.6652452025586354e-06,
|
|
"loss": 0.0007,
|
|
"step": 2220
|
|
},
|
|
{
|
|
"epoch": 4.744136460554371,
|
|
"grad_norm": 0.006273421458899975,
|
|
"learning_rate": 2.55863539445629e-06,
|
|
"loss": 0.0007,
|
|
"step": 2225
|
|
},
|
|
{
|
|
"epoch": 4.754797441364605,
|
|
"grad_norm": 0.006471653934568167,
|
|
"learning_rate": 2.452025586353945e-06,
|
|
"loss": 0.0007,
|
|
"step": 2230
|
|
},
|
|
{
|
|
"epoch": 4.76545842217484,
|
|
"grad_norm": 0.0072460053488612175,
|
|
"learning_rate": 2.3454157782515996e-06,
|
|
"loss": 0.0009,
|
|
"step": 2235
|
|
},
|
|
{
|
|
"epoch": 4.776119402985074,
|
|
"grad_norm": 0.006297562271356583,
|
|
"learning_rate": 2.238805970149254e-06,
|
|
"loss": 0.0007,
|
|
"step": 2240
|
|
},
|
|
{
|
|
"epoch": 4.786780383795309,
|
|
"grad_norm": 0.006563864648342133,
|
|
"learning_rate": 2.132196162046908e-06,
|
|
"loss": 0.0007,
|
|
"step": 2245
|
|
},
|
|
{
|
|
"epoch": 4.797441364605544,
|
|
"grad_norm": 0.006317877676337957,
|
|
"learning_rate": 2.025586353944563e-06,
|
|
"loss": 0.0007,
|
|
"step": 2250
|
|
},
|
|
{
|
|
"epoch": 4.8081023454157785,
|
|
"grad_norm": 0.006284181959927082,
|
|
"learning_rate": 1.9189765458422177e-06,
|
|
"loss": 0.0008,
|
|
"step": 2255
|
|
},
|
|
{
|
|
"epoch": 4.818763326226013,
|
|
"grad_norm": 0.0062506478279829025,
|
|
"learning_rate": 1.812366737739872e-06,
|
|
"loss": 0.0007,
|
|
"step": 2260
|
|
},
|
|
{
|
|
"epoch": 4.8294243070362475,
|
|
"grad_norm": 0.006383291445672512,
|
|
"learning_rate": 1.7057569296375267e-06,
|
|
"loss": 0.0007,
|
|
"step": 2265
|
|
},
|
|
{
|
|
"epoch": 4.840085287846482,
|
|
"grad_norm": 0.006247894372791052,
|
|
"learning_rate": 1.5991471215351815e-06,
|
|
"loss": 0.0007,
|
|
"step": 2270
|
|
},
|
|
{
|
|
"epoch": 4.850746268656716,
|
|
"grad_norm": 0.006671397481113672,
|
|
"learning_rate": 1.4925373134328358e-06,
|
|
"loss": 0.0007,
|
|
"step": 2275
|
|
},
|
|
{
|
|
"epoch": 4.861407249466951,
|
|
"grad_norm": 0.006213662214577198,
|
|
"learning_rate": 1.3859275053304905e-06,
|
|
"loss": 0.0008,
|
|
"step": 2280
|
|
},
|
|
{
|
|
"epoch": 4.872068230277185,
|
|
"grad_norm": 0.006177013274282217,
|
|
"learning_rate": 1.279317697228145e-06,
|
|
"loss": 0.0007,
|
|
"step": 2285
|
|
},
|
|
{
|
|
"epoch": 4.88272921108742,
|
|
"grad_norm": 0.006617304403334856,
|
|
"learning_rate": 1.1727078891257998e-06,
|
|
"loss": 0.0007,
|
|
"step": 2290
|
|
},
|
|
{
|
|
"epoch": 4.893390191897654,
|
|
"grad_norm": 0.006598794367164373,
|
|
"learning_rate": 1.066098081023454e-06,
|
|
"loss": 0.0034,
|
|
"step": 2295
|
|
},
|
|
{
|
|
"epoch": 4.904051172707889,
|
|
"grad_norm": 0.006416900549083948,
|
|
"learning_rate": 9.594882729211088e-07,
|
|
"loss": 0.0008,
|
|
"step": 2300
|
|
},
|
|
{
|
|
"epoch": 4.914712153518123,
|
|
"grad_norm": 0.0069372220896184444,
|
|
"learning_rate": 8.528784648187634e-07,
|
|
"loss": 0.0007,
|
|
"step": 2305
|
|
},
|
|
{
|
|
"epoch": 4.925373134328359,
|
|
"grad_norm": 0.006252443417906761,
|
|
"learning_rate": 7.462686567164179e-07,
|
|
"loss": 0.0007,
|
|
"step": 2310
|
|
},
|
|
{
|
|
"epoch": 4.936034115138593,
|
|
"grad_norm": 0.009452194906771183,
|
|
"learning_rate": 6.396588486140725e-07,
|
|
"loss": 0.0009,
|
|
"step": 2315
|
|
},
|
|
{
|
|
"epoch": 4.946695095948828,
|
|
"grad_norm": 0.006242725532501936,
|
|
"learning_rate": 5.33049040511727e-07,
|
|
"loss": 0.0007,
|
|
"step": 2320
|
|
},
|
|
{
|
|
"epoch": 4.957356076759062,
|
|
"grad_norm": 0.006272901315242052,
|
|
"learning_rate": 4.264392324093817e-07,
|
|
"loss": 0.0007,
|
|
"step": 2325
|
|
},
|
|
{
|
|
"epoch": 4.968017057569297,
|
|
"grad_norm": 0.006333344615995884,
|
|
"learning_rate": 3.1982942430703626e-07,
|
|
"loss": 0.0007,
|
|
"step": 2330
|
|
},
|
|
{
|
|
"epoch": 4.978678038379531,
|
|
"grad_norm": 0.02618986926972866,
|
|
"learning_rate": 2.1321961620469084e-07,
|
|
"loss": 0.0009,
|
|
"step": 2335
|
|
},
|
|
{
|
|
"epoch": 4.9893390191897655,
|
|
"grad_norm": 0.006180465221405029,
|
|
"learning_rate": 1.0660980810234542e-07,
|
|
"loss": 0.0007,
|
|
"step": 2340
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"grad_norm": 0.009060376323759556,
|
|
"learning_rate": 0.0,
|
|
"loss": 0.0007,
|
|
"step": 2345
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"eval_accuracy": 0.9936,
|
|
"eval_loss": 0.02515118010342121,
|
|
"eval_runtime": 16.7067,
|
|
"eval_samples_per_second": 224.461,
|
|
"eval_steps_per_second": 7.063,
|
|
"step": 2345
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"step": 2345,
|
|
"total_flos": 5.8118992210944e+18,
|
|
"train_loss": 0.015574438872236782,
|
|
"train_runtime": 768.242,
|
|
"train_samples_per_second": 97.625,
|
|
"train_steps_per_second": 3.052
|
|
}
|
|
],
|
|
"logging_steps": 5,
|
|
"max_steps": 2345,
|
|
"num_input_tokens_seen": 0,
|
|
"num_train_epochs": 5,
|
|
"save_steps": 500,
|
|
"stateful_callbacks": {
|
|
"TrainerControl": {
|
|
"args": {
|
|
"should_epoch_stop": false,
|
|
"should_evaluate": false,
|
|
"should_log": false,
|
|
"should_save": true,
|
|
"should_training_stop": true
|
|
},
|
|
"attributes": {}
|
|
}
|
|
},
|
|
"total_flos": 5.8118992210944e+18,
|
|
"train_batch_size": 32,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|
|
|