|
{
|
|
"best_metric": 0.07764233648777008,
|
|
"best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles3_seed1_q2_complexity\\checkpoint-469",
|
|
"epoch": 5.0,
|
|
"eval_steps": 500,
|
|
"global_step": 2345,
|
|
"is_hyper_param_search": false,
|
|
"is_local_process_zero": true,
|
|
"is_world_process_zero": true,
|
|
"log_history": [
|
|
{
|
|
"epoch": 0.010660980810234541,
|
|
"grad_norm": 0.10418372601270676,
|
|
"learning_rate": 4.989339019189766e-05,
|
|
"loss": 0.0007,
|
|
"step": 5
|
|
},
|
|
{
|
|
"epoch": 0.021321961620469083,
|
|
"grad_norm": 0.0009404171141795814,
|
|
"learning_rate": 4.978678038379531e-05,
|
|
"loss": 0.0306,
|
|
"step": 10
|
|
},
|
|
{
|
|
"epoch": 0.031982942430703626,
|
|
"grad_norm": 0.0019161738455295563,
|
|
"learning_rate": 4.9680170575692967e-05,
|
|
"loss": 0.0002,
|
|
"step": 15
|
|
},
|
|
{
|
|
"epoch": 0.042643923240938165,
|
|
"grad_norm": 0.012494619004428387,
|
|
"learning_rate": 4.957356076759062e-05,
|
|
"loss": 0.0021,
|
|
"step": 20
|
|
},
|
|
{
|
|
"epoch": 0.053304904051172705,
|
|
"grad_norm": 0.13835696876049042,
|
|
"learning_rate": 4.9466950959488276e-05,
|
|
"loss": 0.0006,
|
|
"step": 25
|
|
},
|
|
{
|
|
"epoch": 0.06396588486140725,
|
|
"grad_norm": 0.001739045837894082,
|
|
"learning_rate": 4.936034115138593e-05,
|
|
"loss": 0.0425,
|
|
"step": 30
|
|
},
|
|
{
|
|
"epoch": 0.07462686567164178,
|
|
"grad_norm": 0.0008469136082567275,
|
|
"learning_rate": 4.9253731343283586e-05,
|
|
"loss": 0.0002,
|
|
"step": 35
|
|
},
|
|
{
|
|
"epoch": 0.08528784648187633,
|
|
"grad_norm": 0.07120460271835327,
|
|
"learning_rate": 4.914712153518124e-05,
|
|
"loss": 0.0005,
|
|
"step": 40
|
|
},
|
|
{
|
|
"epoch": 0.09594882729211088,
|
|
"grad_norm": 6.837788105010986,
|
|
"learning_rate": 4.904051172707889e-05,
|
|
"loss": 0.0047,
|
|
"step": 45
|
|
},
|
|
{
|
|
"epoch": 0.10660980810234541,
|
|
"grad_norm": 0.005607880651950836,
|
|
"learning_rate": 4.893390191897655e-05,
|
|
"loss": 0.0002,
|
|
"step": 50
|
|
},
|
|
{
|
|
"epoch": 0.11727078891257996,
|
|
"grad_norm": 0.0008682875195518136,
|
|
"learning_rate": 4.88272921108742e-05,
|
|
"loss": 0.0011,
|
|
"step": 55
|
|
},
|
|
{
|
|
"epoch": 0.1279317697228145,
|
|
"grad_norm": 0.059712864458560944,
|
|
"learning_rate": 4.872068230277186e-05,
|
|
"loss": 0.0001,
|
|
"step": 60
|
|
},
|
|
{
|
|
"epoch": 0.13859275053304904,
|
|
"grad_norm": 0.05684630200266838,
|
|
"learning_rate": 4.861407249466951e-05,
|
|
"loss": 0.0002,
|
|
"step": 65
|
|
},
|
|
{
|
|
"epoch": 0.14925373134328357,
|
|
"grad_norm": 0.023246610537171364,
|
|
"learning_rate": 4.850746268656717e-05,
|
|
"loss": 0.0171,
|
|
"step": 70
|
|
},
|
|
{
|
|
"epoch": 0.15991471215351813,
|
|
"grad_norm": 0.00399391446262598,
|
|
"learning_rate": 4.840085287846482e-05,
|
|
"loss": 0.0733,
|
|
"step": 75
|
|
},
|
|
{
|
|
"epoch": 0.17057569296375266,
|
|
"grad_norm": 0.0014481056714430451,
|
|
"learning_rate": 4.829424307036248e-05,
|
|
"loss": 0.0004,
|
|
"step": 80
|
|
},
|
|
{
|
|
"epoch": 0.1812366737739872,
|
|
"grad_norm": 0.275854229927063,
|
|
"learning_rate": 4.8187633262260126e-05,
|
|
"loss": 0.0004,
|
|
"step": 85
|
|
},
|
|
{
|
|
"epoch": 0.19189765458422176,
|
|
"grad_norm": 12.864147186279297,
|
|
"learning_rate": 4.808102345415779e-05,
|
|
"loss": 0.0303,
|
|
"step": 90
|
|
},
|
|
{
|
|
"epoch": 0.2025586353944563,
|
|
"grad_norm": 0.032325007021427155,
|
|
"learning_rate": 4.7974413646055436e-05,
|
|
"loss": 0.0002,
|
|
"step": 95
|
|
},
|
|
{
|
|
"epoch": 0.21321961620469082,
|
|
"grad_norm": 4.2547993659973145,
|
|
"learning_rate": 4.78678038379531e-05,
|
|
"loss": 0.0336,
|
|
"step": 100
|
|
},
|
|
{
|
|
"epoch": 0.22388059701492538,
|
|
"grad_norm": 0.48527249693870544,
|
|
"learning_rate": 4.7761194029850745e-05,
|
|
"loss": 0.0056,
|
|
"step": 105
|
|
},
|
|
{
|
|
"epoch": 0.2345415778251599,
|
|
"grad_norm": 7.385329723358154,
|
|
"learning_rate": 4.765458422174841e-05,
|
|
"loss": 0.0062,
|
|
"step": 110
|
|
},
|
|
{
|
|
"epoch": 0.24520255863539445,
|
|
"grad_norm": 0.05659874528646469,
|
|
"learning_rate": 4.7547974413646055e-05,
|
|
"loss": 0.0003,
|
|
"step": 115
|
|
},
|
|
{
|
|
"epoch": 0.255863539445629,
|
|
"grad_norm": 0.00441378029063344,
|
|
"learning_rate": 4.7441364605543716e-05,
|
|
"loss": 0.0283,
|
|
"step": 120
|
|
},
|
|
{
|
|
"epoch": 0.26652452025586354,
|
|
"grad_norm": 1.9568856954574585,
|
|
"learning_rate": 4.7334754797441364e-05,
|
|
"loss": 0.0114,
|
|
"step": 125
|
|
},
|
|
{
|
|
"epoch": 0.2771855010660981,
|
|
"grad_norm": 0.1403774619102478,
|
|
"learning_rate": 4.7228144989339026e-05,
|
|
"loss": 0.0106,
|
|
"step": 130
|
|
},
|
|
{
|
|
"epoch": 0.2878464818763326,
|
|
"grad_norm": 0.009321413934230804,
|
|
"learning_rate": 4.7121535181236674e-05,
|
|
"loss": 0.0023,
|
|
"step": 135
|
|
},
|
|
{
|
|
"epoch": 0.29850746268656714,
|
|
"grad_norm": 0.12014082074165344,
|
|
"learning_rate": 4.7014925373134335e-05,
|
|
"loss": 0.0003,
|
|
"step": 140
|
|
},
|
|
{
|
|
"epoch": 0.3091684434968017,
|
|
"grad_norm": 0.0011854141484946012,
|
|
"learning_rate": 4.690831556503198e-05,
|
|
"loss": 0.0147,
|
|
"step": 145
|
|
},
|
|
{
|
|
"epoch": 0.31982942430703626,
|
|
"grad_norm": 0.0013706301106140018,
|
|
"learning_rate": 4.6801705756929645e-05,
|
|
"loss": 0.0025,
|
|
"step": 150
|
|
},
|
|
{
|
|
"epoch": 0.3304904051172708,
|
|
"grad_norm": 0.01695791445672512,
|
|
"learning_rate": 4.669509594882729e-05,
|
|
"loss": 0.0002,
|
|
"step": 155
|
|
},
|
|
{
|
|
"epoch": 0.3411513859275053,
|
|
"grad_norm": 0.015150121413171291,
|
|
"learning_rate": 4.658848614072495e-05,
|
|
"loss": 0.0001,
|
|
"step": 160
|
|
},
|
|
{
|
|
"epoch": 0.35181236673773986,
|
|
"grad_norm": 0.0029198750853538513,
|
|
"learning_rate": 4.64818763326226e-05,
|
|
"loss": 0.0022,
|
|
"step": 165
|
|
},
|
|
{
|
|
"epoch": 0.3624733475479744,
|
|
"grad_norm": 0.06715065240859985,
|
|
"learning_rate": 4.637526652452026e-05,
|
|
"loss": 0.0096,
|
|
"step": 170
|
|
},
|
|
{
|
|
"epoch": 0.373134328358209,
|
|
"grad_norm": 0.009593715891242027,
|
|
"learning_rate": 4.626865671641791e-05,
|
|
"loss": 0.0289,
|
|
"step": 175
|
|
},
|
|
{
|
|
"epoch": 0.3837953091684435,
|
|
"grad_norm": 0.0018478701822459698,
|
|
"learning_rate": 4.6162046908315566e-05,
|
|
"loss": 0.01,
|
|
"step": 180
|
|
},
|
|
{
|
|
"epoch": 0.39445628997867804,
|
|
"grad_norm": 0.06168442964553833,
|
|
"learning_rate": 4.605543710021322e-05,
|
|
"loss": 0.0001,
|
|
"step": 185
|
|
},
|
|
{
|
|
"epoch": 0.4051172707889126,
|
|
"grad_norm": 9.343092918395996,
|
|
"learning_rate": 4.5948827292110876e-05,
|
|
"loss": 0.036,
|
|
"step": 190
|
|
},
|
|
{
|
|
"epoch": 0.4157782515991471,
|
|
"grad_norm": 0.4198198616504669,
|
|
"learning_rate": 4.584221748400853e-05,
|
|
"loss": 0.0007,
|
|
"step": 195
|
|
},
|
|
{
|
|
"epoch": 0.42643923240938164,
|
|
"grad_norm": 0.014806258492171764,
|
|
"learning_rate": 4.5735607675906185e-05,
|
|
"loss": 0.0098,
|
|
"step": 200
|
|
},
|
|
{
|
|
"epoch": 0.43710021321961623,
|
|
"grad_norm": 0.3997659683227539,
|
|
"learning_rate": 4.562899786780384e-05,
|
|
"loss": 0.0003,
|
|
"step": 205
|
|
},
|
|
{
|
|
"epoch": 0.44776119402985076,
|
|
"grad_norm": 0.00046464521437883377,
|
|
"learning_rate": 4.5522388059701495e-05,
|
|
"loss": 0.0002,
|
|
"step": 210
|
|
},
|
|
{
|
|
"epoch": 0.4584221748400853,
|
|
"grad_norm": 3.510711431503296,
|
|
"learning_rate": 4.541577825159915e-05,
|
|
"loss": 0.0009,
|
|
"step": 215
|
|
},
|
|
{
|
|
"epoch": 0.4690831556503198,
|
|
"grad_norm": 6.993237495422363,
|
|
"learning_rate": 4.5309168443496804e-05,
|
|
"loss": 0.0688,
|
|
"step": 220
|
|
},
|
|
{
|
|
"epoch": 0.47974413646055436,
|
|
"grad_norm": 0.0005058702663518488,
|
|
"learning_rate": 4.520255863539446e-05,
|
|
"loss": 0.0003,
|
|
"step": 225
|
|
},
|
|
{
|
|
"epoch": 0.4904051172707889,
|
|
"grad_norm": 0.008164706639945507,
|
|
"learning_rate": 4.5095948827292114e-05,
|
|
"loss": 0.0237,
|
|
"step": 230
|
|
},
|
|
{
|
|
"epoch": 0.5010660980810234,
|
|
"grad_norm": 0.016866471618413925,
|
|
"learning_rate": 4.498933901918977e-05,
|
|
"loss": 0.0001,
|
|
"step": 235
|
|
},
|
|
{
|
|
"epoch": 0.511727078891258,
|
|
"grad_norm": 0.0005052869091741741,
|
|
"learning_rate": 4.488272921108742e-05,
|
|
"loss": 0.0002,
|
|
"step": 240
|
|
},
|
|
{
|
|
"epoch": 0.5223880597014925,
|
|
"grad_norm": 0.0013908357359468937,
|
|
"learning_rate": 4.477611940298508e-05,
|
|
"loss": 0.0001,
|
|
"step": 245
|
|
},
|
|
{
|
|
"epoch": 0.5330490405117271,
|
|
"grad_norm": 0.0010287690674886107,
|
|
"learning_rate": 4.466950959488273e-05,
|
|
"loss": 0.0142,
|
|
"step": 250
|
|
},
|
|
{
|
|
"epoch": 0.5437100213219617,
|
|
"grad_norm": 0.06469212472438812,
|
|
"learning_rate": 4.456289978678039e-05,
|
|
"loss": 0.0594,
|
|
"step": 255
|
|
},
|
|
{
|
|
"epoch": 0.5543710021321961,
|
|
"grad_norm": 0.010235542431473732,
|
|
"learning_rate": 4.445628997867804e-05,
|
|
"loss": 0.0016,
|
|
"step": 260
|
|
},
|
|
{
|
|
"epoch": 0.5650319829424307,
|
|
"grad_norm": 0.0006951680989004672,
|
|
"learning_rate": 4.43496801705757e-05,
|
|
"loss": 0.0188,
|
|
"step": 265
|
|
},
|
|
{
|
|
"epoch": 0.5756929637526652,
|
|
"grad_norm": 0.0056811426766216755,
|
|
"learning_rate": 4.424307036247335e-05,
|
|
"loss": 0.0171,
|
|
"step": 270
|
|
},
|
|
{
|
|
"epoch": 0.5863539445628998,
|
|
"grad_norm": 0.004655084107071161,
|
|
"learning_rate": 4.4136460554371006e-05,
|
|
"loss": 0.0183,
|
|
"step": 275
|
|
},
|
|
{
|
|
"epoch": 0.5970149253731343,
|
|
"grad_norm": 0.006924769841134548,
|
|
"learning_rate": 4.402985074626866e-05,
|
|
"loss": 0.0003,
|
|
"step": 280
|
|
},
|
|
{
|
|
"epoch": 0.6076759061833689,
|
|
"grad_norm": 0.0007150747114792466,
|
|
"learning_rate": 4.3923240938166316e-05,
|
|
"loss": 0.0004,
|
|
"step": 285
|
|
},
|
|
{
|
|
"epoch": 0.6183368869936035,
|
|
"grad_norm": 0.005002723541110754,
|
|
"learning_rate": 4.381663113006397e-05,
|
|
"loss": 0.0067,
|
|
"step": 290
|
|
},
|
|
{
|
|
"epoch": 0.6289978678038379,
|
|
"grad_norm": 8.503571510314941,
|
|
"learning_rate": 4.3710021321961625e-05,
|
|
"loss": 0.029,
|
|
"step": 295
|
|
},
|
|
{
|
|
"epoch": 0.6396588486140725,
|
|
"grad_norm": 0.005672371946275234,
|
|
"learning_rate": 4.360341151385928e-05,
|
|
"loss": 0.025,
|
|
"step": 300
|
|
},
|
|
{
|
|
"epoch": 0.650319829424307,
|
|
"grad_norm": 0.0015132506377995014,
|
|
"learning_rate": 4.3496801705756935e-05,
|
|
"loss": 0.0003,
|
|
"step": 305
|
|
},
|
|
{
|
|
"epoch": 0.6609808102345416,
|
|
"grad_norm": 0.002195100300014019,
|
|
"learning_rate": 4.339019189765459e-05,
|
|
"loss": 0.0351,
|
|
"step": 310
|
|
},
|
|
{
|
|
"epoch": 0.6716417910447762,
|
|
"grad_norm": 0.023223498836159706,
|
|
"learning_rate": 4.328358208955224e-05,
|
|
"loss": 0.0001,
|
|
"step": 315
|
|
},
|
|
{
|
|
"epoch": 0.6823027718550106,
|
|
"grad_norm": 0.0007878734031692147,
|
|
"learning_rate": 4.31769722814499e-05,
|
|
"loss": 0.0014,
|
|
"step": 320
|
|
},
|
|
{
|
|
"epoch": 0.6929637526652452,
|
|
"grad_norm": 0.09411798417568207,
|
|
"learning_rate": 4.307036247334755e-05,
|
|
"loss": 0.0006,
|
|
"step": 325
|
|
},
|
|
{
|
|
"epoch": 0.7036247334754797,
|
|
"grad_norm": 0.7988659143447876,
|
|
"learning_rate": 4.29637526652452e-05,
|
|
"loss": 0.007,
|
|
"step": 330
|
|
},
|
|
{
|
|
"epoch": 0.7142857142857143,
|
|
"grad_norm": 0.013898869045078754,
|
|
"learning_rate": 4.2857142857142856e-05,
|
|
"loss": 0.0002,
|
|
"step": 335
|
|
},
|
|
{
|
|
"epoch": 0.7249466950959488,
|
|
"grad_norm": 0.0005716294981539249,
|
|
"learning_rate": 4.275053304904051e-05,
|
|
"loss": 0.0002,
|
|
"step": 340
|
|
},
|
|
{
|
|
"epoch": 0.7356076759061834,
|
|
"grad_norm": 0.0505865141749382,
|
|
"learning_rate": 4.2643923240938166e-05,
|
|
"loss": 0.0003,
|
|
"step": 345
|
|
},
|
|
{
|
|
"epoch": 0.746268656716418,
|
|
"grad_norm": 0.000683732854668051,
|
|
"learning_rate": 4.253731343283582e-05,
|
|
"loss": 0.0067,
|
|
"step": 350
|
|
},
|
|
{
|
|
"epoch": 0.7569296375266524,
|
|
"grad_norm": 0.0023199687711894512,
|
|
"learning_rate": 4.2430703624733475e-05,
|
|
"loss": 0.0005,
|
|
"step": 355
|
|
},
|
|
{
|
|
"epoch": 0.767590618336887,
|
|
"grad_norm": 5.34326696395874,
|
|
"learning_rate": 4.232409381663113e-05,
|
|
"loss": 0.003,
|
|
"step": 360
|
|
},
|
|
{
|
|
"epoch": 0.7782515991471215,
|
|
"grad_norm": 0.0007228885078802705,
|
|
"learning_rate": 4.2217484008528785e-05,
|
|
"loss": 0.0001,
|
|
"step": 365
|
|
},
|
|
{
|
|
"epoch": 0.7889125799573561,
|
|
"grad_norm": 0.0005716294981539249,
|
|
"learning_rate": 4.211087420042644e-05,
|
|
"loss": 0.0093,
|
|
"step": 370
|
|
},
|
|
{
|
|
"epoch": 0.7995735607675906,
|
|
"grad_norm": 0.002282757544890046,
|
|
"learning_rate": 4.2004264392324094e-05,
|
|
"loss": 0.0015,
|
|
"step": 375
|
|
},
|
|
{
|
|
"epoch": 0.8102345415778252,
|
|
"grad_norm": 0.01682482287287712,
|
|
"learning_rate": 4.189765458422175e-05,
|
|
"loss": 0.0003,
|
|
"step": 380
|
|
},
|
|
{
|
|
"epoch": 0.8208955223880597,
|
|
"grad_norm": 0.0006660601356998086,
|
|
"learning_rate": 4.1791044776119404e-05,
|
|
"loss": 0.0011,
|
|
"step": 385
|
|
},
|
|
{
|
|
"epoch": 0.8315565031982942,
|
|
"grad_norm": 0.0011629801010712981,
|
|
"learning_rate": 4.168443496801706e-05,
|
|
"loss": 0.0001,
|
|
"step": 390
|
|
},
|
|
{
|
|
"epoch": 0.8422174840085288,
|
|
"grad_norm": 0.0007847646484151483,
|
|
"learning_rate": 4.157782515991471e-05,
|
|
"loss": 0.0001,
|
|
"step": 395
|
|
},
|
|
{
|
|
"epoch": 0.8528784648187633,
|
|
"grad_norm": 0.06353192031383514,
|
|
"learning_rate": 4.147121535181237e-05,
|
|
"loss": 0.0137,
|
|
"step": 400
|
|
},
|
|
{
|
|
"epoch": 0.8635394456289979,
|
|
"grad_norm": 0.6982009410858154,
|
|
"learning_rate": 4.136460554371002e-05,
|
|
"loss": 0.0305,
|
|
"step": 405
|
|
},
|
|
{
|
|
"epoch": 0.8742004264392325,
|
|
"grad_norm": 0.009664745070040226,
|
|
"learning_rate": 4.125799573560768e-05,
|
|
"loss": 0.0005,
|
|
"step": 410
|
|
},
|
|
{
|
|
"epoch": 0.8848614072494669,
|
|
"grad_norm": 0.11180636286735535,
|
|
"learning_rate": 4.115138592750533e-05,
|
|
"loss": 0.0007,
|
|
"step": 415
|
|
},
|
|
{
|
|
"epoch": 0.8955223880597015,
|
|
"grad_norm": 0.07350176572799683,
|
|
"learning_rate": 4.104477611940299e-05,
|
|
"loss": 0.0003,
|
|
"step": 420
|
|
},
|
|
{
|
|
"epoch": 0.906183368869936,
|
|
"grad_norm": 0.13126812875270844,
|
|
"learning_rate": 4.093816631130064e-05,
|
|
"loss": 0.0007,
|
|
"step": 425
|
|
},
|
|
{
|
|
"epoch": 0.9168443496801706,
|
|
"grad_norm": 0.005411725491285324,
|
|
"learning_rate": 4.0831556503198296e-05,
|
|
"loss": 0.0122,
|
|
"step": 430
|
|
},
|
|
{
|
|
"epoch": 0.9275053304904051,
|
|
"grad_norm": 0.0015354432398453355,
|
|
"learning_rate": 4.072494669509595e-05,
|
|
"loss": 0.0005,
|
|
"step": 435
|
|
},
|
|
{
|
|
"epoch": 0.9381663113006397,
|
|
"grad_norm": 0.0005887004663236439,
|
|
"learning_rate": 4.0618336886993606e-05,
|
|
"loss": 0.0001,
|
|
"step": 440
|
|
},
|
|
{
|
|
"epoch": 0.9488272921108742,
|
|
"grad_norm": 0.020183030515909195,
|
|
"learning_rate": 4.051172707889126e-05,
|
|
"loss": 0.0002,
|
|
"step": 445
|
|
},
|
|
{
|
|
"epoch": 0.9594882729211087,
|
|
"grad_norm": 9.81164836883545,
|
|
"learning_rate": 4.0405117270788915e-05,
|
|
"loss": 0.0099,
|
|
"step": 450
|
|
},
|
|
{
|
|
"epoch": 0.9701492537313433,
|
|
"grad_norm": 0.05199125409126282,
|
|
"learning_rate": 4.029850746268657e-05,
|
|
"loss": 0.0004,
|
|
"step": 455
|
|
},
|
|
{
|
|
"epoch": 0.9808102345415778,
|
|
"grad_norm": 11.525964736938477,
|
|
"learning_rate": 4.0191897654584225e-05,
|
|
"loss": 0.0076,
|
|
"step": 460
|
|
},
|
|
{
|
|
"epoch": 0.9914712153518124,
|
|
"grad_norm": 6.316874980926514,
|
|
"learning_rate": 4.008528784648188e-05,
|
|
"loss": 0.0288,
|
|
"step": 465
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"eval_accuracy": 0.9829333333333333,
|
|
"eval_loss": 0.07764233648777008,
|
|
"eval_runtime": 17.3051,
|
|
"eval_samples_per_second": 216.7,
|
|
"eval_steps_per_second": 6.819,
|
|
"step": 469
|
|
},
|
|
{
|
|
"epoch": 1.0021321961620469,
|
|
"grad_norm": 0.0009335101349279284,
|
|
"learning_rate": 3.997867803837953e-05,
|
|
"loss": 0.0028,
|
|
"step": 470
|
|
},
|
|
{
|
|
"epoch": 1.0127931769722816,
|
|
"grad_norm": 0.0005744777736254036,
|
|
"learning_rate": 3.987206823027719e-05,
|
|
"loss": 0.0002,
|
|
"step": 475
|
|
},
|
|
{
|
|
"epoch": 1.023454157782516,
|
|
"grad_norm": 0.0012754161143675447,
|
|
"learning_rate": 3.976545842217484e-05,
|
|
"loss": 0.003,
|
|
"step": 480
|
|
},
|
|
{
|
|
"epoch": 1.0341151385927505,
|
|
"grad_norm": 0.0006521099130623043,
|
|
"learning_rate": 3.96588486140725e-05,
|
|
"loss": 0.0001,
|
|
"step": 485
|
|
},
|
|
{
|
|
"epoch": 1.044776119402985,
|
|
"grad_norm": 0.0005485184374265373,
|
|
"learning_rate": 3.9552238805970146e-05,
|
|
"loss": 0.0022,
|
|
"step": 490
|
|
},
|
|
{
|
|
"epoch": 1.0554371002132197,
|
|
"grad_norm": 0.0008039661915972829,
|
|
"learning_rate": 3.944562899786781e-05,
|
|
"loss": 0.0006,
|
|
"step": 495
|
|
},
|
|
{
|
|
"epoch": 1.0660980810234542,
|
|
"grad_norm": 0.03789743408560753,
|
|
"learning_rate": 3.9339019189765456e-05,
|
|
"loss": 0.0002,
|
|
"step": 500
|
|
},
|
|
{
|
|
"epoch": 1.0767590618336886,
|
|
"grad_norm": 9.25595474243164,
|
|
"learning_rate": 3.923240938166312e-05,
|
|
"loss": 0.0128,
|
|
"step": 505
|
|
},
|
|
{
|
|
"epoch": 1.0874200426439233,
|
|
"grad_norm": 0.0019102554069831967,
|
|
"learning_rate": 3.9125799573560765e-05,
|
|
"loss": 0.0001,
|
|
"step": 510
|
|
},
|
|
{
|
|
"epoch": 1.0980810234541578,
|
|
"grad_norm": 0.0010919722262769938,
|
|
"learning_rate": 3.901918976545843e-05,
|
|
"loss": 0.0001,
|
|
"step": 515
|
|
},
|
|
{
|
|
"epoch": 1.1087420042643923,
|
|
"grad_norm": 1.5398660898208618,
|
|
"learning_rate": 3.8912579957356075e-05,
|
|
"loss": 0.0015,
|
|
"step": 520
|
|
},
|
|
{
|
|
"epoch": 1.1194029850746268,
|
|
"grad_norm": 0.00164665502961725,
|
|
"learning_rate": 3.8805970149253736e-05,
|
|
"loss": 0.0021,
|
|
"step": 525
|
|
},
|
|
{
|
|
"epoch": 1.1300639658848615,
|
|
"grad_norm": 0.005705105606466532,
|
|
"learning_rate": 3.8699360341151384e-05,
|
|
"loss": 0.022,
|
|
"step": 530
|
|
},
|
|
{
|
|
"epoch": 1.140724946695096,
|
|
"grad_norm": 0.0012973021948710084,
|
|
"learning_rate": 3.8592750533049046e-05,
|
|
"loss": 0.0002,
|
|
"step": 535
|
|
},
|
|
{
|
|
"epoch": 1.1513859275053304,
|
|
"grad_norm": 0.029590152204036713,
|
|
"learning_rate": 3.8486140724946694e-05,
|
|
"loss": 0.0007,
|
|
"step": 540
|
|
},
|
|
{
|
|
"epoch": 1.1620469083155651,
|
|
"grad_norm": 0.08817217499017715,
|
|
"learning_rate": 3.8379530916844355e-05,
|
|
"loss": 0.0013,
|
|
"step": 545
|
|
},
|
|
{
|
|
"epoch": 1.1727078891257996,
|
|
"grad_norm": 0.0015365479048341513,
|
|
"learning_rate": 3.8272921108742e-05,
|
|
"loss": 0.0001,
|
|
"step": 550
|
|
},
|
|
{
|
|
"epoch": 1.183368869936034,
|
|
"grad_norm": 0.00235858210362494,
|
|
"learning_rate": 3.8166311300639665e-05,
|
|
"loss": 0.0002,
|
|
"step": 555
|
|
},
|
|
{
|
|
"epoch": 1.1940298507462686,
|
|
"grad_norm": 0.0005989997880533338,
|
|
"learning_rate": 3.805970149253731e-05,
|
|
"loss": 0.0051,
|
|
"step": 560
|
|
},
|
|
{
|
|
"epoch": 1.2046908315565032,
|
|
"grad_norm": 0.03067871369421482,
|
|
"learning_rate": 3.7953091684434974e-05,
|
|
"loss": 0.0001,
|
|
"step": 565
|
|
},
|
|
{
|
|
"epoch": 1.2153518123667377,
|
|
"grad_norm": 0.0004781259340234101,
|
|
"learning_rate": 3.784648187633262e-05,
|
|
"loss": 0.0001,
|
|
"step": 570
|
|
},
|
|
{
|
|
"epoch": 1.2260127931769722,
|
|
"grad_norm": 0.016887221485376358,
|
|
"learning_rate": 3.7739872068230284e-05,
|
|
"loss": 0.0001,
|
|
"step": 575
|
|
},
|
|
{
|
|
"epoch": 1.236673773987207,
|
|
"grad_norm": 0.0004954261239618063,
|
|
"learning_rate": 3.763326226012793e-05,
|
|
"loss": 0.0001,
|
|
"step": 580
|
|
},
|
|
{
|
|
"epoch": 1.2473347547974414,
|
|
"grad_norm": 14.3848295211792,
|
|
"learning_rate": 3.752665245202559e-05,
|
|
"loss": 0.0313,
|
|
"step": 585
|
|
},
|
|
{
|
|
"epoch": 1.2579957356076759,
|
|
"grad_norm": 0.0008196301059797406,
|
|
"learning_rate": 3.742004264392324e-05,
|
|
"loss": 0.0001,
|
|
"step": 590
|
|
},
|
|
{
|
|
"epoch": 1.2686567164179103,
|
|
"grad_norm": 0.005778003484010696,
|
|
"learning_rate": 3.73134328358209e-05,
|
|
"loss": 0.0001,
|
|
"step": 595
|
|
},
|
|
{
|
|
"epoch": 1.279317697228145,
|
|
"grad_norm": 0.0006346408044919372,
|
|
"learning_rate": 3.720682302771855e-05,
|
|
"loss": 0.0001,
|
|
"step": 600
|
|
},
|
|
{
|
|
"epoch": 1.2899786780383795,
|
|
"grad_norm": 0.0012607659446075559,
|
|
"learning_rate": 3.710021321961621e-05,
|
|
"loss": 0.0001,
|
|
"step": 605
|
|
},
|
|
{
|
|
"epoch": 1.3006396588486142,
|
|
"grad_norm": 0.0005228128866292536,
|
|
"learning_rate": 3.699360341151386e-05,
|
|
"loss": 0.0001,
|
|
"step": 610
|
|
},
|
|
{
|
|
"epoch": 1.3113006396588487,
|
|
"grad_norm": 0.0006187814869917929,
|
|
"learning_rate": 3.6886993603411515e-05,
|
|
"loss": 0.0001,
|
|
"step": 615
|
|
},
|
|
{
|
|
"epoch": 1.3219616204690832,
|
|
"grad_norm": 0.0005267775850370526,
|
|
"learning_rate": 3.678038379530917e-05,
|
|
"loss": 0.0001,
|
|
"step": 620
|
|
},
|
|
{
|
|
"epoch": 1.3326226012793176,
|
|
"grad_norm": 0.0004988737055100501,
|
|
"learning_rate": 3.6673773987206824e-05,
|
|
"loss": 0.0001,
|
|
"step": 625
|
|
},
|
|
{
|
|
"epoch": 1.3432835820895521,
|
|
"grad_norm": 0.0016905238153412938,
|
|
"learning_rate": 3.656716417910448e-05,
|
|
"loss": 0.0002,
|
|
"step": 630
|
|
},
|
|
{
|
|
"epoch": 1.3539445628997868,
|
|
"grad_norm": 0.005467831622809172,
|
|
"learning_rate": 3.6460554371002134e-05,
|
|
"loss": 0.0001,
|
|
"step": 635
|
|
},
|
|
{
|
|
"epoch": 1.3646055437100213,
|
|
"grad_norm": 0.004559985361993313,
|
|
"learning_rate": 3.635394456289979e-05,
|
|
"loss": 0.0001,
|
|
"step": 640
|
|
},
|
|
{
|
|
"epoch": 1.375266524520256,
|
|
"grad_norm": 0.01161860954016447,
|
|
"learning_rate": 3.624733475479744e-05,
|
|
"loss": 0.0001,
|
|
"step": 645
|
|
},
|
|
{
|
|
"epoch": 1.3859275053304905,
|
|
"grad_norm": 10.308504104614258,
|
|
"learning_rate": 3.61407249466951e-05,
|
|
"loss": 0.022,
|
|
"step": 650
|
|
},
|
|
{
|
|
"epoch": 1.396588486140725,
|
|
"grad_norm": 0.01034360658377409,
|
|
"learning_rate": 3.603411513859275e-05,
|
|
"loss": 0.0004,
|
|
"step": 655
|
|
},
|
|
{
|
|
"epoch": 1.4072494669509594,
|
|
"grad_norm": 0.0004691320064011961,
|
|
"learning_rate": 3.592750533049041e-05,
|
|
"loss": 0.0001,
|
|
"step": 660
|
|
},
|
|
{
|
|
"epoch": 1.417910447761194,
|
|
"grad_norm": 0.10185609757900238,
|
|
"learning_rate": 3.582089552238806e-05,
|
|
"loss": 0.0001,
|
|
"step": 665
|
|
},
|
|
{
|
|
"epoch": 1.4285714285714286,
|
|
"grad_norm": 0.0005662160692736506,
|
|
"learning_rate": 3.571428571428572e-05,
|
|
"loss": 0.0001,
|
|
"step": 670
|
|
},
|
|
{
|
|
"epoch": 1.439232409381663,
|
|
"grad_norm": 1.5464709997177124,
|
|
"learning_rate": 3.560767590618337e-05,
|
|
"loss": 0.0008,
|
|
"step": 675
|
|
},
|
|
{
|
|
"epoch": 1.4498933901918978,
|
|
"grad_norm": 0.0006050506490282714,
|
|
"learning_rate": 3.5501066098081026e-05,
|
|
"loss": 0.0001,
|
|
"step": 680
|
|
},
|
|
{
|
|
"epoch": 1.4605543710021323,
|
|
"grad_norm": 0.0004750659572891891,
|
|
"learning_rate": 3.539445628997868e-05,
|
|
"loss": 0.0001,
|
|
"step": 685
|
|
},
|
|
{
|
|
"epoch": 1.4712153518123667,
|
|
"grad_norm": 0.00048713330761529505,
|
|
"learning_rate": 3.5287846481876336e-05,
|
|
"loss": 0.0001,
|
|
"step": 690
|
|
},
|
|
{
|
|
"epoch": 1.4818763326226012,
|
|
"grad_norm": 29.81216049194336,
|
|
"learning_rate": 3.518123667377399e-05,
|
|
"loss": 0.0132,
|
|
"step": 695
|
|
},
|
|
{
|
|
"epoch": 1.4925373134328357,
|
|
"grad_norm": 0.000694956339430064,
|
|
"learning_rate": 3.5074626865671645e-05,
|
|
"loss": 0.0001,
|
|
"step": 700
|
|
},
|
|
{
|
|
"epoch": 1.5031982942430704,
|
|
"grad_norm": 0.0019296599784865975,
|
|
"learning_rate": 3.496801705756929e-05,
|
|
"loss": 0.0232,
|
|
"step": 705
|
|
},
|
|
{
|
|
"epoch": 1.5138592750533049,
|
|
"grad_norm": 0.0005067252204753458,
|
|
"learning_rate": 3.4861407249466955e-05,
|
|
"loss": 0.0001,
|
|
"step": 710
|
|
},
|
|
{
|
|
"epoch": 1.5245202558635396,
|
|
"grad_norm": 0.06009787693619728,
|
|
"learning_rate": 3.47547974413646e-05,
|
|
"loss": 0.0002,
|
|
"step": 715
|
|
},
|
|
{
|
|
"epoch": 1.535181236673774,
|
|
"grad_norm": 0.000619211292359978,
|
|
"learning_rate": 3.4648187633262264e-05,
|
|
"loss": 0.052,
|
|
"step": 720
|
|
},
|
|
{
|
|
"epoch": 1.5458422174840085,
|
|
"grad_norm": 0.0015944514889270067,
|
|
"learning_rate": 3.454157782515991e-05,
|
|
"loss": 0.0008,
|
|
"step": 725
|
|
},
|
|
{
|
|
"epoch": 1.556503198294243,
|
|
"grad_norm": 0.0011096606031060219,
|
|
"learning_rate": 3.4434968017057574e-05,
|
|
"loss": 0.0001,
|
|
"step": 730
|
|
},
|
|
{
|
|
"epoch": 1.5671641791044775,
|
|
"grad_norm": 0.0005309173720888793,
|
|
"learning_rate": 3.432835820895522e-05,
|
|
"loss": 0.0317,
|
|
"step": 735
|
|
},
|
|
{
|
|
"epoch": 1.5778251599147122,
|
|
"grad_norm": 0.32308876514434814,
|
|
"learning_rate": 3.422174840085288e-05,
|
|
"loss": 0.0003,
|
|
"step": 740
|
|
},
|
|
{
|
|
"epoch": 1.5884861407249466,
|
|
"grad_norm": 0.0010738209821283817,
|
|
"learning_rate": 3.411513859275053e-05,
|
|
"loss": 0.0452,
|
|
"step": 745
|
|
},
|
|
{
|
|
"epoch": 1.5991471215351813,
|
|
"grad_norm": 0.0006256980705074966,
|
|
"learning_rate": 3.400852878464819e-05,
|
|
"loss": 0.0124,
|
|
"step": 750
|
|
},
|
|
{
|
|
"epoch": 1.6098081023454158,
|
|
"grad_norm": 0.0006480838055722415,
|
|
"learning_rate": 3.390191897654584e-05,
|
|
"loss": 0.0002,
|
|
"step": 755
|
|
},
|
|
{
|
|
"epoch": 1.6204690831556503,
|
|
"grad_norm": 0.6343570947647095,
|
|
"learning_rate": 3.37953091684435e-05,
|
|
"loss": 0.0008,
|
|
"step": 760
|
|
},
|
|
{
|
|
"epoch": 1.6311300639658848,
|
|
"grad_norm": 0.0011410909937694669,
|
|
"learning_rate": 3.368869936034115e-05,
|
|
"loss": 0.0002,
|
|
"step": 765
|
|
},
|
|
{
|
|
"epoch": 1.6417910447761193,
|
|
"grad_norm": 0.0006747875013388693,
|
|
"learning_rate": 3.358208955223881e-05,
|
|
"loss": 0.0001,
|
|
"step": 770
|
|
},
|
|
{
|
|
"epoch": 1.652452025586354,
|
|
"grad_norm": 0.0009736359934322536,
|
|
"learning_rate": 3.347547974413646e-05,
|
|
"loss": 0.0001,
|
|
"step": 775
|
|
},
|
|
{
|
|
"epoch": 1.6631130063965884,
|
|
"grad_norm": 0.0005615497357212007,
|
|
"learning_rate": 3.336886993603412e-05,
|
|
"loss": 0.0001,
|
|
"step": 780
|
|
},
|
|
{
|
|
"epoch": 1.6737739872068231,
|
|
"grad_norm": 0.010285994969308376,
|
|
"learning_rate": 3.326226012793177e-05,
|
|
"loss": 0.0001,
|
|
"step": 785
|
|
},
|
|
{
|
|
"epoch": 1.6844349680170576,
|
|
"grad_norm": 0.04828882962465286,
|
|
"learning_rate": 3.3155650319829424e-05,
|
|
"loss": 0.0011,
|
|
"step": 790
|
|
},
|
|
{
|
|
"epoch": 1.695095948827292,
|
|
"grad_norm": 0.009203250519931316,
|
|
"learning_rate": 3.304904051172708e-05,
|
|
"loss": 0.0029,
|
|
"step": 795
|
|
},
|
|
{
|
|
"epoch": 1.7057569296375266,
|
|
"grad_norm": 0.0004877850878983736,
|
|
"learning_rate": 3.294243070362473e-05,
|
|
"loss": 0.0003,
|
|
"step": 800
|
|
},
|
|
{
|
|
"epoch": 1.716417910447761,
|
|
"grad_norm": 0.0005571940564550459,
|
|
"learning_rate": 3.283582089552239e-05,
|
|
"loss": 0.0001,
|
|
"step": 805
|
|
},
|
|
{
|
|
"epoch": 1.7270788912579957,
|
|
"grad_norm": 0.000695103604812175,
|
|
"learning_rate": 3.272921108742004e-05,
|
|
"loss": 0.0207,
|
|
"step": 810
|
|
},
|
|
{
|
|
"epoch": 1.7377398720682304,
|
|
"grad_norm": 0.009688911028206348,
|
|
"learning_rate": 3.26226012793177e-05,
|
|
"loss": 0.0001,
|
|
"step": 815
|
|
},
|
|
{
|
|
"epoch": 1.748400852878465,
|
|
"grad_norm": 0.0030300819780677557,
|
|
"learning_rate": 3.251599147121535e-05,
|
|
"loss": 0.0001,
|
|
"step": 820
|
|
},
|
|
{
|
|
"epoch": 1.7590618336886994,
|
|
"grad_norm": 0.0007056622416712344,
|
|
"learning_rate": 3.240938166311301e-05,
|
|
"loss": 0.0012,
|
|
"step": 825
|
|
},
|
|
{
|
|
"epoch": 1.7697228144989339,
|
|
"grad_norm": 0.19790150225162506,
|
|
"learning_rate": 3.230277185501066e-05,
|
|
"loss": 0.0003,
|
|
"step": 830
|
|
},
|
|
{
|
|
"epoch": 1.7803837953091683,
|
|
"grad_norm": 0.042493995279073715,
|
|
"learning_rate": 3.2196162046908317e-05,
|
|
"loss": 0.0004,
|
|
"step": 835
|
|
},
|
|
{
|
|
"epoch": 1.7910447761194028,
|
|
"grad_norm": 0.0005138494889251888,
|
|
"learning_rate": 3.208955223880597e-05,
|
|
"loss": 0.042,
|
|
"step": 840
|
|
},
|
|
{
|
|
"epoch": 1.8017057569296375,
|
|
"grad_norm": 0.0006034639663994312,
|
|
"learning_rate": 3.1982942430703626e-05,
|
|
"loss": 0.0001,
|
|
"step": 845
|
|
},
|
|
{
|
|
"epoch": 1.8123667377398722,
|
|
"grad_norm": 0.00045840497477911413,
|
|
"learning_rate": 3.187633262260128e-05,
|
|
"loss": 0.0001,
|
|
"step": 850
|
|
},
|
|
{
|
|
"epoch": 1.8230277185501067,
|
|
"grad_norm": 0.00046087196096777916,
|
|
"learning_rate": 3.1769722814498935e-05,
|
|
"loss": 0.0001,
|
|
"step": 855
|
|
},
|
|
{
|
|
"epoch": 1.8336886993603412,
|
|
"grad_norm": 0.007470608688890934,
|
|
"learning_rate": 3.166311300639659e-05,
|
|
"loss": 0.0001,
|
|
"step": 860
|
|
},
|
|
{
|
|
"epoch": 1.8443496801705757,
|
|
"grad_norm": 0.001506677595898509,
|
|
"learning_rate": 3.1556503198294245e-05,
|
|
"loss": 0.0001,
|
|
"step": 865
|
|
},
|
|
{
|
|
"epoch": 1.8550106609808101,
|
|
"grad_norm": 0.001449568080715835,
|
|
"learning_rate": 3.14498933901919e-05,
|
|
"loss": 0.0001,
|
|
"step": 870
|
|
},
|
|
{
|
|
"epoch": 1.8656716417910446,
|
|
"grad_norm": 0.0025665760040283203,
|
|
"learning_rate": 3.1343283582089554e-05,
|
|
"loss": 0.0001,
|
|
"step": 875
|
|
},
|
|
{
|
|
"epoch": 1.8763326226012793,
|
|
"grad_norm": 0.008222298696637154,
|
|
"learning_rate": 3.123667377398721e-05,
|
|
"loss": 0.0001,
|
|
"step": 880
|
|
},
|
|
{
|
|
"epoch": 1.886993603411514,
|
|
"grad_norm": 0.0014738014433532953,
|
|
"learning_rate": 3.1130063965884864e-05,
|
|
"loss": 0.0043,
|
|
"step": 885
|
|
},
|
|
{
|
|
"epoch": 1.8976545842217485,
|
|
"grad_norm": 0.0005305655067786574,
|
|
"learning_rate": 3.102345415778252e-05,
|
|
"loss": 0.0001,
|
|
"step": 890
|
|
},
|
|
{
|
|
"epoch": 1.908315565031983,
|
|
"grad_norm": 0.0015132325934246182,
|
|
"learning_rate": 3.0916844349680173e-05,
|
|
"loss": 0.0001,
|
|
"step": 895
|
|
},
|
|
{
|
|
"epoch": 1.9189765458422174,
|
|
"grad_norm": 0.0004291821096558124,
|
|
"learning_rate": 3.081023454157783e-05,
|
|
"loss": 0.0001,
|
|
"step": 900
|
|
},
|
|
{
|
|
"epoch": 1.929637526652452,
|
|
"grad_norm": 0.0005372606101445854,
|
|
"learning_rate": 3.070362473347548e-05,
|
|
"loss": 0.0001,
|
|
"step": 905
|
|
},
|
|
{
|
|
"epoch": 1.9402985074626866,
|
|
"grad_norm": 0.003320530755445361,
|
|
"learning_rate": 3.059701492537314e-05,
|
|
"loss": 0.0002,
|
|
"step": 910
|
|
},
|
|
{
|
|
"epoch": 1.950959488272921,
|
|
"grad_norm": 0.00044781426549889147,
|
|
"learning_rate": 3.0490405117270792e-05,
|
|
"loss": 0.0,
|
|
"step": 915
|
|
},
|
|
{
|
|
"epoch": 1.9616204690831558,
|
|
"grad_norm": 0.0007252440555021167,
|
|
"learning_rate": 3.0383795309168444e-05,
|
|
"loss": 0.0001,
|
|
"step": 920
|
|
},
|
|
{
|
|
"epoch": 1.9722814498933903,
|
|
"grad_norm": 0.045549795031547546,
|
|
"learning_rate": 3.0277185501066102e-05,
|
|
"loss": 0.0238,
|
|
"step": 925
|
|
},
|
|
{
|
|
"epoch": 1.9829424307036247,
|
|
"grad_norm": 0.0004270165809430182,
|
|
"learning_rate": 3.0170575692963753e-05,
|
|
"loss": 0.0001,
|
|
"step": 930
|
|
},
|
|
{
|
|
"epoch": 1.9936034115138592,
|
|
"grad_norm": 0.0053723957389593124,
|
|
"learning_rate": 3.006396588486141e-05,
|
|
"loss": 0.0001,
|
|
"step": 935
|
|
},
|
|
{
|
|
"epoch": 2.0,
|
|
"eval_accuracy": 0.9832,
|
|
"eval_loss": 0.09167057275772095,
|
|
"eval_runtime": 17.4407,
|
|
"eval_samples_per_second": 215.015,
|
|
"eval_steps_per_second": 6.766,
|
|
"step": 938
|
|
},
|
|
{
"epoch": 2.0042643923240937,
"grad_norm": 0.0015149270184338093,
"learning_rate": 2.9957356076759063e-05,
"loss": 0.0001,
"step": 940
},
{
"epoch": 2.014925373134328,
"grad_norm": 0.047389041632413864,
"learning_rate": 2.9850746268656714e-05,
"loss": 0.0002,
"step": 945
},
{
"epoch": 2.025586353944563,
"grad_norm": 0.009697270579636097,
"learning_rate": 2.9744136460554372e-05,
"loss": 0.0001,
"step": 950
},
{
"epoch": 2.0362473347547976,
"grad_norm": 0.0004780073941219598,
"learning_rate": 2.9637526652452023e-05,
"loss": 0.0001,
"step": 955
},
{
"epoch": 2.046908315565032,
"grad_norm": 0.00869710836559534,
"learning_rate": 2.953091684434968e-05,
"loss": 0.0001,
"step": 960
},
{
"epoch": 2.0575692963752665,
"grad_norm": 0.05876876786351204,
"learning_rate": 2.9424307036247333e-05,
"loss": 0.0001,
"step": 965
},
{
"epoch": 2.068230277185501,
"grad_norm": 0.0024717338383197784,
"learning_rate": 2.931769722814499e-05,
"loss": 0.0002,
"step": 970
},
{
"epoch": 2.0788912579957355,
"grad_norm": 0.0007059109047986567,
"learning_rate": 2.9211087420042642e-05,
"loss": 0.0001,
"step": 975
},
{
"epoch": 2.08955223880597,
"grad_norm": 0.00044276739936321974,
"learning_rate": 2.91044776119403e-05,
"loss": 0.0001,
"step": 980
},
{
"epoch": 2.100213219616205,
"grad_norm": 0.00043687623110599816,
"learning_rate": 2.8997867803837952e-05,
"loss": 0.0001,
"step": 985
},
{
"epoch": 2.1108742004264394,
"grad_norm": 0.0005369892460294068,
"learning_rate": 2.889125799573561e-05,
"loss": 0.0001,
"step": 990
},
{
"epoch": 2.121535181236674,
"grad_norm": 0.00043421206646598876,
"learning_rate": 2.878464818763326e-05,
"loss": 0.0001,
"step": 995
},
{
"epoch": 2.1321961620469083,
"grad_norm": 0.0008949624025262892,
"learning_rate": 2.867803837953092e-05,
"loss": 0.0001,
"step": 1000
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.0004758831055369228,
"learning_rate": 2.857142857142857e-05,
"loss": 0.0001,
"step": 1005
},
{
"epoch": 2.1535181236673773,
"grad_norm": 0.0010464921360835433,
"learning_rate": 2.846481876332623e-05,
"loss": 0.0001,
"step": 1010
},
{
"epoch": 2.1641791044776117,
"grad_norm": 0.00045838902588002384,
"learning_rate": 2.835820895522388e-05,
"loss": 0.0001,
"step": 1015
},
{
"epoch": 2.1748400852878467,
"grad_norm": 0.0007630742038600147,
"learning_rate": 2.825159914712154e-05,
"loss": 0.0001,
"step": 1020
},
{
"epoch": 2.185501066098081,
"grad_norm": 0.0004066063847858459,
"learning_rate": 2.814498933901919e-05,
"loss": 0.0001,
"step": 1025
},
{
"epoch": 2.1961620469083156,
"grad_norm": 0.0004280243592802435,
"learning_rate": 2.8038379530916848e-05,
"loss": 0.0001,
"step": 1030
},
{
"epoch": 2.20682302771855,
"grad_norm": 0.00039023839053697884,
"learning_rate": 2.79317697228145e-05,
"loss": 0.0001,
"step": 1035
},
{
"epoch": 2.2174840085287846,
"grad_norm": 0.00043795965029858053,
"learning_rate": 2.7825159914712157e-05,
"loss": 0.0001,
"step": 1040
},
{
"epoch": 2.228144989339019,
"grad_norm": 0.038659244775772095,
"learning_rate": 2.771855010660981e-05,
"loss": 0.0001,
"step": 1045
},
{
"epoch": 2.2388059701492535,
"grad_norm": 0.00043199799256399274,
"learning_rate": 2.7611940298507467e-05,
"loss": 0.0001,
"step": 1050
},
{
"epoch": 2.2494669509594885,
"grad_norm": 0.000450240564532578,
"learning_rate": 2.7505330490405118e-05,
"loss": 0.0001,
"step": 1055
},
{
"epoch": 2.260127931769723,
"grad_norm": 0.0027239543851464987,
"learning_rate": 2.7398720682302776e-05,
"loss": 0.0001,
"step": 1060
},
{
"epoch": 2.2707889125799574,
"grad_norm": 0.0005202888278290629,
"learning_rate": 2.7292110874200428e-05,
"loss": 0.0,
"step": 1065
},
{
"epoch": 2.281449893390192,
"grad_norm": 0.0003948301309719682,
"learning_rate": 2.7185501066098086e-05,
"loss": 0.0,
"step": 1070
},
{
"epoch": 2.2921108742004264,
"grad_norm": 0.0003954103449359536,
"learning_rate": 2.7078891257995737e-05,
"loss": 0.0001,
"step": 1075
},
{
"epoch": 2.302771855010661,
"grad_norm": 0.00039230516995303333,
"learning_rate": 2.6972281449893395e-05,
"loss": 0.0001,
"step": 1080
},
{
"epoch": 2.3134328358208958,
"grad_norm": 0.00039497955003753304,
"learning_rate": 2.6865671641791047e-05,
"loss": 0.0,
"step": 1085
},
{
"epoch": 2.3240938166311302,
"grad_norm": 0.0008549017948098481,
"learning_rate": 2.6759061833688705e-05,
"loss": 0.0001,
"step": 1090
},
{
"epoch": 2.3347547974413647,
"grad_norm": 0.0005017873481847346,
"learning_rate": 2.6652452025586356e-05,
"loss": 0.0,
"step": 1095
},
{
"epoch": 2.345415778251599,
"grad_norm": 0.0011325380764901638,
"learning_rate": 2.6545842217484007e-05,
"loss": 0.0001,
"step": 1100
},
{
"epoch": 2.3560767590618337,
"grad_norm": 0.00043634831672534347,
"learning_rate": 2.6439232409381666e-05,
"loss": 0.0001,
"step": 1105
},
{
"epoch": 2.366737739872068,
"grad_norm": 0.0004065588873345405,
"learning_rate": 2.6332622601279317e-05,
"loss": 0.0015,
"step": 1110
},
{
"epoch": 2.3773987206823026,
"grad_norm": 0.0006859657005406916,
"learning_rate": 2.6226012793176975e-05,
"loss": 0.0,
"step": 1115
},
{
"epoch": 2.388059701492537,
"grad_norm": 0.00043606595136225224,
"learning_rate": 2.6119402985074626e-05,
"loss": 0.0001,
"step": 1120
},
{
"epoch": 2.398720682302772,
"grad_norm": 0.0005418714135885239,
"learning_rate": 2.6012793176972285e-05,
"loss": 0.0001,
"step": 1125
},
{
"epoch": 2.4093816631130065,
"grad_norm": 0.010567808523774147,
"learning_rate": 2.5906183368869936e-05,
"loss": 0.0001,
"step": 1130
},
{
"epoch": 2.420042643923241,
"grad_norm": 0.0004621766274794936,
"learning_rate": 2.5799573560767594e-05,
"loss": 0.0001,
"step": 1135
},
{
"epoch": 2.4307036247334755,
"grad_norm": 0.00038448310806415975,
"learning_rate": 2.5692963752665245e-05,
"loss": 0.0,
"step": 1140
},
{
"epoch": 2.44136460554371,
"grad_norm": 0.003054052358493209,
"learning_rate": 2.5586353944562904e-05,
"loss": 0.0001,
"step": 1145
},
{
"epoch": 2.4520255863539444,
"grad_norm": 0.006396948359906673,
"learning_rate": 2.5479744136460555e-05,
"loss": 0.0001,
"step": 1150
},
{
"epoch": 2.4626865671641793,
"grad_norm": 0.0003959976602345705,
"learning_rate": 2.537313432835821e-05,
"loss": 0.0001,
"step": 1155
},
{
"epoch": 2.473347547974414,
"grad_norm": 0.0004430985718499869,
"learning_rate": 2.5266524520255864e-05,
"loss": 0.0001,
"step": 1160
},
{
"epoch": 2.4840085287846483,
"grad_norm": 0.00037877613794989884,
"learning_rate": 2.515991471215352e-05,
"loss": 0.0,
"step": 1165
},
{
"epoch": 2.4946695095948828,
"grad_norm": 0.00037804825115017593,
"learning_rate": 2.5053304904051174e-05,
"loss": 0.0,
"step": 1170
},
{
"epoch": 2.5053304904051172,
"grad_norm": 0.002878658240661025,
"learning_rate": 2.494669509594883e-05,
"loss": 0.0001,
"step": 1175
},
{
"epoch": 2.5159914712153517,
"grad_norm": 0.000403961370466277,
"learning_rate": 2.4840085287846483e-05,
"loss": 0.0053,
"step": 1180
},
{
"epoch": 2.526652452025586,
"grad_norm": 0.0004649708862416446,
"learning_rate": 2.4733475479744138e-05,
"loss": 0.0004,
"step": 1185
},
{
"epoch": 2.5373134328358207,
"grad_norm": 0.003935372922569513,
"learning_rate": 2.4626865671641793e-05,
"loss": 0.0,
"step": 1190
},
{
"epoch": 2.5479744136460556,
"grad_norm": 0.09437806904315948,
"learning_rate": 2.4520255863539444e-05,
"loss": 0.0002,
"step": 1195
},
{
"epoch": 2.55863539445629,
"grad_norm": 0.00040584628004580736,
"learning_rate": 2.44136460554371e-05,
"loss": 0.0,
"step": 1200
},
{
"epoch": 2.5692963752665245,
"grad_norm": 0.00037620539660565555,
"learning_rate": 2.4307036247334754e-05,
"loss": 0.0,
"step": 1205
},
{
"epoch": 2.579957356076759,
"grad_norm": 0.0005611660308204591,
"learning_rate": 2.420042643923241e-05,
"loss": 0.0,
"step": 1210
},
{
"epoch": 2.5906183368869935,
"grad_norm": 0.0003836385440081358,
"learning_rate": 2.4093816631130063e-05,
"loss": 0.0,
"step": 1215
},
{
"epoch": 2.6012793176972284,
"grad_norm": 0.00041127714212052524,
"learning_rate": 2.3987206823027718e-05,
"loss": 0.0,
"step": 1220
},
{
"epoch": 2.611940298507463,
"grad_norm": 0.0005083730793558061,
"learning_rate": 2.3880597014925373e-05,
"loss": 0.0,
"step": 1225
},
{
"epoch": 2.6226012793176974,
"grad_norm": 0.0003733741759788245,
"learning_rate": 2.3773987206823027e-05,
"loss": 0.0002,
"step": 1230
},
{
"epoch": 2.633262260127932,
"grad_norm": 0.00042687475797720253,
"learning_rate": 2.3667377398720682e-05,
"loss": 0.0,
"step": 1235
},
{
"epoch": 2.6439232409381663,
"grad_norm": 0.0037479845341295004,
"learning_rate": 2.3560767590618337e-05,
"loss": 0.0002,
"step": 1240
},
{
"epoch": 2.654584221748401,
"grad_norm": 0.00037869918742217124,
"learning_rate": 2.345415778251599e-05,
"loss": 0.0141,
"step": 1245
},
{
"epoch": 2.6652452025586353,
"grad_norm": 0.0004404525680001825,
"learning_rate": 2.3347547974413646e-05,
"loss": 0.0,
"step": 1250
},
{
"epoch": 2.6759061833688698,
"grad_norm": 0.00037526304367929697,
"learning_rate": 2.32409381663113e-05,
"loss": 0.0,
"step": 1255
},
{
"epoch": 2.6865671641791042,
"grad_norm": 0.0003638829803094268,
"learning_rate": 2.3134328358208956e-05,
"loss": 0.0,
"step": 1260
},
{
"epoch": 2.697228144989339,
"grad_norm": 20.152830123901367,
"learning_rate": 2.302771855010661e-05,
"loss": 0.017,
"step": 1265
},
{
"epoch": 2.7078891257995736,
"grad_norm": 0.0003632500011008233,
"learning_rate": 2.2921108742004265e-05,
"loss": 0.0,
"step": 1270
},
{
"epoch": 2.718550106609808,
"grad_norm": 2.5107836723327637,
"learning_rate": 2.281449893390192e-05,
"loss": 0.0014,
"step": 1275
},
{
"epoch": 2.7292110874200426,
"grad_norm": 0.00036434625508263707,
"learning_rate": 2.2707889125799575e-05,
"loss": 0.0,
"step": 1280
},
{
"epoch": 2.739872068230277,
"grad_norm": 0.0004573333717416972,
"learning_rate": 2.260127931769723e-05,
"loss": 0.0,
"step": 1285
},
{
"epoch": 2.750533049040512,
"grad_norm": 0.0006087252404540777,
"learning_rate": 2.2494669509594884e-05,
"loss": 0.0005,
"step": 1290
},
{
"epoch": 2.7611940298507465,
"grad_norm": 0.0004526629636529833,
"learning_rate": 2.238805970149254e-05,
"loss": 0.0,
"step": 1295
},
{
"epoch": 2.771855010660981,
"grad_norm": 0.0006064132321625948,
"learning_rate": 2.2281449893390194e-05,
"loss": 0.0001,
"step": 1300
},
{
"epoch": 2.7825159914712154,
"grad_norm": 0.00036414284841157496,
"learning_rate": 2.217484008528785e-05,
"loss": 0.0001,
"step": 1305
},
{
"epoch": 2.79317697228145,
"grad_norm": 0.000577672733925283,
"learning_rate": 2.2068230277185503e-05,
"loss": 0.0001,
"step": 1310
},
{
"epoch": 2.8038379530916844,
"grad_norm": 0.012910407967865467,
"learning_rate": 2.1961620469083158e-05,
"loss": 0.0001,
"step": 1315
},
{
"epoch": 2.814498933901919,
"grad_norm": 0.0003832327201962471,
"learning_rate": 2.1855010660980813e-05,
"loss": 0.0,
"step": 1320
},
{
"epoch": 2.8251599147121533,
"grad_norm": 0.005169307347387075,
"learning_rate": 2.1748400852878467e-05,
"loss": 0.0001,
"step": 1325
},
{
"epoch": 2.835820895522388,
"grad_norm": 0.0003850864595733583,
"learning_rate": 2.164179104477612e-05,
"loss": 0.0,
"step": 1330
},
{
"epoch": 2.8464818763326227,
"grad_norm": 10.417537689208984,
"learning_rate": 2.1535181236673773e-05,
"loss": 0.0066,
"step": 1335
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.00035963315167464316,
"learning_rate": 2.1428571428571428e-05,
"loss": 0.0001,
"step": 1340
},
{
"epoch": 2.8678038379530917,
"grad_norm": 0.0029190913774073124,
"learning_rate": 2.1321961620469083e-05,
"loss": 0.0001,
"step": 1345
},
{
"epoch": 2.878464818763326,
"grad_norm": 0.01105143129825592,
"learning_rate": 2.1215351812366738e-05,
"loss": 0.0001,
"step": 1350
},
{
"epoch": 2.8891257995735606,
"grad_norm": 0.0004973071627318859,
"learning_rate": 2.1108742004264392e-05,
"loss": 0.0,
"step": 1355
},
{
"epoch": 2.8997867803837956,
"grad_norm": 0.00039804342668503523,
"learning_rate": 2.1002132196162047e-05,
"loss": 0.0237,
"step": 1360
},
{
"epoch": 2.91044776119403,
"grad_norm": 0.0004003876820206642,
"learning_rate": 2.0895522388059702e-05,
"loss": 0.0,
"step": 1365
},
{
"epoch": 2.9211087420042645,
"grad_norm": 0.00038996984949335456,
"learning_rate": 2.0788912579957357e-05,
"loss": 0.0001,
"step": 1370
},
{
"epoch": 2.931769722814499,
"grad_norm": 0.0004061104846186936,
"learning_rate": 2.068230277185501e-05,
"loss": 0.0,
"step": 1375
},
{
"epoch": 2.9424307036247335,
"grad_norm": 0.00040256063221022487,
"learning_rate": 2.0575692963752666e-05,
"loss": 0.0,
"step": 1380
},
{
"epoch": 2.953091684434968,
"grad_norm": 0.0015018166741356254,
"learning_rate": 2.046908315565032e-05,
"loss": 0.0,
"step": 1385
},
{
"epoch": 2.9637526652452024,
"grad_norm": 0.00041588780004531145,
"learning_rate": 2.0362473347547976e-05,
"loss": 0.0001,
"step": 1390
},
{
"epoch": 2.974413646055437,
"grad_norm": 0.00039294984890148044,
"learning_rate": 2.025586353944563e-05,
"loss": 0.0,
"step": 1395
},
{
"epoch": 2.9850746268656714,
"grad_norm": 0.0004044592787977308,
"learning_rate": 2.0149253731343285e-05,
"loss": 0.0,
"step": 1400
},
{
"epoch": 2.9957356076759063,
"grad_norm": 0.000609898823313415,
"learning_rate": 2.004264392324094e-05,
"loss": 0.0,
"step": 1405
},
{
"epoch": 3.0,
"eval_accuracy": 0.9829333333333333,
"eval_loss": 0.09540021419525146,
"eval_runtime": 17.6405,
"eval_samples_per_second": 212.579,
"eval_steps_per_second": 6.689,
"step": 1407
},
{
"epoch": 3.0063965884861408,
"grad_norm": 0.000987140228971839,
"learning_rate": 1.9936034115138594e-05,
"loss": 0.0,
"step": 1410
},
{
"epoch": 3.0170575692963753,
"grad_norm": 0.0003622091026045382,
"learning_rate": 1.982942430703625e-05,
"loss": 0.0,
"step": 1415
},
{
"epoch": 3.0277185501066097,
"grad_norm": 0.0004392349801491946,
"learning_rate": 1.9722814498933904e-05,
"loss": 0.0,
"step": 1420
},
{
"epoch": 3.038379530916844,
"grad_norm": 0.00046836218098178506,
"learning_rate": 1.961620469083156e-05,
"loss": 0.0,
"step": 1425
},
{
"epoch": 3.0490405117270787,
"grad_norm": 0.012954283505678177,
"learning_rate": 1.9509594882729213e-05,
"loss": 0.0001,
"step": 1430
},
{
"epoch": 3.0597014925373136,
"grad_norm": 0.00039281166391447186,
"learning_rate": 1.9402985074626868e-05,
"loss": 0.0001,
"step": 1435
},
{
"epoch": 3.070362473347548,
"grad_norm": 0.009641666896641254,
"learning_rate": 1.9296375266524523e-05,
"loss": 0.0,
"step": 1440
},
{
"epoch": 3.0810234541577826,
"grad_norm": 0.00046739052049815655,
"learning_rate": 1.9189765458422178e-05,
"loss": 0.0,
"step": 1445
},
{
"epoch": 3.091684434968017,
"grad_norm": 0.0019970715511590242,
"learning_rate": 1.9083155650319832e-05,
"loss": 0.0001,
"step": 1450
},
{
"epoch": 3.1023454157782515,
"grad_norm": 0.0006188787519931793,
"learning_rate": 1.8976545842217487e-05,
"loss": 0.0001,
"step": 1455
},
{
"epoch": 3.113006396588486,
"grad_norm": 0.0006973852287046611,
"learning_rate": 1.8869936034115142e-05,
"loss": 0.0,
"step": 1460
},
{
"epoch": 3.1236673773987205,
"grad_norm": 0.024798931553959846,
"learning_rate": 1.8763326226012797e-05,
"loss": 0.0001,
"step": 1465
},
{
"epoch": 3.1343283582089554,
"grad_norm": 0.0003946349024772644,
"learning_rate": 1.865671641791045e-05,
"loss": 0.0,
"step": 1470
},
{
"epoch": 3.14498933901919,
"grad_norm": 0.00048699730541557074,
"learning_rate": 1.8550106609808106e-05,
"loss": 0.0216,
"step": 1475
},
{
"epoch": 3.1556503198294243,
"grad_norm": 0.002107360865920782,
"learning_rate": 1.8443496801705757e-05,
"loss": 0.0027,
"step": 1480
},
{
"epoch": 3.166311300639659,
"grad_norm": 0.0027846514713019133,
"learning_rate": 1.8336886993603412e-05,
"loss": 0.0001,
"step": 1485
},
{
"epoch": 3.1769722814498933,
"grad_norm": 0.002694466384127736,
"learning_rate": 1.8230277185501067e-05,
"loss": 0.0002,
"step": 1490
},
{
"epoch": 3.1876332622601278,
"grad_norm": 0.0007566306740045547,
"learning_rate": 1.812366737739872e-05,
"loss": 0.0001,
"step": 1495
},
{
"epoch": 3.1982942430703627,
"grad_norm": 0.0009015945834107697,
"learning_rate": 1.8017057569296376e-05,
"loss": 0.0001,
"step": 1500
},
{
"epoch": 3.208955223880597,
"grad_norm": 0.001042648684233427,
"learning_rate": 1.791044776119403e-05,
"loss": 0.0022,
"step": 1505
},
{
"epoch": 3.2196162046908317,
"grad_norm": 0.0006084468332119286,
"learning_rate": 1.7803837953091686e-05,
"loss": 0.0,
"step": 1510
},
{
"epoch": 3.230277185501066,
"grad_norm": 0.0005775997997261584,
"learning_rate": 1.769722814498934e-05,
"loss": 0.0001,
"step": 1515
},
{
"epoch": 3.2409381663113006,
"grad_norm": 0.00035793459392152727,
"learning_rate": 1.7590618336886995e-05,
"loss": 0.0,
"step": 1520
},
{
"epoch": 3.251599147121535,
"grad_norm": 0.0004987951833754778,
"learning_rate": 1.7484008528784647e-05,
"loss": 0.0,
"step": 1525
},
{
"epoch": 3.2622601279317696,
"grad_norm": 0.0003945837961509824,
"learning_rate": 1.73773987206823e-05,
"loss": 0.0,
"step": 1530
},
{
"epoch": 3.272921108742004,
"grad_norm": 0.00034646817948669195,
"learning_rate": 1.7270788912579956e-05,
"loss": 0.0,
"step": 1535
},
{
"epoch": 3.283582089552239,
"grad_norm": 0.0005279065808281302,
"learning_rate": 1.716417910447761e-05,
"loss": 0.0,
"step": 1540
},
{
"epoch": 3.2942430703624734,
"grad_norm": 0.000364255829481408,
"learning_rate": 1.7057569296375266e-05,
"loss": 0.0,
"step": 1545
},
{
"epoch": 3.304904051172708,
"grad_norm": 0.0005003450205549598,
"learning_rate": 1.695095948827292e-05,
"loss": 0.0,
"step": 1550
},
{
"epoch": 3.3155650319829424,
"grad_norm": 0.0006611489225178957,
"learning_rate": 1.6844349680170575e-05,
"loss": 0.0,
"step": 1555
},
{
"epoch": 3.326226012793177,
"grad_norm": 0.0005188793293200433,
"learning_rate": 1.673773987206823e-05,
"loss": 0.0,
"step": 1560
},
{
"epoch": 3.3368869936034113,
"grad_norm": 0.0006000649882480502,
"learning_rate": 1.6631130063965885e-05,
"loss": 0.0,
"step": 1565
},
{
"epoch": 3.3475479744136463,
"grad_norm": 0.0005054918583482504,
"learning_rate": 1.652452025586354e-05,
"loss": 0.0,
"step": 1570
},
{
"epoch": 3.3582089552238807,
"grad_norm": 0.0035277619026601315,
"learning_rate": 1.6417910447761194e-05,
"loss": 0.0001,
"step": 1575
},
{
"epoch": 3.368869936034115,
"grad_norm": 0.0003567801904864609,
"learning_rate": 1.631130063965885e-05,
"loss": 0.0001,
"step": 1580
},
{
"epoch": 3.3795309168443497,
"grad_norm": 0.0004025339148938656,
"learning_rate": 1.6204690831556504e-05,
"loss": 0.0001,
"step": 1585
},
{
"epoch": 3.390191897654584,
"grad_norm": 0.000375748350052163,
"learning_rate": 1.6098081023454158e-05,
"loss": 0.0001,
"step": 1590
},
{
"epoch": 3.4008528784648187,
"grad_norm": 0.0003993652353528887,
"learning_rate": 1.5991471215351813e-05,
"loss": 0.0,
"step": 1595
},
{
"epoch": 3.411513859275053,
"grad_norm": 0.0018557051662355661,
"learning_rate": 1.5884861407249468e-05,
"loss": 0.0001,
"step": 1600
},
{
"epoch": 3.4221748400852876,
"grad_norm": 0.0005845366977155209,
"learning_rate": 1.5778251599147122e-05,
"loss": 0.0,
"step": 1605
},
{
"epoch": 3.4328358208955225,
"grad_norm": 0.03420143947005272,
"learning_rate": 1.5671641791044777e-05,
"loss": 0.0001,
"step": 1610
},
{
"epoch": 3.443496801705757,
"grad_norm": 0.0003313858760520816,
"learning_rate": 1.5565031982942432e-05,
"loss": 0.0,
"step": 1615
},
{
"epoch": 3.4541577825159915,
"grad_norm": 0.0003766450681723654,
"learning_rate": 1.5458422174840087e-05,
"loss": 0.0,
"step": 1620
},
{
"epoch": 3.464818763326226,
"grad_norm": 0.0003519877209328115,
"learning_rate": 1.535181236673774e-05,
"loss": 0.0001,
"step": 1625
},
{
"epoch": 3.4754797441364604,
"grad_norm": 0.0016817667055875063,
"learning_rate": 1.5245202558635396e-05,
"loss": 0.0002,
"step": 1630
},
{
"epoch": 3.486140724946695,
"grad_norm": 0.0003433997626416385,
"learning_rate": 1.5138592750533051e-05,
"loss": 0.0,
"step": 1635
},
{
"epoch": 3.49680170575693,
"grad_norm": 0.0012473594397306442,
"learning_rate": 1.5031982942430706e-05,
"loss": 0.0,
"step": 1640
},
{
"epoch": 3.5074626865671643,
"grad_norm": 0.0004658605030272156,
"learning_rate": 1.4925373134328357e-05,
"loss": 0.0,
"step": 1645
},
{
"epoch": 3.518123667377399,
"grad_norm": 0.0006873384700156748,
"learning_rate": 1.4818763326226012e-05,
"loss": 0.0,
"step": 1650
},
{
"epoch": 3.5287846481876333,
"grad_norm": 0.02435077726840973,
"learning_rate": 1.4712153518123666e-05,
"loss": 0.0001,
"step": 1655
},
{
"epoch": 3.5394456289978677,
"grad_norm": 0.0003650671278592199,
"learning_rate": 1.4605543710021321e-05,
"loss": 0.0,
"step": 1660
},
{
"epoch": 3.550106609808102,
"grad_norm": 0.000336544297169894,
"learning_rate": 1.4498933901918976e-05,
"loss": 0.0,
"step": 1665
},
{
"epoch": 3.5607675906183367,
"grad_norm": 0.0003511256945785135,
"learning_rate": 1.439232409381663e-05,
"loss": 0.0,
"step": 1670
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.002605132292956114,
"learning_rate": 1.4285714285714285e-05,
"loss": 0.0,
"step": 1675
},
{
"epoch": 3.582089552238806,
"grad_norm": 0.0006494534318335354,
"learning_rate": 1.417910447761194e-05,
"loss": 0.0001,
"step": 1680
},
{
"epoch": 3.5927505330490406,
"grad_norm": 0.00047712321975268424,
"learning_rate": 1.4072494669509595e-05,
"loss": 0.0,
"step": 1685
},
{
"epoch": 3.603411513859275,
"grad_norm": 0.0006122899940237403,
"learning_rate": 1.396588486140725e-05,
"loss": 0.0001,
"step": 1690
},
{
"epoch": 3.6140724946695095,
"grad_norm": 0.00035871515865437686,
"learning_rate": 1.3859275053304904e-05,
"loss": 0.0,
"step": 1695
},
{
"epoch": 3.624733475479744,
"grad_norm": 0.0012908672215417027,
"learning_rate": 1.3752665245202559e-05,
"loss": 0.0001,
"step": 1700
},
{
"epoch": 3.635394456289979,
"grad_norm": 0.0006462688907049596,
"learning_rate": 1.3646055437100214e-05,
"loss": 0.0,
"step": 1705
},
{
"epoch": 3.6460554371002134,
"grad_norm": 0.0005004721460863948,
"learning_rate": 1.3539445628997869e-05,
"loss": 0.0,
"step": 1710
},
{
"epoch": 3.656716417910448,
"grad_norm": 0.0004220051923766732,
"learning_rate": 1.3432835820895523e-05,
"loss": 0.0,
"step": 1715
},
{
"epoch": 3.6673773987206824,
"grad_norm": 0.0003302539116702974,
"learning_rate": 1.3326226012793178e-05,
"loss": 0.0,
"step": 1720
},
{
"epoch": 3.678038379530917,
"grad_norm": 0.0010951572330668569,
"learning_rate": 1.3219616204690833e-05,
"loss": 0.0,
"step": 1725
},
{
"epoch": 3.6886993603411513,
"grad_norm": 0.0003516291035339236,
"learning_rate": 1.3113006396588488e-05,
"loss": 0.0,
"step": 1730
},
{
"epoch": 3.699360341151386,
"grad_norm": 0.0003780345432460308,
"learning_rate": 1.3006396588486142e-05,
"loss": 0.0075,
"step": 1735
},
{
"epoch": 3.7100213219616203,
"grad_norm": 0.0003491029201541096,
"learning_rate": 1.2899786780383797e-05,
"loss": 0.0,
"step": 1740
},
{
"epoch": 3.7206823027718547,
"grad_norm": 0.00036451537744142115,
"learning_rate": 1.2793176972281452e-05,
"loss": 0.0,
"step": 1745
},
{
"epoch": 3.7313432835820897,
"grad_norm": 0.00037377155967988074,
"learning_rate": 1.2686567164179105e-05,
"loss": 0.0,
"step": 1750
},
{
"epoch": 3.742004264392324,
"grad_norm": 0.0003593664732761681,
"learning_rate": 1.257995735607676e-05,
"loss": 0.0001,
"step": 1755
},
{
"epoch": 3.7526652452025586,
"grad_norm": 0.0003434194077271968,
"learning_rate": 1.2473347547974414e-05,
"loss": 0.0,
"step": 1760
},
{
"epoch": 3.763326226012793,
"grad_norm": 0.0006098838057368994,
"learning_rate": 1.2366737739872069e-05,
"loss": 0.0,
"step": 1765
},
{
"epoch": 3.7739872068230276,
"grad_norm": 0.00044278433779254556,
"learning_rate": 1.2260127931769722e-05,
"loss": 0.0001,
"step": 1770
},
{
"epoch": 3.7846481876332625,
"grad_norm": 0.00032219523563981056,
"learning_rate": 1.2153518123667377e-05,
"loss": 0.0,
"step": 1775
},
{
"epoch": 3.795309168443497,
"grad_norm": 0.00039886031299829483,
"learning_rate": 1.2046908315565032e-05,
"loss": 0.0,
"step": 1780
},
{
"epoch": 3.8059701492537314,
"grad_norm": 0.0020189781207591295,
"learning_rate": 1.1940298507462686e-05,
"loss": 0.0,
"step": 1785
},
{
"epoch": 3.816631130063966,
"grad_norm": 0.0003333889471832663,
"learning_rate": 1.1833688699360341e-05,
"loss": 0.0001,
"step": 1790
},
{
"epoch": 3.8272921108742004,
"grad_norm": 0.0003438134153839201,
"learning_rate": 1.1727078891257996e-05,
"loss": 0.0,
"step": 1795
},
{
"epoch": 3.837953091684435,
"grad_norm": 0.00035095401108264923,
"learning_rate": 1.162046908315565e-05,
"loss": 0.0,
"step": 1800
},
{
"epoch": 3.8486140724946694,
"grad_norm": 0.0004062694206368178,
"learning_rate": 1.1513859275053305e-05,
"loss": 0.0029,
"step": 1805
},
{
"epoch": 3.859275053304904,
"grad_norm": 0.0004444060323294252,
"learning_rate": 1.140724946695096e-05,
"loss": 0.0,
"step": 1810
},
{
"epoch": 3.8699360341151388,
"grad_norm": 0.001018754206597805,
"learning_rate": 1.1300639658848615e-05,
"loss": 0.0017,
"step": 1815
},
{
"epoch": 3.8805970149253732,
"grad_norm": 0.0005008568405173719,
"learning_rate": 1.119402985074627e-05,
"loss": 0.0,
"step": 1820
},
{
"epoch": 3.8912579957356077,
"grad_norm": 0.0005286957020871341,
"learning_rate": 1.1087420042643924e-05,
"loss": 0.0,
"step": 1825
},
{
"epoch": 3.901918976545842,
"grad_norm": 0.0004799753660336137,
"learning_rate": 1.0980810234541579e-05,
"loss": 0.0,
"step": 1830
},
{
"epoch": 3.9125799573560767,
"grad_norm": 0.000426744285505265,
"learning_rate": 1.0874200426439234e-05,
"loss": 0.0,
"step": 1835
},
{
"epoch": 3.923240938166311,
"grad_norm": 0.00032224878668785095,
"learning_rate": 1.0767590618336887e-05,
"loss": 0.0,
"step": 1840
},
{
"epoch": 3.933901918976546,
"grad_norm": 1.7990508079528809,
"learning_rate": 1.0660980810234541e-05,
"loss": 0.0008,
"step": 1845
},
{
"epoch": 3.9445628997867805,
"grad_norm": 0.00035311843384988606,
"learning_rate": 1.0554371002132196e-05,
"loss": 0.0,
"step": 1850
},
{
"epoch": 3.955223880597015,
"grad_norm": 0.004169478081166744,
"learning_rate": 1.0447761194029851e-05,
"loss": 0.0,
"step": 1855
},
{
"epoch": 3.9658848614072495,
"grad_norm": 0.00043409838690422475,
"learning_rate": 1.0341151385927506e-05,
"loss": 0.0001,
"step": 1860
},
{
"epoch": 3.976545842217484,
"grad_norm": 0.0003393043880350888,
"learning_rate": 1.023454157782516e-05,
"loss": 0.0,
"step": 1865
},
{
"epoch": 3.9872068230277184,
"grad_norm": 0.0437081903219223,
"learning_rate": 1.0127931769722815e-05,
"loss": 0.0001,
"step": 1870
},
{
"epoch": 3.997867803837953,
"grad_norm": 0.01754571497440338,
"learning_rate": 1.002132196162047e-05,
"loss": 0.0001,
"step": 1875
},
{
"epoch": 4.0,
"eval_accuracy": 0.9829333333333333,
"eval_loss": 0.09427516162395477,
"eval_runtime": 16.813,
"eval_samples_per_second": 223.042,
"eval_steps_per_second": 7.018,
"step": 1876
},
{
"epoch": 4.008528784648187,
"grad_norm": 0.0003474506374914199,
"learning_rate": 9.914712153518125e-06,
"loss": 0.0,
"step": 1880
},
{
"epoch": 4.019189765458422,
"grad_norm": 0.00033483002334833145,
"learning_rate": 9.80810234541578e-06,
"loss": 0.0,
"step": 1885
},
{
"epoch": 4.029850746268656,
"grad_norm": 0.0003391946665942669,
"learning_rate": 9.701492537313434e-06,
"loss": 0.0,
"step": 1890
},
{
"epoch": 4.040511727078891,
"grad_norm": 0.0005056941299699247,
"learning_rate": 9.594882729211089e-06,
"loss": 0.0,
"step": 1895
},
{
"epoch": 4.051172707889126,
"grad_norm": 0.00033317593624815345,
"learning_rate": 9.488272921108744e-06,
"loss": 0.0,
"step": 1900
},
{
"epoch": 4.061833688699361,
"grad_norm": 0.00032516277860850096,
"learning_rate": 9.381663113006398e-06,
"loss": 0.0,
"step": 1905
},
{
"epoch": 4.072494669509595,
"grad_norm": 0.00035746689536608756,
"learning_rate": 9.275053304904053e-06,
"loss": 0.0,
"step": 1910
},
{
"epoch": 4.08315565031983,
"grad_norm": 0.0003913500695489347,
"learning_rate": 9.168443496801706e-06,
"loss": 0.0,
"step": 1915
},
{
"epoch": 4.093816631130064,
"grad_norm": 0.0008681740728206933,
"learning_rate": 9.06183368869936e-06,
"loss": 0.0,
"step": 1920
},
{
"epoch": 4.104477611940299,
"grad_norm": 0.0003178415645379573,
"learning_rate": 8.955223880597016e-06,
"loss": 0.0,
"step": 1925
},
{
"epoch": 4.115138592750533,
"grad_norm": 0.00032378282048739493,
"learning_rate": 8.84861407249467e-06,
"loss": 0.0,
"step": 1930
},
{
"epoch": 4.1257995735607675,
"grad_norm": 0.0004350518574938178,
"learning_rate": 8.742004264392323e-06,
"loss": 0.0,
"step": 1935
},
{
"epoch": 4.136460554371002,
"grad_norm": 0.00033244318910874426,
"learning_rate": 8.635394456289978e-06,
"loss": 0.0,
"step": 1940
},
{
"epoch": 4.1471215351812365,
"grad_norm": 0.00035036084591411054,
"learning_rate": 8.528784648187633e-06,
"loss": 0.0,
"step": 1945
},
{
"epoch": 4.157782515991471,
"grad_norm": 0.00031797256087884307,
"learning_rate": 8.422174840085288e-06,
"loss": 0.0,
"step": 1950
},
{
"epoch": 4.1684434968017055,
"grad_norm": 0.00035185704473406076,
"learning_rate": 8.315565031982942e-06,
"loss": 0.0,
"step": 1955
},
{
"epoch": 4.17910447761194,
"grad_norm": 0.0004108986759092659,
"learning_rate": 8.208955223880597e-06,
"loss": 0.0,
"step": 1960
},
|
|
{
|
|
"epoch": 4.189765458422174,
|
|
"grad_norm": 0.00036250168341211975,
|
|
"learning_rate": 8.102345415778252e-06,
|
|
"loss": 0.0,
|
|
"step": 1965
|
|
},
|
|
{
|
|
"epoch": 4.20042643923241,
|
|
"grad_norm": 0.00029882119270041585,
|
|
"learning_rate": 7.995735607675907e-06,
|
|
"loss": 0.0,
|
|
"step": 1970
|
|
},
|
|
{
|
|
"epoch": 4.211087420042644,
|
|
"grad_norm": 0.0003176250320393592,
|
|
"learning_rate": 7.889125799573561e-06,
|
|
"loss": 0.0,
|
|
"step": 1975
|
|
},
|
|
{
|
|
"epoch": 4.221748400852879,
|
|
"grad_norm": 0.00032078297226689756,
|
|
"learning_rate": 7.782515991471216e-06,
|
|
"loss": 0.0,
|
|
"step": 1980
|
|
},
|
|
{
|
|
"epoch": 4.232409381663113,
|
|
"grad_norm": 0.00134346354752779,
|
|
"learning_rate": 7.67590618336887e-06,
|
|
"loss": 0.0,
|
|
"step": 1985
|
|
},
|
|
{
|
|
"epoch": 4.243070362473348,
|
|
"grad_norm": 0.00032086981809698045,
|
|
"learning_rate": 7.5692963752665255e-06,
|
|
"loss": 0.0,
|
|
"step": 1990
|
|
},
|
|
{
|
|
"epoch": 4.253731343283582,
|
|
"grad_norm": 0.00033452690695412457,
|
|
"learning_rate": 7.4626865671641785e-06,
|
|
"loss": 0.0,
|
|
"step": 1995
|
|
},
|
|
{
|
|
"epoch": 4.264392324093817,
|
|
"grad_norm": 0.0003699937660712749,
|
|
"learning_rate": 7.356076759061833e-06,
|
|
"loss": 0.0,
|
|
"step": 2000
|
|
},
|
|
{
|
|
"epoch": 4.275053304904051,
|
|
"grad_norm": 0.0004910146235488355,
|
|
"learning_rate": 7.249466950959488e-06,
|
|
"loss": 0.0001,
|
|
"step": 2005
|
|
},
|
|
{
|
|
"epoch": 4.285714285714286,
|
|
"grad_norm": 0.0008930285694077611,
|
|
"learning_rate": 7.142857142857143e-06,
|
|
"loss": 0.0,
|
|
"step": 2010
|
|
},
|
|
{
|
|
"epoch": 4.29637526652452,
|
|
"grad_norm": 0.0005404684343375266,
|
|
"learning_rate": 7.0362473347547975e-06,
|
|
"loss": 0.0,
|
|
"step": 2015
|
|
},
|
|
{
|
|
"epoch": 4.3070362473347545,
|
|
"grad_norm": 0.0003300030075479299,
|
|
"learning_rate": 6.929637526652452e-06,
|
|
"loss": 0.0,
|
|
"step": 2020
|
|
},
|
|
{
|
|
"epoch": 4.317697228144989,
|
|
"grad_norm": 0.06439344584941864,
|
|
"learning_rate": 6.823027718550107e-06,
|
|
"loss": 0.0024,
|
|
"step": 2025
|
|
},
|
|
{
|
|
"epoch": 4.3283582089552235,
|
|
"grad_norm": 0.0003307462902739644,
|
|
"learning_rate": 6.716417910447762e-06,
|
|
"loss": 0.0,
|
|
"step": 2030
|
|
},
|
|
{
|
|
"epoch": 4.339019189765459,
|
|
"grad_norm": 0.00046611748985014856,
|
|
"learning_rate": 6.609808102345416e-06,
|
|
"loss": 0.0,
|
|
"step": 2035
|
|
},
|
|
{
|
|
"epoch": 4.349680170575693,
|
|
"grad_norm": 0.00035484781255945563,
|
|
"learning_rate": 6.503198294243071e-06,
|
|
"loss": 0.0,
|
|
"step": 2040
|
|
},
|
|
{
|
|
"epoch": 4.360341151385928,
|
|
"grad_norm": 0.0012688018614426255,
|
|
"learning_rate": 6.396588486140726e-06,
|
|
"loss": 0.0,
|
|
"step": 2045
|
|
},
|
|
{
|
|
"epoch": 4.371002132196162,
|
|
"grad_norm": 0.0003202289517503232,
|
|
"learning_rate": 6.28997867803838e-06,
|
|
"loss": 0.0,
|
|
"step": 2050
|
|
},
|
|
{
|
|
"epoch": 4.381663113006397,
|
|
"grad_norm": 0.00032230105716735125,
|
|
"learning_rate": 6.1833688699360345e-06,
|
|
"loss": 0.0,
|
|
"step": 2055
|
|
},
|
|
{
|
|
"epoch": 4.392324093816631,
|
|
"grad_norm": 0.00043376052053645253,
|
|
"learning_rate": 6.076759061833688e-06,
|
|
"loss": 0.0,
|
|
"step": 2060
|
|
},
|
|
{
|
|
"epoch": 4.402985074626866,
|
|
"grad_norm": 0.00033606207580305636,
|
|
"learning_rate": 5.970149253731343e-06,
|
|
"loss": 0.0,
|
|
"step": 2065
|
|
},
|
|
{
|
|
"epoch": 4.4136460554371,
|
|
"grad_norm": 0.000301430030958727,
|
|
"learning_rate": 5.863539445628998e-06,
|
|
"loss": 0.0,
|
|
"step": 2070
|
|
},
|
|
{
|
|
"epoch": 4.424307036247335,
|
|
"grad_norm": 0.0006231646402738988,
|
|
"learning_rate": 5.756929637526653e-06,
|
|
"loss": 0.0,
|
|
"step": 2075
|
|
},
|
|
{
|
|
"epoch": 4.434968017057569,
|
|
"grad_norm": 0.0005915539804846048,
|
|
"learning_rate": 5.650319829424307e-06,
|
|
"loss": 0.0,
|
|
"step": 2080
|
|
},
|
|
{
|
|
"epoch": 4.445628997867804,
|
|
"grad_norm": 0.00030837932717986405,
|
|
"learning_rate": 5.543710021321962e-06,
|
|
"loss": 0.0,
|
|
"step": 2085
|
|
},
|
|
{
|
|
"epoch": 4.456289978678038,
|
|
"grad_norm": 0.0003757126396521926,
|
|
"learning_rate": 5.437100213219617e-06,
|
|
"loss": 0.0,
|
|
"step": 2090
|
|
},
|
|
{
|
|
"epoch": 4.466950959488273,
|
|
"grad_norm": 0.0028115608729422092,
|
|
"learning_rate": 5.330490405117271e-06,
|
|
"loss": 0.0,
|
|
"step": 2095
|
|
},
|
|
{
|
|
"epoch": 4.477611940298507,
|
|
"grad_norm": 0.00030523879104293883,
|
|
"learning_rate": 5.2238805970149255e-06,
|
|
"loss": 0.0,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 4.4882729211087415,
|
|
"grad_norm": 0.0037852898240089417,
|
|
"learning_rate": 5.11727078891258e-06,
|
|
"loss": 0.0,
|
|
"step": 2105
|
|
},
|
|
{
|
|
"epoch": 4.498933901918977,
|
|
"grad_norm": 0.0003032777167391032,
|
|
"learning_rate": 5.010660980810235e-06,
|
|
"loss": 0.0,
|
|
"step": 2110
|
|
},
|
|
{
|
|
"epoch": 4.509594882729211,
|
|
"grad_norm": 0.0003362498537171632,
|
|
"learning_rate": 4.90405117270789e-06,
|
|
"loss": 0.0,
|
|
"step": 2115
|
|
},
|
|
{
|
|
"epoch": 4.520255863539446,
|
|
"grad_norm": 0.00030883800354786217,
|
|
"learning_rate": 4.797441364605544e-06,
|
|
"loss": 0.0,
|
|
"step": 2120
|
|
},
|
|
{
|
|
"epoch": 4.53091684434968,
|
|
"grad_norm": 0.0011556103127077222,
|
|
"learning_rate": 4.690831556503199e-06,
|
|
"loss": 0.0,
|
|
"step": 2125
|
|
},
|
|
{
|
|
"epoch": 4.541577825159915,
|
|
"grad_norm": 0.0004417916643433273,
|
|
"learning_rate": 4.584221748400853e-06,
|
|
"loss": 0.0,
|
|
"step": 2130
|
|
},
|
|
{
|
|
"epoch": 4.552238805970149,
|
|
"grad_norm": 0.0003299421805422753,
|
|
"learning_rate": 4.477611940298508e-06,
|
|
"loss": 0.0,
|
|
"step": 2135
|
|
},
|
|
{
|
|
"epoch": 4.562899786780384,
|
|
"grad_norm": 0.0003240181540604681,
|
|
"learning_rate": 4.371002132196162e-06,
|
|
"loss": 0.0,
|
|
"step": 2140
|
|
},
|
|
{
|
|
"epoch": 4.573560767590618,
|
|
"grad_norm": 0.000628639361821115,
|
|
"learning_rate": 4.264392324093816e-06,
|
|
"loss": 0.0,
|
|
"step": 2145
|
|
},
|
|
{
|
|
"epoch": 4.584221748400853,
|
|
"grad_norm": 0.0003082882903981954,
|
|
"learning_rate": 4.157782515991471e-06,
|
|
"loss": 0.0,
|
|
"step": 2150
|
|
},
|
|
{
|
|
"epoch": 4.594882729211087,
|
|
"grad_norm": 0.00030859513208270073,
|
|
"learning_rate": 4.051172707889126e-06,
|
|
"loss": 0.0,
|
|
"step": 2155
|
|
},
|
|
{
|
|
"epoch": 4.605543710021322,
|
|
"grad_norm": 0.0003443580062594265,
|
|
"learning_rate": 3.944562899786781e-06,
|
|
"loss": 0.0,
|
|
"step": 2160
|
|
},
|
|
{
|
|
"epoch": 4.616204690831556,
|
|
"grad_norm": 0.0015460433205589652,
|
|
"learning_rate": 3.837953091684435e-06,
|
|
"loss": 0.0,
|
|
"step": 2165
|
|
},
|
|
{
|
|
"epoch": 4.6268656716417915,
|
|
"grad_norm": 0.0014668882358819246,
|
|
"learning_rate": 3.7313432835820893e-06,
|
|
"loss": 0.0,
|
|
"step": 2170
|
|
},
|
|
{
|
|
"epoch": 4.637526652452026,
|
|
"grad_norm": 0.00031552041764371097,
|
|
"learning_rate": 3.624733475479744e-06,
|
|
"loss": 0.0,
|
|
"step": 2175
|
|
},
|
|
{
|
|
"epoch": 4.6481876332622605,
|
|
"grad_norm": 0.00031335154199041426,
|
|
"learning_rate": 3.5181236673773987e-06,
|
|
"loss": 0.0,
|
|
"step": 2180
|
|
},
|
|
{
|
|
"epoch": 4.658848614072495,
|
|
"grad_norm": 0.00035802609636448324,
|
|
"learning_rate": 3.4115138592750535e-06,
|
|
"loss": 0.0,
|
|
"step": 2185
|
|
},
|
|
{
|
|
"epoch": 4.669509594882729,
|
|
"grad_norm": 0.0003693062753882259,
|
|
"learning_rate": 3.304904051172708e-06,
|
|
"loss": 0.0,
|
|
"step": 2190
|
|
},
|
|
{
|
|
"epoch": 4.680170575692964,
|
|
"grad_norm": 0.00030029003391973674,
|
|
"learning_rate": 3.198294243070363e-06,
|
|
"loss": 0.0,
|
|
"step": 2195
|
|
},
|
|
{
|
|
"epoch": 4.690831556503198,
|
|
"grad_norm": 0.005750637035816908,
|
|
"learning_rate": 3.0916844349680173e-06,
|
|
"loss": 0.0,
|
|
"step": 2200
|
|
},
|
|
{
|
|
"epoch": 4.701492537313433,
|
|
"grad_norm": 0.000397185591282323,
|
|
"learning_rate": 2.9850746268656716e-06,
|
|
"loss": 0.0,
|
|
"step": 2205
|
|
},
|
|
{
|
|
"epoch": 4.712153518123667,
|
|
"grad_norm": 0.0004056187462992966,
|
|
"learning_rate": 2.8784648187633263e-06,
|
|
"loss": 0.0,
|
|
"step": 2210
|
|
},
|
|
{
|
|
"epoch": 4.722814498933902,
|
|
"grad_norm": 0.1374378502368927,
|
|
"learning_rate": 2.771855010660981e-06,
|
|
"loss": 0.0071,
|
|
"step": 2215
|
|
},
|
|
{
|
|
"epoch": 4.733475479744136,
|
|
"grad_norm": 0.0003161082568112761,
|
|
"learning_rate": 2.6652452025586354e-06,
|
|
"loss": 0.0,
|
|
"step": 2220
|
|
},
|
|
{
|
|
"epoch": 4.744136460554371,
|
|
"grad_norm": 0.00040993737638927996,
|
|
"learning_rate": 2.55863539445629e-06,
|
|
"loss": 0.0,
|
|
"step": 2225
|
|
},
|
|
{
|
|
"epoch": 4.754797441364605,
|
|
"grad_norm": 0.0005938154645264149,
|
|
"learning_rate": 2.452025586353945e-06,
|
|
"loss": 0.0,
|
|
"step": 2230
|
|
},
|
|
{
|
|
"epoch": 4.76545842217484,
|
|
"grad_norm": 0.00044926462578587234,
|
|
"learning_rate": 2.3454157782515996e-06,
|
|
"loss": 0.0,
|
|
"step": 2235
|
|
},
|
|
{
|
|
"epoch": 4.776119402985074,
|
|
"grad_norm": 0.0005122682196088135,
|
|
"learning_rate": 2.238805970149254e-06,
|
|
"loss": 0.0,
|
|
"step": 2240
|
|
},
|
|
{
|
|
"epoch": 4.786780383795309,
|
|
"grad_norm": 0.0003136317536700517,
|
|
"learning_rate": 2.132196162046908e-06,
|
|
"loss": 0.0,
|
|
"step": 2245
|
|
},
|
|
{
|
|
"epoch": 4.797441364605544,
|
|
"grad_norm": 0.00031330782803706825,
|
|
"learning_rate": 2.025586353944563e-06,
|
|
"loss": 0.0,
|
|
"step": 2250
|
|
},
|
|
{
|
|
"epoch": 4.8081023454157785,
|
|
"grad_norm": 0.0003872371162287891,
|
|
"learning_rate": 1.9189765458422177e-06,
|
|
"loss": 0.0,
|
|
"step": 2255
|
|
},
|
|
{
|
|
"epoch": 4.818763326226013,
|
|
"grad_norm": 0.00034078778116963804,
|
|
"learning_rate": 1.812366737739872e-06,
|
|
"loss": 0.0,
|
|
"step": 2260
|
|
},
|
|
{
|
|
"epoch": 4.8294243070362475,
|
|
"grad_norm": 0.0003631815197877586,
|
|
"learning_rate": 1.7057569296375267e-06,
|
|
"loss": 0.0,
|
|
"step": 2265
|
|
},
|
|
{
|
|
"epoch": 4.840085287846482,
|
|
"grad_norm": 0.0003130300319753587,
|
|
"learning_rate": 1.5991471215351815e-06,
|
|
"loss": 0.0,
|
|
"step": 2270
|
|
},
|
|
{
|
|
"epoch": 4.850746268656716,
|
|
"grad_norm": 0.0004666637978516519,
|
|
"learning_rate": 1.4925373134328358e-06,
|
|
"loss": 0.0,
|
|
"step": 2275
|
|
},
|
|
{
|
|
"epoch": 4.861407249466951,
|
|
"grad_norm": 0.00030576219432987273,
|
|
"learning_rate": 1.3859275053304905e-06,
|
|
"loss": 0.0,
|
|
"step": 2280
|
|
},
|
|
{
|
|
"epoch": 4.872068230277185,
|
|
"grad_norm": 0.0003064055345021188,
|
|
"learning_rate": 1.279317697228145e-06,
|
|
"loss": 0.0,
|
|
"step": 2285
|
|
},
|
|
{
|
|
"epoch": 4.88272921108742,
|
|
"grad_norm": 0.0003266089188400656,
|
|
"learning_rate": 1.1727078891257998e-06,
|
|
"loss": 0.0,
|
|
"step": 2290
|
|
},
|
|
{
|
|
"epoch": 4.893390191897654,
|
|
"grad_norm": 0.015440658666193485,
|
|
"learning_rate": 1.066098081023454e-06,
|
|
"loss": 0.0024,
|
|
"step": 2295
|
|
},
|
|
{
|
|
"epoch": 4.904051172707889,
|
|
"grad_norm": 0.0012533918488770723,
|
|
"learning_rate": 9.594882729211088e-07,
|
|
"loss": 0.0,
|
|
"step": 2300
|
|
},
|
|
{
|
|
"epoch": 4.914712153518123,
|
|
"grad_norm": 0.0003069853992201388,
|
|
"learning_rate": 8.528784648187634e-07,
|
|
"loss": 0.0,
|
|
"step": 2305
|
|
},
|
|
{
|
|
"epoch": 4.925373134328359,
|
|
"grad_norm": 0.00034220030647702515,
|
|
"learning_rate": 7.462686567164179e-07,
|
|
"loss": 0.0,
|
|
"step": 2310
|
|
},
|
|
{
|
|
"epoch": 4.936034115138593,
|
|
"grad_norm": 0.00047357421135529876,
|
|
"learning_rate": 6.396588486140725e-07,
|
|
"loss": 0.0,
|
|
"step": 2315
|
|
},
|
|
{
|
|
"epoch": 4.946695095948828,
|
|
"grad_norm": 0.0003225810651201755,
|
|
"learning_rate": 5.33049040511727e-07,
|
|
"loss": 0.0,
|
|
"step": 2320
|
|
},
|
|
{
|
|
"epoch": 4.957356076759062,
|
|
"grad_norm": 0.00030902717844583094,
|
|
"learning_rate": 4.264392324093817e-07,
|
|
"loss": 0.0,
|
|
"step": 2325
|
|
},
|
|
{
|
|
"epoch": 4.968017057569297,
|
|
"grad_norm": 0.00034685974242165685,
|
|
"learning_rate": 3.1982942430703626e-07,
|
|
"loss": 0.0,
|
|
"step": 2330
|
|
},
|
|
{
|
|
"epoch": 4.978678038379531,
|
|
"grad_norm": 0.000922052247915417,
|
|
"learning_rate": 2.1321961620469084e-07,
|
|
"loss": 0.0,
|
|
"step": 2335
|
|
},
|
|
{
|
|
"epoch": 4.9893390191897655,
|
|
"grad_norm": 0.0003126551164314151,
|
|
"learning_rate": 1.0660980810234542e-07,
|
|
"loss": 0.0,
|
|
"step": 2340
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"grad_norm": 0.0003404345770832151,
|
|
"learning_rate": 0.0,
|
|
"loss": 0.0,
|
|
"step": 2345
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"eval_accuracy": 0.9826666666666667,
|
|
"eval_loss": 0.09397382289171219,
|
|
"eval_runtime": 16.5788,
|
|
"eval_samples_per_second": 226.192,
|
|
"eval_steps_per_second": 7.118,
|
|
"step": 2345
|
|
},
|
|
{
|
|
"epoch": 5.0,
|
|
"step": 2345,
|
|
"total_flos": 5.8118992210944e+18,
|
|
"train_loss": 0.0029948701554321565,
|
|
"train_runtime": 796.4718,
|
|
"train_samples_per_second": 94.165,
|
|
"train_steps_per_second": 2.944
|
|
}
|
|
],
|
|
"logging_steps": 5,
|
|
"max_steps": 2345,
|
|
"num_input_tokens_seen": 0,
|
|
"num_train_epochs": 5,
|
|
"save_steps": 500,
|
|
"stateful_callbacks": {
|
|
"TrainerControl": {
|
|
"args": {
|
|
"should_epoch_stop": false,
|
|
"should_evaluate": false,
|
|
"should_log": false,
|
|
"should_save": true,
|
|
"should_training_stop": true
|
|
},
|
|
"attributes": {}
|
|
}
|
|
},
|
|
"total_flos": 5.8118992210944e+18,
|
|
"train_batch_size": 32,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|
|
|