{
  "best_metric": 0.038588594645261765,
  "best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles2_seed1_q3_DA/checkpoint-2345",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 2345,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010660980810234541,
      "grad_norm": 2.8042922019958496,
      "learning_rate": 4.989339019189766e-05,
      "loss": 0.5425,
      "step": 5
    },
    {
      "epoch": 0.021321961620469083,
      "grad_norm": 1.9163143634796143,
      "learning_rate": 4.978678038379531e-05,
      "loss": 0.3581,
      "step": 10
    },
    {
      "epoch": 0.031982942430703626,
      "grad_norm": 1.1642439365386963,
      "learning_rate": 4.9680170575692967e-05,
      "loss": 0.2258,
      "step": 15
    },
    {
      "epoch": 0.042643923240938165,
      "grad_norm": 1.3562175035476685,
      "learning_rate": 4.957356076759062e-05,
      "loss": 0.203,
      "step": 20
    },
    {
      "epoch": 0.053304904051172705,
      "grad_norm": 3.130192279815674,
      "learning_rate": 4.9466950959488276e-05,
      "loss": 0.1708,
      "step": 25
    },
    {
      "epoch": 0.06396588486140725,
      "grad_norm": 1.6631479263305664,
      "learning_rate": 4.936034115138593e-05,
      "loss": 0.144,
      "step": 30
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 5.913661479949951,
      "learning_rate": 4.9253731343283586e-05,
      "loss": 0.2868,
      "step": 35
    },
    {
      "epoch": 0.08528784648187633,
      "grad_norm": 0.66987144947052,
      "learning_rate": 4.914712153518124e-05,
      "loss": 0.1727,
      "step": 40
    },
    {
      "epoch": 0.09594882729211088,
      "grad_norm": 2.169769048690796,
      "learning_rate": 4.904051172707889e-05,
      "loss": 0.174,
      "step": 45
    },
    {
      "epoch": 0.10660980810234541,
      "grad_norm": 1.001565933227539,
      "learning_rate": 4.893390191897655e-05,
      "loss": 0.1249,
      "step": 50
    },
    {
      "epoch": 0.11727078891257996,
      "grad_norm": 0.36165979504585266,
      "learning_rate": 4.88272921108742e-05,
      "loss": 0.1297,
      "step": 55
    },
    {
      "epoch": 0.1279317697228145,
      "grad_norm": 4.5865888595581055,
      "learning_rate": 4.872068230277186e-05,
      "loss": 0.1437,
      "step": 60
    },
    {
      "epoch": 0.13859275053304904,
      "grad_norm": 1.7689142227172852,
      "learning_rate": 4.861407249466951e-05,
      "loss": 0.1153,
      "step": 65
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 1.0855820178985596,
      "learning_rate": 4.850746268656717e-05,
      "loss": 0.0845,
      "step": 70
    },
    {
      "epoch": 0.15991471215351813,
      "grad_norm": 1.0116578340530396,
      "learning_rate": 4.840085287846482e-05,
      "loss": 0.144,
      "step": 75
    },
    {
      "epoch": 0.17057569296375266,
      "grad_norm": 0.8972574472427368,
      "learning_rate": 4.829424307036248e-05,
      "loss": 0.1042,
      "step": 80
    },
    {
      "epoch": 0.1812366737739872,
      "grad_norm": 2.029723644256592,
      "learning_rate": 4.8187633262260126e-05,
      "loss": 0.1129,
      "step": 85
    },
    {
      "epoch": 0.19189765458422176,
      "grad_norm": 1.2260494232177734,
      "learning_rate": 4.808102345415779e-05,
      "loss": 0.0505,
      "step": 90
    },
    {
      "epoch": 0.2025586353944563,
      "grad_norm": 0.19438716769218445,
      "learning_rate": 4.7974413646055436e-05,
      "loss": 0.1133,
      "step": 95
    },
    {
      "epoch": 0.21321961620469082,
      "grad_norm": 0.6091874241828918,
      "learning_rate": 4.78678038379531e-05,
      "loss": 0.1099,
      "step": 100
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 2.3513033390045166,
      "learning_rate": 4.7761194029850745e-05,
      "loss": 0.0642,
      "step": 105
    },
    {
      "epoch": 0.2345415778251599,
      "grad_norm": 4.342105388641357,
      "learning_rate": 4.765458422174841e-05,
      "loss": 0.1093,
      "step": 110
    },
    {
      "epoch": 0.24520255863539445,
      "grad_norm": 0.46392691135406494,
      "learning_rate": 4.7547974413646055e-05,
      "loss": 0.0676,
      "step": 115
    },
    {
      "epoch": 0.255863539445629,
      "grad_norm": 2.171640634536743,
      "learning_rate": 4.7441364605543716e-05,
      "loss": 0.0439,
      "step": 120
    },
    {
      "epoch": 0.26652452025586354,
      "grad_norm": 5.224710464477539,
      "learning_rate": 4.7334754797441364e-05,
      "loss": 0.109,
      "step": 125
    },
    {
      "epoch": 0.2771855010660981,
      "grad_norm": 0.9319650530815125,
      "learning_rate": 4.7228144989339026e-05,
      "loss": 0.0552,
      "step": 130
    },
    {
      "epoch": 0.2878464818763326,
      "grad_norm": 2.031883478164673,
      "learning_rate": 4.7121535181236674e-05,
      "loss": 0.1078,
      "step": 135
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.20140865445137024,
      "learning_rate": 4.7014925373134335e-05,
      "loss": 0.0724,
      "step": 140
    },
    {
      "epoch": 0.3091684434968017,
      "grad_norm": 2.303995370864868,
      "learning_rate": 4.690831556503198e-05,
      "loss": 0.0992,
      "step": 145
    },
    {
      "epoch": 0.31982942430703626,
      "grad_norm": 2.2379705905914307,
      "learning_rate": 4.6801705756929645e-05,
      "loss": 0.0713,
      "step": 150
    },
    {
      "epoch": 0.3304904051172708,
      "grad_norm": 4.74453067779541,
      "learning_rate": 4.669509594882729e-05,
      "loss": 0.1487,
      "step": 155
    },
    {
      "epoch": 0.3411513859275053,
      "grad_norm": 2.5837318897247314,
      "learning_rate": 4.658848614072495e-05,
      "loss": 0.0929,
      "step": 160
    },
    {
      "epoch": 0.35181236673773986,
      "grad_norm": 0.9192020297050476,
      "learning_rate": 4.64818763326226e-05,
      "loss": 0.0501,
      "step": 165
    },
    {
      "epoch": 0.3624733475479744,
      "grad_norm": 3.86073637008667,
      "learning_rate": 4.637526652452026e-05,
      "loss": 0.1307,
      "step": 170
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 4.664398193359375,
      "learning_rate": 4.626865671641791e-05,
      "loss": 0.115,
      "step": 175
    },
    {
      "epoch": 0.3837953091684435,
      "grad_norm": 2.7233195304870605,
      "learning_rate": 4.6162046908315566e-05,
      "loss": 0.1197,
      "step": 180
    },
    {
      "epoch": 0.39445628997867804,
      "grad_norm": 9.226214408874512,
      "learning_rate": 4.605543710021322e-05,
      "loss": 0.0938,
      "step": 185
    },
    {
      "epoch": 0.4051172707889126,
      "grad_norm": 1.4643452167510986,
      "learning_rate": 4.5948827292110876e-05,
      "loss": 0.0717,
      "step": 190
    },
    {
      "epoch": 0.4157782515991471,
      "grad_norm": 0.6635888814926147,
      "learning_rate": 4.584221748400853e-05,
      "loss": 0.1031,
      "step": 195
    },
    {
      "epoch": 0.42643923240938164,
      "grad_norm": 2.135951519012451,
      "learning_rate": 4.5735607675906185e-05,
      "loss": 0.1272,
      "step": 200
    },
    {
      "epoch": 0.43710021321961623,
      "grad_norm": 2.27439284324646,
      "learning_rate": 4.562899786780384e-05,
      "loss": 0.1153,
      "step": 205
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 4.246057033538818,
      "learning_rate": 4.5522388059701495e-05,
      "loss": 0.1092,
      "step": 210
    },
    {
      "epoch": 0.4584221748400853,
      "grad_norm": 1.4236388206481934,
      "learning_rate": 4.541577825159915e-05,
      "loss": 0.0761,
      "step": 215
    },
    {
      "epoch": 0.4690831556503198,
      "grad_norm": 2.014106035232544,
      "learning_rate": 4.5309168443496804e-05,
      "loss": 0.1245,
      "step": 220
    },
    {
      "epoch": 0.47974413646055436,
      "grad_norm": 1.5154823064804077,
      "learning_rate": 4.520255863539446e-05,
      "loss": 0.1176,
      "step": 225
    },
    {
      "epoch": 0.4904051172707889,
      "grad_norm": 2.6291708946228027,
      "learning_rate": 4.5095948827292114e-05,
      "loss": 0.0835,
      "step": 230
    },
    {
      "epoch": 0.5010660980810234,
      "grad_norm": 3.739208698272705,
      "learning_rate": 4.498933901918977e-05,
      "loss": 0.1548,
      "step": 235
    },
    {
      "epoch": 0.511727078891258,
      "grad_norm": 1.9443986415863037,
      "learning_rate": 4.488272921108742e-05,
      "loss": 0.0861,
      "step": 240
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.17805545032024384,
      "learning_rate": 4.477611940298508e-05,
      "loss": 0.0995,
      "step": 245
    },
    {
      "epoch": 0.5330490405117271,
      "grad_norm": 0.7198059558868408,
      "learning_rate": 4.466950959488273e-05,
      "loss": 0.0787,
      "step": 250
    },
    {
      "epoch": 0.5437100213219617,
      "grad_norm": 1.8614362478256226,
      "learning_rate": 4.456289978678039e-05,
      "loss": 0.098,
      "step": 255
    },
    {
      "epoch": 0.5543710021321961,
      "grad_norm": 1.5462026596069336,
      "learning_rate": 4.445628997867804e-05,
      "loss": 0.1496,
      "step": 260
    },
    {
      "epoch": 0.5650319829424307,
      "grad_norm": 0.5799494385719299,
      "learning_rate": 4.43496801705757e-05,
      "loss": 0.0584,
      "step": 265
    },
    {
      "epoch": 0.5756929637526652,
      "grad_norm": 3.18220853805542,
      "learning_rate": 4.424307036247335e-05,
      "loss": 0.1034,
      "step": 270
    },
    {
      "epoch": 0.5863539445628998,
      "grad_norm": 4.199785232543945,
      "learning_rate": 4.4136460554371006e-05,
      "loss": 0.0639,
      "step": 275
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 2.5037028789520264,
      "learning_rate": 4.402985074626866e-05,
      "loss": 0.0634,
      "step": 280
    },
    {
      "epoch": 0.6076759061833689,
      "grad_norm": 0.2935068905353546,
      "learning_rate": 4.3923240938166316e-05,
      "loss": 0.0601,
      "step": 285
    },
    {
      "epoch": 0.6183368869936035,
      "grad_norm": 12.32214641571045,
      "learning_rate": 4.381663113006397e-05,
      "loss": 0.0861,
      "step": 290
    },
    {
      "epoch": 0.6289978678038379,
      "grad_norm": 1.500057578086853,
      "learning_rate": 4.3710021321961625e-05,
      "loss": 0.1321,
      "step": 295
    },
    {
      "epoch": 0.6396588486140725,
      "grad_norm": 1.154998540878296,
      "learning_rate": 4.360341151385928e-05,
      "loss": 0.0551,
      "step": 300
    },
    {
      "epoch": 0.650319829424307,
      "grad_norm": 2.5112106800079346,
      "learning_rate": 4.3496801705756935e-05,
      "loss": 0.119,
      "step": 305
    },
    {
      "epoch": 0.6609808102345416,
      "grad_norm": 0.4132208228111267,
      "learning_rate": 4.339019189765459e-05,
      "loss": 0.0777,
      "step": 310
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 1.6362305879592896,
      "learning_rate": 4.328358208955224e-05,
      "loss": 0.0739,
      "step": 315
    },
    {
      "epoch": 0.6823027718550106,
      "grad_norm": 4.30403470993042,
      "learning_rate": 4.31769722814499e-05,
      "loss": 0.079,
      "step": 320
    },
    {
      "epoch": 0.6929637526652452,
      "grad_norm": 0.5877914428710938,
      "learning_rate": 4.307036247334755e-05,
      "loss": 0.0861,
      "step": 325
    },
    {
      "epoch": 0.7036247334754797,
      "grad_norm": 3.309394121170044,
      "learning_rate": 4.29637526652452e-05,
      "loss": 0.1159,
      "step": 330
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 4.541442394256592,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.0843,
      "step": 335
    },
    {
      "epoch": 0.7249466950959488,
      "grad_norm": 1.9757214784622192,
      "learning_rate": 4.275053304904051e-05,
      "loss": 0.1209,
      "step": 340
    },
    {
      "epoch": 0.7356076759061834,
      "grad_norm": 4.3746442794799805,
      "learning_rate": 4.2643923240938166e-05,
      "loss": 0.1134,
      "step": 345
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 4.333628177642822,
      "learning_rate": 4.253731343283582e-05,
      "loss": 0.0663,
      "step": 350
    },
    {
      "epoch": 0.7569296375266524,
      "grad_norm": 1.4036465883255005,
      "learning_rate": 4.2430703624733475e-05,
      "loss": 0.0754,
      "step": 355
    },
    {
      "epoch": 0.767590618336887,
      "grad_norm": 0.24103765189647675,
      "learning_rate": 4.232409381663113e-05,
      "loss": 0.0795,
      "step": 360
    },
    {
      "epoch": 0.7782515991471215,
      "grad_norm": 0.09186755120754242,
      "learning_rate": 4.2217484008528785e-05,
      "loss": 0.0775,
      "step": 365
    },
    {
      "epoch": 0.7889125799573561,
      "grad_norm": 2.562626600265503,
      "learning_rate": 4.211087420042644e-05,
      "loss": 0.0784,
      "step": 370
    },
    {
      "epoch": 0.7995735607675906,
      "grad_norm": 2.9715356826782227,
      "learning_rate": 4.2004264392324094e-05,
      "loss": 0.0602,
      "step": 375
    },
    {
      "epoch": 0.8102345415778252,
      "grad_norm": 2.5202226638793945,
      "learning_rate": 4.189765458422175e-05,
      "loss": 0.0609,
      "step": 380
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 5.89683198928833,
      "learning_rate": 4.1791044776119404e-05,
      "loss": 0.1166,
      "step": 385
    },
    {
      "epoch": 0.8315565031982942,
      "grad_norm": 3.7403831481933594,
      "learning_rate": 4.168443496801706e-05,
      "loss": 0.1236,
      "step": 390
    },
    {
      "epoch": 0.8422174840085288,
      "grad_norm": 0.4880005419254303,
      "learning_rate": 4.157782515991471e-05,
      "loss": 0.0725,
      "step": 395
    },
    {
      "epoch": 0.8528784648187633,
      "grad_norm": 6.083738803863525,
      "learning_rate": 4.147121535181237e-05,
      "loss": 0.0646,
      "step": 400
    },
    {
      "epoch": 0.8635394456289979,
      "grad_norm": 6.959341049194336,
      "learning_rate": 4.136460554371002e-05,
      "loss": 0.1293,
      "step": 405
    },
    {
      "epoch": 0.8742004264392325,
      "grad_norm": 1.0812244415283203,
      "learning_rate": 4.125799573560768e-05,
      "loss": 0.0679,
      "step": 410
    },
    {
      "epoch": 0.8848614072494669,
      "grad_norm": 0.18147622048854828,
      "learning_rate": 4.115138592750533e-05,
      "loss": 0.0616,
      "step": 415
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.20879709720611572,
      "learning_rate": 4.104477611940299e-05,
      "loss": 0.0679,
      "step": 420
    },
    {
      "epoch": 0.906183368869936,
      "grad_norm": 2.961556911468506,
      "learning_rate": 4.093816631130064e-05,
      "loss": 0.0701,
      "step": 425
    },
    {
      "epoch": 0.9168443496801706,
      "grad_norm": 1.1815117597579956,
      "learning_rate": 4.0831556503198296e-05,
      "loss": 0.0342,
      "step": 430
    },
    {
      "epoch": 0.9275053304904051,
      "grad_norm": 0.8347243666648865,
      "learning_rate": 4.072494669509595e-05,
      "loss": 0.052,
      "step": 435
    },
    {
      "epoch": 0.9381663113006397,
      "grad_norm": 2.859471321105957,
      "learning_rate": 4.0618336886993606e-05,
      "loss": 0.0977,
      "step": 440
    },
    {
      "epoch": 0.9488272921108742,
      "grad_norm": 2.672069549560547,
      "learning_rate": 4.051172707889126e-05,
      "loss": 0.0503,
      "step": 445
    },
    {
      "epoch": 0.9594882729211087,
      "grad_norm": 3.173109769821167,
      "learning_rate": 4.0405117270788915e-05,
      "loss": 0.1618,
      "step": 450
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 1.0781461000442505,
      "learning_rate": 4.029850746268657e-05,
      "loss": 0.1305,
      "step": 455
    },
    {
      "epoch": 0.9808102345415778,
      "grad_norm": 4.129727840423584,
      "learning_rate": 4.0191897654584225e-05,
      "loss": 0.0723,
      "step": 460
    },
    {
      "epoch": 0.9914712153518124,
      "grad_norm": 0.6592491269111633,
      "learning_rate": 4.008528784648188e-05,
      "loss": 0.0572,
      "step": 465
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9858666666666667,
      "eval_loss": 0.04294070601463318,
      "eval_runtime": 19.7981,
      "eval_samples_per_second": 189.412,
      "eval_steps_per_second": 5.96,
      "step": 469
    },
    {
      "epoch": 1.0021321961620469,
      "grad_norm": 1.636838436126709,
      "learning_rate": 3.997867803837953e-05,
      "loss": 0.0539,
      "step": 470
    },
    {
      "epoch": 1.0127931769722816,
      "grad_norm": 0.18990933895111084,
      "learning_rate": 3.987206823027719e-05,
      "loss": 0.0649,
      "step": 475
    },
    {
      "epoch": 1.023454157782516,
      "grad_norm": 0.7702750563621521,
      "learning_rate": 3.976545842217484e-05,
      "loss": 0.059,
      "step": 480
    },
    {
      "epoch": 1.0341151385927505,
      "grad_norm": 3.786081314086914,
      "learning_rate": 3.96588486140725e-05,
      "loss": 0.077,
      "step": 485
    },
    {
      "epoch": 1.044776119402985,
      "grad_norm": 2.2191529273986816,
      "learning_rate": 3.9552238805970146e-05,
      "loss": 0.1533,
      "step": 490
    },
    {
      "epoch": 1.0554371002132197,
      "grad_norm": 4.093388557434082,
      "learning_rate": 3.944562899786781e-05,
      "loss": 0.0302,
      "step": 495
    },
    {
      "epoch": 1.0660980810234542,
      "grad_norm": 0.14937493205070496,
      "learning_rate": 3.9339019189765456e-05,
      "loss": 0.0485,
      "step": 500
    },
    {
      "epoch": 1.0767590618336886,
      "grad_norm": 4.016363143920898,
      "learning_rate": 3.923240938166312e-05,
      "loss": 0.0727,
      "step": 505
    },
    {
      "epoch": 1.0874200426439233,
      "grad_norm": 2.2727200984954834,
      "learning_rate": 3.9125799573560765e-05,
      "loss": 0.0934,
      "step": 510
    },
    {
      "epoch": 1.0980810234541578,
      "grad_norm": 0.23455174267292023,
      "learning_rate": 3.901918976545843e-05,
      "loss": 0.0556,
      "step": 515
    },
    {
      "epoch": 1.1087420042643923,
      "grad_norm": 4.3866119384765625,
      "learning_rate": 3.8912579957356075e-05,
      "loss": 0.0538,
      "step": 520
    },
    {
      "epoch": 1.1194029850746268,
      "grad_norm": 0.15548841655254364,
      "learning_rate": 3.8805970149253736e-05,
      "loss": 0.0584,
      "step": 525
    },
    {
      "epoch": 1.1300639658848615,
      "grad_norm": 0.5124216079711914,
      "learning_rate": 3.8699360341151384e-05,
      "loss": 0.0587,
      "step": 530
    },
    {
      "epoch": 1.140724946695096,
      "grad_norm": 3.2346091270446777,
      "learning_rate": 3.8592750533049046e-05,
      "loss": 0.0642,
      "step": 535
    },
    {
      "epoch": 1.1513859275053304,
      "grad_norm": 3.773242235183716,
      "learning_rate": 3.8486140724946694e-05,
      "loss": 0.0808,
      "step": 540
    },
    {
      "epoch": 1.1620469083155651,
      "grad_norm": 1.0450078248977661,
      "learning_rate": 3.8379530916844355e-05,
      "loss": 0.0969,
      "step": 545
    },
    {
      "epoch": 1.1727078891257996,
      "grad_norm": 5.721042633056641,
      "learning_rate": 3.8272921108742e-05,
      "loss": 0.0979,
      "step": 550
    },
    {
      "epoch": 1.183368869936034,
      "grad_norm": 1.990458369255066,
      "learning_rate": 3.8166311300639665e-05,
      "loss": 0.0453,
      "step": 555
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 3.758833169937134,
      "learning_rate": 3.805970149253731e-05,
      "loss": 0.0775,
      "step": 560
    },
    {
      "epoch": 1.2046908315565032,
      "grad_norm": 1.1600302457809448,
      "learning_rate": 3.7953091684434974e-05,
      "loss": 0.0612,
      "step": 565
    },
    {
      "epoch": 1.2153518123667377,
      "grad_norm": 0.3355506956577301,
      "learning_rate": 3.784648187633262e-05,
      "loss": 0.0321,
      "step": 570
    },
    {
      "epoch": 1.2260127931769722,
      "grad_norm": 0.8091686964035034,
      "learning_rate": 3.7739872068230284e-05,
      "loss": 0.0682,
      "step": 575
    },
    {
      "epoch": 1.236673773987207,
      "grad_norm": 5.0688252449035645,
      "learning_rate": 3.763326226012793e-05,
      "loss": 0.1095,
      "step": 580
    },
    {
      "epoch": 1.2473347547974414,
      "grad_norm": 5.048577785491943,
      "learning_rate": 3.752665245202559e-05,
      "loss": 0.0596,
      "step": 585
    },
    {
      "epoch": 1.2579957356076759,
      "grad_norm": 1.5879158973693848,
      "learning_rate": 3.742004264392324e-05,
      "loss": 0.0925,
      "step": 590
    },
    {
      "epoch": 1.2686567164179103,
      "grad_norm": 1.8203855752944946,
      "learning_rate": 3.73134328358209e-05,
      "loss": 0.1243,
      "step": 595
    },
    {
      "epoch": 1.279317697228145,
      "grad_norm": 0.13723811507225037,
      "learning_rate": 3.720682302771855e-05,
      "loss": 0.0649,
      "step": 600
    },
    {
      "epoch": 1.2899786780383795,
      "grad_norm": 0.5020030736923218,
      "learning_rate": 3.710021321961621e-05,
      "loss": 0.0429,
      "step": 605
    },
    {
      "epoch": 1.3006396588486142,
      "grad_norm": 0.6411632895469666,
      "learning_rate": 3.699360341151386e-05,
      "loss": 0.0509,
      "step": 610
    },
    {
      "epoch": 1.3113006396588487,
      "grad_norm": 1.8692152500152588,
      "learning_rate": 3.6886993603411515e-05,
      "loss": 0.0572,
      "step": 615
    },
    {
      "epoch": 1.3219616204690832,
      "grad_norm": 2.4140026569366455,
      "learning_rate": 3.678038379530917e-05,
      "loss": 0.0421,
      "step": 620
    },
    {
      "epoch": 1.3326226012793176,
      "grad_norm": 0.25005435943603516,
      "learning_rate": 3.6673773987206824e-05,
      "loss": 0.039,
      "step": 625
    },
    {
      "epoch": 1.3432835820895521,
      "grad_norm": 0.47146981954574585,
      "learning_rate": 3.656716417910448e-05,
      "loss": 0.1776,
      "step": 630
    },
    {
      "epoch": 1.3539445628997868,
      "grad_norm": 1.9525902271270752,
      "learning_rate": 3.6460554371002134e-05,
      "loss": 0.0942,
      "step": 635
    },
    {
      "epoch": 1.3646055437100213,
      "grad_norm": 0.7272979617118835,
      "learning_rate": 3.635394456289979e-05,
      "loss": 0.0619,
      "step": 640
    },
    {
      "epoch": 1.375266524520256,
      "grad_norm": 1.7880783081054688,
      "learning_rate": 3.624733475479744e-05,
      "loss": 0.0309,
      "step": 645
    },
    {
      "epoch": 1.3859275053304905,
      "grad_norm": 0.528110146522522,
      "learning_rate": 3.61407249466951e-05,
      "loss": 0.1393,
      "step": 650
    },
    {
      "epoch": 1.396588486140725,
      "grad_norm": 3.894486427307129,
      "learning_rate": 3.603411513859275e-05,
      "loss": 0.0785,
      "step": 655
    },
    {
      "epoch": 1.4072494669509594,
      "grad_norm": 1.0912986993789673,
      "learning_rate": 3.592750533049041e-05,
      "loss": 0.0438,
      "step": 660
    },
    {
      "epoch": 1.417910447761194,
      "grad_norm": 3.0003552436828613,
      "learning_rate": 3.582089552238806e-05,
      "loss": 0.0687,
      "step": 665
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 2.5522620677948,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.0591,
      "step": 670
    },
    {
      "epoch": 1.439232409381663,
      "grad_norm": 0.5077835917472839,
      "learning_rate": 3.560767590618337e-05,
      "loss": 0.0726,
      "step": 675
    },
    {
      "epoch": 1.4498933901918978,
      "grad_norm": 4.259483814239502,
      "learning_rate": 3.5501066098081026e-05,
      "loss": 0.1002,
      "step": 680
    },
    {
      "epoch": 1.4605543710021323,
      "grad_norm": 0.09785676747560501,
      "learning_rate": 3.539445628997868e-05,
      "loss": 0.0582,
      "step": 685
    },
    {
      "epoch": 1.4712153518123667,
      "grad_norm": 2.750432252883911,
      "learning_rate": 3.5287846481876336e-05,
      "loss": 0.0955,
      "step": 690
    },
    {
      "epoch": 1.4818763326226012,
      "grad_norm": 0.5293856263160706,
      "learning_rate": 3.518123667377399e-05,
      "loss": 0.0308,
      "step": 695
    },
    {
      "epoch": 1.4925373134328357,
      "grad_norm": 0.05988016724586487,
      "learning_rate": 3.5074626865671645e-05,
      "loss": 0.019,
      "step": 700
    },
    {
      "epoch": 1.5031982942430704,
      "grad_norm": 2.2935566902160645,
      "learning_rate": 3.496801705756929e-05,
      "loss": 0.1193,
      "step": 705
    },
    {
      "epoch": 1.5138592750533049,
      "grad_norm": 0.10816150158643723,
      "learning_rate": 3.4861407249466955e-05,
      "loss": 0.1045,
      "step": 710
    },
    {
      "epoch": 1.5245202558635396,
      "grad_norm": 2.53214693069458,
      "learning_rate": 3.47547974413646e-05,
      "loss": 0.0829,
      "step": 715
    },
    {
      "epoch": 1.535181236673774,
      "grad_norm": 1.9416873455047607,
      "learning_rate": 3.4648187633262264e-05,
      "loss": 0.0897,
      "step": 720
    },
    {
      "epoch": 1.5458422174840085,
      "grad_norm": 1.4472146034240723,
      "learning_rate": 3.454157782515991e-05,
      "loss": 0.0584,
      "step": 725
    },
    {
      "epoch": 1.556503198294243,
      "grad_norm": 2.7857182025909424,
      "learning_rate": 3.4434968017057574e-05,
      "loss": 0.0889,
      "step": 730
    },
    {
      "epoch": 1.5671641791044775,
      "grad_norm": 0.8759065866470337,
      "learning_rate": 3.432835820895522e-05,
      "loss": 0.0361,
      "step": 735
    },
    {
      "epoch": 1.5778251599147122,
      "grad_norm": 0.28077009320259094,
      "learning_rate": 3.422174840085288e-05,
      "loss": 0.0419,
      "step": 740
    },
    {
      "epoch": 1.5884861407249466,
      "grad_norm": 0.19267123937606812,
      "learning_rate": 3.411513859275053e-05,
      "loss": 0.1,
      "step": 745
    },
    {
      "epoch": 1.5991471215351813,
      "grad_norm": 2.6972272396087646,
      "learning_rate": 3.400852878464819e-05,
      "loss": 0.0562,
      "step": 750
    },
    {
      "epoch": 1.6098081023454158,
      "grad_norm": 3.566300868988037,
      "learning_rate": 3.390191897654584e-05,
      "loss": 0.0526,
      "step": 755
    },
    {
      "epoch": 1.6204690831556503,
      "grad_norm": 0.6369922161102295,
      "learning_rate": 3.37953091684435e-05,
      "loss": 0.057,
      "step": 760
    },
    {
      "epoch": 1.6311300639658848,
      "grad_norm": 2.6664175987243652,
      "learning_rate": 3.368869936034115e-05,
      "loss": 0.1061,
      "step": 765
    },
    {
      "epoch": 1.6417910447761195,
      "grad_norm": 1.07101309299469,
      "learning_rate": 3.358208955223881e-05,
      "loss": 0.0417,
      "step": 770
    },
    {
      "epoch": 1.652452025586354,
      "grad_norm": 2.0522356033325195,
      "learning_rate": 3.347547974413646e-05,
      "loss": 0.0872,
      "step": 775
    },
    {
      "epoch": 1.6631130063965884,
      "grad_norm": 4.085935115814209,
      "learning_rate": 3.336886993603412e-05,
      "loss": 0.0856,
      "step": 780
    },
    {
      "epoch": 1.6737739872068231,
      "grad_norm": 3.9453682899475098,
      "learning_rate": 3.326226012793177e-05,
      "loss": 0.0507,
      "step": 785
    },
    {
      "epoch": 1.6844349680170576,
      "grad_norm": 1.2206450700759888,
      "learning_rate": 3.3155650319829424e-05,
      "loss": 0.0855,
      "step": 790
    },
    {
      "epoch": 1.695095948827292,
      "grad_norm": 3.93784761428833,
      "learning_rate": 3.304904051172708e-05,
      "loss": 0.0568,
      "step": 795
    },
    {
      "epoch": 1.7057569296375266,
      "grad_norm": 0.3546872138977051,
      "learning_rate": 3.294243070362473e-05,
      "loss": 0.0236,
      "step": 800
    },
    {
      "epoch": 1.716417910447761,
      "grad_norm": 2.8468477725982666,
      "learning_rate": 3.283582089552239e-05,
      "loss": 0.0719,
      "step": 805
    },
    {
      "epoch": 1.7270788912579957,
      "grad_norm": 0.3439815044403076,
      "learning_rate": 3.272921108742004e-05,
      "loss": 0.0712,
      "step": 810
    },
    {
      "epoch": 1.7377398720682304,
      "grad_norm": 0.5323034524917603,
      "learning_rate": 3.26226012793177e-05,
      "loss": 0.0411,
      "step": 815
    },
    {
      "epoch": 1.748400852878465,
      "grad_norm": 0.1601293534040451,
      "learning_rate": 3.251599147121535e-05,
      "loss": 0.0745,
      "step": 820
    },
    {
      "epoch": 1.7590618336886994,
      "grad_norm": 3.148142099380493,
      "learning_rate": 3.240938166311301e-05,
      "loss": 0.0619,
      "step": 825
    },
    {
      "epoch": 1.7697228144989339,
      "grad_norm": 0.6774784922599792,
      "learning_rate": 3.230277185501066e-05,
      "loss": 0.0483,
      "step": 830
    },
    {
      "epoch": 1.7803837953091683,
      "grad_norm": 6.595566272735596,
      "learning_rate": 3.2196162046908317e-05,
      "loss": 0.0747,
      "step": 835
    },
    {
      "epoch": 1.7910447761194028,
      "grad_norm": 7.630441188812256,
      "learning_rate": 3.208955223880597e-05,
      "loss": 0.0503,
      "step": 840
    },
    {
      "epoch": 1.8017057569296375,
      "grad_norm": 3.3056442737579346,
      "learning_rate": 3.1982942430703626e-05,
      "loss": 0.1111,
      "step": 845
    },
    {
      "epoch": 1.8123667377398722,
      "grad_norm": 1.4347069263458252,
      "learning_rate": 3.187633262260128e-05,
      "loss": 0.087,
      "step": 850
    },
    {
      "epoch": 1.8230277185501067,
      "grad_norm": 6.892719745635986,
      "learning_rate": 3.1769722814498935e-05,
      "loss": 0.052,
      "step": 855
    },
    {
      "epoch": 1.8336886993603412,
      "grad_norm": 0.9510917067527771,
      "learning_rate": 3.166311300639659e-05,
      "loss": 0.0264,
      "step": 860
    },
    {
      "epoch": 1.8443496801705757,
      "grad_norm": 3.8203604221343994,
      "learning_rate": 3.1556503198294245e-05,
      "loss": 0.0844,
      "step": 865
    },
    {
      "epoch": 1.8550106609808101,
      "grad_norm": 0.4166005551815033,
      "learning_rate": 3.14498933901919e-05,
      "loss": 0.0374,
      "step": 870
    },
    {
      "epoch": 1.8656716417910446,
      "grad_norm": 0.13386821746826172,
      "learning_rate": 3.1343283582089554e-05,
      "loss": 0.0198,
      "step": 875
    },
    {
      "epoch": 1.8763326226012793,
      "grad_norm": 3.634406566619873,
      "learning_rate": 3.123667377398721e-05,
      "loss": 0.0703,
      "step": 880
    },
    {
      "epoch": 1.886993603411514,
      "grad_norm": 2.554532289505005,
      "learning_rate": 3.1130063965884864e-05,
      "loss": 0.0477,
      "step": 885
    },
    {
      "epoch": 1.8976545842217485,
      "grad_norm": 6.829390048980713,
      "learning_rate": 3.102345415778252e-05,
      "loss": 0.0763,
      "step": 890
    },
    {
      "epoch": 1.908315565031983,
      "grad_norm": 3.2969565391540527,
      "learning_rate": 3.0916844349680173e-05,
      "loss": 0.0703,
      "step": 895
    },
    {
      "epoch": 1.9189765458422174,
      "grad_norm": 4.583891868591309,
      "learning_rate": 3.081023454157783e-05,
      "loss": 0.0329,
      "step": 900
    },
    {
      "epoch": 1.929637526652452,
      "grad_norm": 1.4368081092834473,
      "learning_rate": 3.070362473347548e-05,
      "loss": 0.0596,
      "step": 905
    },
    {
      "epoch": 1.9402985074626866,
      "grad_norm": 0.1616012156009674,
      "learning_rate": 3.059701492537314e-05,
      "loss": 0.0837,
      "step": 910
    },
    {
      "epoch": 1.950959488272921,
      "grad_norm": 0.15070191025733948,
      "learning_rate": 3.0490405117270792e-05,
      "loss": 0.058,
      "step": 915
    },
    {
      "epoch": 1.9616204690831558,
      "grad_norm": 0.8148602247238159,
      "learning_rate": 3.0383795309168444e-05,
      "loss": 0.0594,
      "step": 920
    },
    {
      "epoch": 1.9722814498933903,
      "grad_norm": 0.44962525367736816,
      "learning_rate": 3.0277185501066102e-05,
      "loss": 0.0896,
      "step": 925
    },
    {
      "epoch": 1.9829424307036247,
      "grad_norm": 3.669036626815796,
      "learning_rate": 3.0170575692963753e-05,
      "loss": 0.0987,
      "step": 930
    },
    {
      "epoch": 1.9936034115138592,
      "grad_norm": 4.995541572570801,
      "learning_rate": 3.006396588486141e-05,
      "loss": 0.1224,
      "step": 935
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.984,
      "eval_loss": 0.049605656415224075,
      "eval_runtime": 19.8467,
      "eval_samples_per_second": 188.949,
      "eval_steps_per_second": 5.946,
      "step": 938
    },
    {
      "epoch": 2.0042643923240937,
      "grad_norm": 0.29805323481559753,
      "learning_rate": 2.9957356076759063e-05,
      "loss": 0.0468,
      "step": 940
    },
    {
      "epoch": 2.014925373134328,
      "grad_norm": 0.7967060804367065,
      "learning_rate": 2.9850746268656714e-05,
      "loss": 0.0195,
      "step": 945
    },
    {
      "epoch": 2.025586353944563,
      "grad_norm": 3.673015832901001,
      "learning_rate": 2.9744136460554372e-05,
      "loss": 0.0407,
      "step": 950
    },
    {
      "epoch": 2.0362473347547976,
      "grad_norm": 2.1294305324554443,
      "learning_rate": 2.9637526652452023e-05,
      "loss": 0.0503,
      "step": 955
    },
    {
      "epoch": 2.046908315565032,
      "grad_norm": 2.847761869430542,
      "learning_rate": 2.953091684434968e-05,
      "loss": 0.0426,
      "step": 960
    },
    {
      "epoch": 2.0575692963752665,
      "grad_norm": 0.8906169533729553,
      "learning_rate": 2.9424307036247333e-05,
      "loss": 0.0801,
      "step": 965
    },
    {
      "epoch": 2.068230277185501,
      "grad_norm": 0.33497995138168335,
      "learning_rate": 2.931769722814499e-05,
      "loss": 0.0485,
      "step": 970
    },
    {
      "epoch": 2.0788912579957355,
      "grad_norm": 0.23288927972316742,
      "learning_rate": 2.9211087420042642e-05,
      "loss": 0.04,
      "step": 975
    },
    {
      "epoch": 2.08955223880597,
      "grad_norm": 4.505132675170898,
      "learning_rate": 2.91044776119403e-05,
      "loss": 0.0931,
      "step": 980
    },
    {
      "epoch": 2.100213219616205,
      "grad_norm": 0.5373914241790771,
      "learning_rate": 2.8997867803837952e-05,
      "loss": 0.0267,
      "step": 985
    },
    {
      "epoch": 2.1108742004264394,
      "grad_norm": 4.8163533210754395,
      "learning_rate": 2.889125799573561e-05,
      "loss": 0.0821,
      "step": 990
    },
    {
      "epoch": 2.121535181236674,
      "grad_norm": 0.11990977823734283,
      "learning_rate": 2.878464818763326e-05,
      "loss": 0.0666,
      "step": 995
    },
    {
      "epoch": 2.1321961620469083,
      "grad_norm": 0.7920727133750916,
      "learning_rate": 2.867803837953092e-05,
      "loss": 0.0401,
      "step": 1000
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.0751320868730545,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.0443,
      "step": 1005
    },
    {
      "epoch": 2.1535181236673773,
      "grad_norm": 4.964386463165283,
      "learning_rate": 2.846481876332623e-05,
      "loss": 0.0413,
      "step": 1010
    },
    {
      "epoch": 2.1641791044776117,
      "grad_norm": 0.4451679289340973,
      "learning_rate": 2.835820895522388e-05,
      "loss": 0.0628,
      "step": 1015
    },
    {
      "epoch": 2.1748400852878467,
      "grad_norm": 1.4509568214416504,
      "learning_rate": 2.825159914712154e-05,
      "loss": 0.0527,
      "step": 1020
    },
    {
      "epoch": 2.185501066098081,
      "grad_norm": 1.2620441913604736,
      "learning_rate": 2.814498933901919e-05,
      "loss": 0.0829,
      "step": 1025
    },
    {
      "epoch": 2.1961620469083156,
      "grad_norm": 1.0130614042282104,
      "learning_rate": 2.8038379530916848e-05,
      "loss": 0.0352,
      "step": 1030
    },
    {
      "epoch": 2.20682302771855,
      "grad_norm": 0.8870302438735962,
      "learning_rate": 2.79317697228145e-05,
      "loss": 0.05,
      "step": 1035
    },
    {
      "epoch": 2.2174840085287846,
      "grad_norm": 0.031111275777220726,
      "learning_rate": 2.7825159914712157e-05,
      "loss": 0.0157,
      "step": 1040
    },
    {
      "epoch": 2.228144989339019,
      "grad_norm": 3.1758933067321777,
      "learning_rate": 2.771855010660981e-05,
      "loss": 0.0515,
      "step": 1045
    },
    {
      "epoch": 2.2388059701492535,
      "grad_norm": 0.6015059947967529,
      "learning_rate": 2.7611940298507467e-05,
      "loss": 0.1155,
      "step": 1050
    },
    {
      "epoch": 2.2494669509594885,
      "grad_norm": 0.3003462851047516,
      "learning_rate": 2.7505330490405118e-05,
      "loss": 0.0057,
      "step": 1055
    },
    {
      "epoch": 2.260127931769723,
      "grad_norm": 1.8147363662719727,
      "learning_rate": 2.7398720682302776e-05,
      "loss": 0.0878,
      "step": 1060
    },
    {
      "epoch": 2.2707889125799574,
      "grad_norm": 7.494335174560547,
      "learning_rate": 2.7292110874200428e-05,
      "loss": 0.0947,
      "step": 1065
    },
    {
      "epoch": 2.281449893390192,
      "grad_norm": 0.09985765069723129,
      "learning_rate": 2.7185501066098086e-05,
      "loss": 0.0563,
      "step": 1070
    },
    {
      "epoch": 2.2921108742004264,
      "grad_norm": 2.6674981117248535,
      "learning_rate": 2.7078891257995737e-05,
      "loss": 0.0733,
      "step": 1075
    },
    {
      "epoch": 2.302771855010661,
      "grad_norm": 0.1115797609090805,
      "learning_rate": 2.6972281449893395e-05,
      "loss": 0.0865,
      "step": 1080
    },
    {
      "epoch": 2.3134328358208958,
      "grad_norm": 0.6754827499389648,
      "learning_rate": 2.6865671641791047e-05,
      "loss": 0.0647,
      "step": 1085
    },
    {
      "epoch": 2.3240938166311302,
      "grad_norm": 1.852326512336731,
      "learning_rate": 2.6759061833688705e-05,
      "loss": 0.0941,
      "step": 1090
    },
    {
      "epoch": 2.3347547974413647,
      "grad_norm": 3.0763440132141113,
      "learning_rate": 2.6652452025586356e-05,
      "loss": 0.0351,
      "step": 1095
    },
    {
      "epoch": 2.345415778251599,
      "grad_norm": 2.8727152347564697,
      "learning_rate": 2.6545842217484007e-05,
      "loss": 0.079,
      "step": 1100
    },
    {
      "epoch": 2.3560767590618337,
      "grad_norm": 3.065932512283325,
      "learning_rate": 2.6439232409381666e-05,
      "loss": 0.051,
      "step": 1105
    },
    {
      "epoch": 2.366737739872068,
      "grad_norm": 0.10097462683916092,
      "learning_rate": 2.6332622601279317e-05,
      "loss": 0.0187,
      "step": 1110
    },
    {
      "epoch": 2.3773987206823026,
      "grad_norm": 1.5885852575302124,
      "learning_rate": 2.6226012793176975e-05,
      "loss": 0.017,
      "step": 1115
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 1.9868512153625488,
      "learning_rate": 2.6119402985074626e-05,
      "loss": 0.0587,
      "step": 1120
    },
    {
      "epoch": 2.398720682302772,
      "grad_norm": 0.41781434416770935,
      "learning_rate": 2.6012793176972285e-05,
      "loss": 0.0209,
      "step": 1125
    },
    {
      "epoch": 2.4093816631130065,
      "grad_norm": 0.2237754613161087,
      "learning_rate": 2.5906183368869936e-05,
      "loss": 0.0295,
      "step": 1130
    },
    {
      "epoch": 2.420042643923241,
      "grad_norm": 0.02059091068804264,
      "learning_rate": 2.5799573560767594e-05,
      "loss": 0.026,
      "step": 1135
    },
    {
      "epoch": 2.4307036247334755,
      "grad_norm": 0.08942164480686188,
      "learning_rate": 2.5692963752665245e-05,
      "loss": 0.0304,
      "step": 1140
    },
    {
      "epoch": 2.44136460554371,
      "grad_norm": 1.2203013896942139,
      "learning_rate": 2.5586353944562904e-05,
      "loss": 0.0576,
      "step": 1145
    },
    {
      "epoch": 2.4520255863539444,
      "grad_norm": 0.7167838215827942,
      "learning_rate": 2.5479744136460555e-05,
      "loss": 0.0333,
      "step": 1150
    },
    {
      "epoch": 2.4626865671641793,
      "grad_norm": 0.1730741411447525,
      "learning_rate": 2.537313432835821e-05,
      "loss": 0.0461,
      "step": 1155
    },
    {
      "epoch": 2.473347547974414,
      "grad_norm": 2.8202412128448486,
      "learning_rate": 2.5266524520255864e-05,
      "loss": 0.0592,
      "step": 1160
    },
    {
      "epoch": 2.4840085287846483,
      "grad_norm": 0.03754109889268875,
      "learning_rate": 2.515991471215352e-05,
      "loss": 0.0429,
      "step": 1165
    },
    {
      "epoch": 2.4946695095948828,
      "grad_norm": 0.10562442243099213,
      "learning_rate": 2.5053304904051174e-05,
      "loss": 0.0691,
      "step": 1170
    },
    {
      "epoch": 2.5053304904051172,
      "grad_norm": 0.019232243299484253,
      "learning_rate": 2.494669509594883e-05,
      "loss": 0.0315,
      "step": 1175
    },
    {
      "epoch": 2.5159914712153517,
      "grad_norm": 3.511117696762085,
      "learning_rate": 2.4840085287846483e-05,
      "loss": 0.1139,
      "step": 1180
    },
    {
      "epoch": 2.526652452025586,
      "grad_norm": 4.489925861358643,
      "learning_rate": 2.4733475479744138e-05,
      "loss": 0.069,
      "step": 1185
    },
    {
      "epoch": 2.5373134328358207,
      "grad_norm": 6.046106338500977,
      "learning_rate": 2.4626865671641793e-05,
      "loss": 0.0833,
      "step": 1190
    },
    {
      "epoch": 2.5479744136460556,
      "grad_norm": 0.6252923011779785,
      "learning_rate": 2.4520255863539444e-05,
      "loss": 0.0382,
      "step": 1195
    },
    {
      "epoch": 2.55863539445629,
      "grad_norm": 1.6278384923934937,
      "learning_rate": 2.44136460554371e-05,
      "loss": 0.0175,
      "step": 1200
    },
    {
      "epoch": 2.5692963752665245,
      "grad_norm": 0.20935571193695068,
      "learning_rate": 2.4307036247334754e-05,
      "loss": 0.0076,
      "step": 1205
    },
    {
      "epoch": 2.579957356076759,
      "grad_norm": 0.032994162291288376,
      "learning_rate": 2.420042643923241e-05,
      "loss": 0.0053,
      "step": 1210
    },
    {
      "epoch": 2.5906183368869935,
      "grad_norm": 1.09673011302948,
      "learning_rate": 2.4093816631130063e-05,
      "loss": 0.0372,
      "step": 1215
    },
    {
      "epoch": 2.6012793176972284,
      "grad_norm": 0.8167636394500732,
      "learning_rate": 2.3987206823027718e-05,
      "loss": 0.0655,
      "step": 1220
    },
    {
      "epoch": 2.611940298507463,
      "grad_norm": 1.9008265733718872,
      "learning_rate": 2.3880597014925373e-05,
      "loss": 0.0216,
      "step": 1225
    },
    {
      "epoch": 2.6226012793176974,
      "grad_norm": 2.796604871749878,
      "learning_rate": 2.3773987206823027e-05,
      "loss": 0.0455,
      "step": 1230
    },
    {
      "epoch": 2.633262260127932,
      "grad_norm": 0.0798131674528122,
      "learning_rate": 2.3667377398720682e-05,
      "loss": 0.0823,
      "step": 1235
    },
    {
      "epoch": 2.6439232409381663,
      "grad_norm": 3.7637686729431152,
      "learning_rate": 2.3560767590618337e-05,
      "loss": 0.0713,
      "step": 1240
    },
    {
      "epoch": 2.654584221748401,
      "grad_norm": 3.1797375679016113,
      "learning_rate": 2.345415778251599e-05,
      "loss": 0.1347,
      "step": 1245
    },
    {
      "epoch": 2.6652452025586353,
      "grad_norm": 1.2994327545166016,
      "learning_rate": 2.3347547974413646e-05,
      "loss": 0.0275,
      "step": 1250
    },
    {
      "epoch": 2.6759061833688698,
      "grad_norm": 4.0164408683776855,
      "learning_rate": 2.32409381663113e-05,
      "loss": 0.0371,
      "step": 1255
    },
    {
      "epoch": 2.6865671641791042,
      "grad_norm": 0.05566682294011116,
      "learning_rate": 2.3134328358208956e-05,
      "loss": 0.0047,
      "step": 1260
    },
    {
      "epoch": 2.697228144989339,
      "grad_norm": 2.674593925476074,
      "learning_rate": 2.302771855010661e-05,
      "loss": 0.0796,
      "step": 1265
    },
    {
      "epoch": 2.7078891257995736,
      "grad_norm": 0.14947471022605896,
      "learning_rate": 2.2921108742004265e-05,
      "loss": 0.0592,
      "step": 1270
    },
    {
      "epoch": 2.718550106609808,
      "grad_norm": 7.502159118652344,
      "learning_rate": 2.281449893390192e-05,
      "loss": 0.0647,
      "step": 1275
    },
    {
      "epoch": 2.7292110874200426,
      "grad_norm": 0.7342118620872498,
      "learning_rate": 2.2707889125799575e-05,
      "loss": 0.0454,
      "step": 1280
    },
    {
      "epoch": 2.739872068230277,
      "grad_norm": 0.6529799699783325,
      "learning_rate": 2.260127931769723e-05,
      "loss": 0.0289,
      "step": 1285
    },
    {
      "epoch": 2.750533049040512,
      "grad_norm": 0.8765589594841003,
      "learning_rate": 2.2494669509594884e-05,
      "loss": 0.0114,
      "step": 1290
    },
    {
      "epoch": 2.7611940298507465,
      "grad_norm": 0.01636110618710518,
      "learning_rate": 2.238805970149254e-05,
      "loss": 0.0088,
      "step": 1295
    },
    {
      "epoch": 2.771855010660981,
      "grad_norm": 0.10365647822618484,
      "learning_rate": 2.2281449893390194e-05,
      "loss": 0.0514,
      "step": 1300
    },
    {
      "epoch": 2.7825159914712154,
      "grad_norm": 37.73663330078125,
      "learning_rate": 2.217484008528785e-05,
      "loss": 0.0725,
      "step": 1305
    },
    {
      "epoch": 2.79317697228145,
      "grad_norm": 0.22364971041679382,
      "learning_rate": 2.2068230277185503e-05,
      "loss": 0.0792,
      "step": 1310
    },
    {
      "epoch": 2.8038379530916844,
      "grad_norm": 0.9712044596672058,
      "learning_rate": 2.1961620469083158e-05,
      "loss": 0.0749,
      "step": 1315
    },
    {
      "epoch": 2.814498933901919,
      "grad_norm": 0.5006036162376404,
      "learning_rate": 2.1855010660980813e-05,
      "loss": 0.0239,
      "step": 1320
    },
    {
      "epoch": 2.8251599147121533,
      "grad_norm": 0.5171977877616882,
      "learning_rate": 2.1748400852878467e-05,
      "loss": 0.0691,
      "step": 1325
    },
    {
      "epoch": 2.835820895522388,
      "grad_norm": 1.9258904457092285,
      "learning_rate": 2.164179104477612e-05,
      "loss": 0.0585,
      "step": 1330
    },
    {
      "epoch": 2.8464818763326227,
      "grad_norm": 4.567844390869141,
      "learning_rate": 2.1535181236673773e-05,
      "loss": 0.0329,
      "step": 1335
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.9108818769454956,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.0295,
      "step": 1340
    },
    {
      "epoch": 2.8678038379530917,
      "grad_norm": 0.02197256311774254,
      "learning_rate": 2.1321961620469083e-05,
      "loss": 0.024,
      "step": 1345
    },
    {
      "epoch": 2.878464818763326,
      "grad_norm": 2.4663374423980713,
      "learning_rate": 2.1215351812366738e-05,
      "loss": 0.0325,
      "step": 1350
    },
    {
      "epoch": 2.8891257995735606,
      "grad_norm": 3.5528790950775146,
      "learning_rate": 2.1108742004264392e-05,
      "loss": 0.0891,
      "step": 1355
    },
    {
      "epoch": 2.8997867803837956,
      "grad_norm": 7.606436252593994,
      "learning_rate": 2.1002132196162047e-05,
      "loss": 0.0381,
      "step": 1360
    },
    {
      "epoch": 2.91044776119403,
      "grad_norm": 4.542375564575195,
      "learning_rate": 2.0895522388059702e-05,
      "loss": 0.0525,
      "step": 1365
    },
    {
      "epoch": 2.9211087420042645,
      "grad_norm": 0.5873743891716003,
      "learning_rate": 2.0788912579957357e-05,
      "loss": 0.0669,
      "step": 1370
    },
    {
      "epoch": 2.931769722814499,
      "grad_norm": 3.73779034614563,
      "learning_rate": 2.068230277185501e-05,
      "loss": 0.0897,
      "step": 1375
    },
    {
      "epoch": 2.9424307036247335,
      "grad_norm": 1.2701014280319214,
      "learning_rate": 2.0575692963752666e-05,
      "loss": 0.0684,
      "step": 1380
    },
    {
      "epoch": 2.953091684434968,
      "grad_norm": 3.018660306930542,
      "learning_rate": 2.046908315565032e-05,
      "loss": 0.0763,
      "step": 1385
    },
    {
      "epoch": 2.9637526652452024,
      "grad_norm": 0.02258170209825039,
      "learning_rate": 2.0362473347547976e-05,
      "loss": 0.0508,
      "step": 1390
    },
    {
      "epoch": 2.974413646055437,
      "grad_norm": 0.29736319184303284,
      "learning_rate": 2.025586353944563e-05,
      "loss": 0.1179,
      "step": 1395
    },
    {
      "epoch": 2.9850746268656714,
      "grad_norm": 4.075245380401611,
      "learning_rate": 2.0149253731343285e-05,
      "loss": 0.0719,
      "step": 1400
    },
    {
      "epoch": 2.9957356076759063,
      "grad_norm": 0.12064412236213684,
      "learning_rate": 2.004264392324094e-05,
      "loss": 0.0267,
      "step": 1405
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9861333333333333,
      "eval_loss": 0.040121279656887054,
      "eval_runtime": 19.636,
      "eval_samples_per_second": 190.975,
      "eval_steps_per_second": 6.009,
      "step": 1407
    },
    {
      "epoch": 3.0063965884861408,
      "grad_norm": 1.099586009979248,
      "learning_rate": 1.9936034115138594e-05,
      "loss": 0.0743,
      "step": 1410
    },
    {
      "epoch": 3.0170575692963753,
      "grad_norm": 0.9293396472930908,
      "learning_rate": 1.982942430703625e-05,
      "loss": 0.0459,
      "step": 1415
    },
    {
      "epoch": 3.0277185501066097,
      "grad_norm": 5.390717506408691,
      "learning_rate": 1.9722814498933904e-05,
      "loss": 0.0535,
      "step": 1420
    },
    {
      "epoch": 3.038379530916844,
      "grad_norm": 6.141834259033203,
      "learning_rate": 1.961620469083156e-05,
      "loss": 0.0382,
      "step": 1425
    },
    {
      "epoch": 3.0490405117270787,
      "grad_norm": 0.20086173713207245,
      "learning_rate": 1.9509594882729213e-05,
      "loss": 0.0442,
      "step": 1430
    },
    {
      "epoch": 3.0597014925373136,
      "grad_norm": 1.134650707244873,
      "learning_rate": 1.9402985074626868e-05,
      "loss": 0.0208,
      "step": 1435
    },
    {
      "epoch": 3.070362473347548,
      "grad_norm": 0.06544306874275208,
      "learning_rate": 1.9296375266524523e-05,
      "loss": 0.0318,
      "step": 1440
    },
    {
      "epoch": 3.0810234541577826,
      "grad_norm": 0.12228225916624069,
      "learning_rate": 1.9189765458422178e-05,
      "loss": 0.0599,
      "step": 1445
    },
    {
      "epoch": 3.091684434968017,
      "grad_norm": 2.879087448120117,
      "learning_rate": 1.9083155650319832e-05,
      "loss": 0.0578,
      "step": 1450
    },
    {
      "epoch": 3.1023454157782515,
      "grad_norm": 0.15038320422172546,
      "learning_rate": 1.8976545842217487e-05,
      "loss": 0.0335,
      "step": 1455
    },
    {
      "epoch": 3.113006396588486,
      "grad_norm": 1.4391463994979858,
      "learning_rate": 1.8869936034115142e-05,
      "loss": 0.0591,
      "step": 1460
    },
    {
      "epoch": 3.1236673773987205,
      "grad_norm": 0.2009987235069275,
      "learning_rate": 1.8763326226012797e-05,
      "loss": 0.0237,
      "step": 1465
    },
    {
      "epoch": 3.1343283582089554,
      "grad_norm": 0.36209341883659363,
      "learning_rate": 1.865671641791045e-05,
      "loss": 0.0473,
      "step": 1470
    },
    {
      "epoch": 3.14498933901919,
      "grad_norm": 0.11285064369440079,
      "learning_rate": 1.8550106609808106e-05,
      "loss": 0.0096,
      "step": 1475
    },
    {
      "epoch": 3.1556503198294243,
      "grad_norm": 1.7548198699951172,
      "learning_rate": 1.8443496801705757e-05,
      "loss": 0.0566,
      "step": 1480
    },
    {
      "epoch": 3.166311300639659,
      "grad_norm": 0.5041276812553406,
      "learning_rate": 1.8336886993603412e-05,
      "loss": 0.028,
      "step": 1485
    },
    {
      "epoch": 3.1769722814498933,
      "grad_norm": 2.809965133666992,
      "learning_rate": 1.8230277185501067e-05,
      "loss": 0.0489,
      "step": 1490
    },
    {
      "epoch": 3.1876332622601278,
      "grad_norm": 3.5192341804504395,
      "learning_rate": 1.812366737739872e-05,
      "loss": 0.0163,
      "step": 1495
    },
    {
      "epoch": 3.1982942430703627,
      "grad_norm": 0.05835040286183357,
      "learning_rate": 1.8017057569296376e-05,
      "loss": 0.055,
      "step": 1500
    },
    {
      "epoch": 3.208955223880597,
      "grad_norm": 3.340876340866089,
      "learning_rate": 1.791044776119403e-05,
      "loss": 0.0721,
      "step": 1505
    },
    {
      "epoch": 3.2196162046908317,
      "grad_norm": 1.8546497821807861,
      "learning_rate": 1.7803837953091686e-05,
      "loss": 0.0431,
      "step": 1510
    },
    {
      "epoch": 3.230277185501066,
      "grad_norm": 3.2202024459838867,
      "learning_rate": 1.769722814498934e-05,
      "loss": 0.0215,
      "step": 1515
    },
    {
      "epoch": 3.2409381663113006,
      "grad_norm": 5.033542633056641,
      "learning_rate": 1.7590618336886995e-05,
      "loss": 0.0445,
      "step": 1520
    },
    {
      "epoch": 3.251599147121535,
      "grad_norm": 0.5107560753822327,
      "learning_rate": 1.7484008528784647e-05,
      "loss": 0.0667,
      "step": 1525
    },
    {
      "epoch": 3.2622601279317696,
      "grad_norm": 5.583055019378662,
      "learning_rate": 1.73773987206823e-05,
      "loss": 0.0227,
      "step": 1530
    },
    {
      "epoch": 3.272921108742004,
      "grad_norm": 2.9682164192199707,
      "learning_rate": 1.7270788912579956e-05,
      "loss": 0.0227,
      "step": 1535
    },
    {
      "epoch": 3.283582089552239,
      "grad_norm": 3.661648750305176,
      "learning_rate": 1.716417910447761e-05,
      "loss": 0.0877,
      "step": 1540
    },
    {
      "epoch": 3.2942430703624734,
      "grad_norm": 0.1012461856007576,
      "learning_rate": 1.7057569296375266e-05,
      "loss": 0.0754,
      "step": 1545
    },
    {
      "epoch": 3.304904051172708,
      "grad_norm": 3.245004892349243,
      "learning_rate": 1.695095948827292e-05,
      "loss": 0.0602,
      "step": 1550
    },
    {
      "epoch": 3.3155650319829424,
      "grad_norm": 1.2995877265930176,
      "learning_rate": 1.6844349680170575e-05,
      "loss": 0.0492,
      "step": 1555
    },
    {
      "epoch": 3.326226012793177,
      "grad_norm": 0.08209806680679321,
      "learning_rate": 1.673773987206823e-05,
      "loss": 0.0514,
      "step": 1560
    },
    {
      "epoch": 3.3368869936034113,
      "grad_norm": 0.48734763264656067,
      "learning_rate": 1.6631130063965885e-05,
      "loss": 0.0228,
      "step": 1565
    },
    {
      "epoch": 3.3475479744136463,
      "grad_norm": 2.341841459274292,
      "learning_rate": 1.652452025586354e-05,
      "loss": 0.0293,
      "step": 1570
    },
    {
      "epoch": 3.3582089552238807,
      "grad_norm": 1.3017606735229492,
      "learning_rate": 1.6417910447761194e-05,
      "loss": 0.0667,
      "step": 1575
    },
    {
      "epoch": 3.368869936034115,
      "grad_norm": 0.08357426524162292,
      "learning_rate": 1.631130063965885e-05,
      "loss": 0.0492,
      "step": 1580
    },
    {
      "epoch": 3.3795309168443497,
      "grad_norm": 3.370689630508423,
      "learning_rate": 1.6204690831556504e-05,
      "loss": 0.0119,
      "step": 1585
    },
    {
      "epoch": 3.390191897654584,
      "grad_norm": 4.849547386169434,
      "learning_rate": 1.6098081023454158e-05,
      "loss": 0.0973,
      "step": 1590
    },
    {
      "epoch": 3.4008528784648187,
      "grad_norm": 0.09231895953416824,
      "learning_rate": 1.5991471215351813e-05,
      "loss": 0.0425,
      "step": 1595
    },
    {
      "epoch": 3.411513859275053,
      "grad_norm": 5.798752784729004,
      "learning_rate": 1.5884861407249468e-05,
      "loss": 0.0481,
      "step": 1600
    },
    {
      "epoch": 3.4221748400852876,
      "grad_norm": 0.33630430698394775,
      "learning_rate": 1.5778251599147122e-05,
      "loss": 0.0353,
      "step": 1605
    },
    {
      "epoch": 3.4328358208955225,
      "grad_norm": 0.12259405851364136,
      "learning_rate": 1.5671641791044777e-05,
      "loss": 0.0612,
      "step": 1610
    },
    {
      "epoch": 3.443496801705757,
      "grad_norm": 1.8938533067703247,
      "learning_rate": 1.5565031982942432e-05,
      "loss": 0.0366,
      "step": 1615
    },
    {
      "epoch": 3.4541577825159915,
      "grad_norm": 1.6231105327606201,
      "learning_rate": 1.5458422174840087e-05,
      "loss": 0.0154,
      "step": 1620
    },
    {
      "epoch": 3.464818763326226,
      "grad_norm": 0.08800487220287323,
      "learning_rate": 1.535181236673774e-05,
      "loss": 0.0471,
      "step": 1625
    },
    {
      "epoch": 3.4754797441364604,
      "grad_norm": 2.0822739601135254,
      "learning_rate": 1.5245202558635396e-05,
      "loss": 0.0246,
      "step": 1630
    },
    {
      "epoch": 3.486140724946695,
      "grad_norm": 0.5254740118980408,
      "learning_rate": 1.5138592750533051e-05,
      "loss": 0.01,
      "step": 1635
    },
    {
      "epoch": 3.49680170575693,
      "grad_norm": 2.5631539821624756,
      "learning_rate": 1.5031982942430706e-05,
      "loss": 0.0564,
      "step": 1640
    },
    {
      "epoch": 3.5074626865671643,
      "grad_norm": 7.974194049835205,
      "learning_rate": 1.4925373134328357e-05,
      "loss": 0.0963,
      "step": 1645
    },
    {
      "epoch": 3.518123667377399,
      "grad_norm": 1.6003506183624268,
      "learning_rate": 1.4818763326226012e-05,
      "loss": 0.048,
      "step": 1650
    },
    {
      "epoch": 3.5287846481876333,
      "grad_norm": 6.6301751136779785,
      "learning_rate": 1.4712153518123666e-05,
      "loss": 0.0235,
      "step": 1655
    },
    {
      "epoch": 3.5394456289978677,
      "grad_norm": 0.11535332351922989,
      "learning_rate": 1.4605543710021321e-05,
      "loss": 0.0484,
      "step": 1660
    },
    {
      "epoch": 3.550106609808102,
      "grad_norm": 2.4943089485168457,
      "learning_rate": 1.4498933901918976e-05,
      "loss": 0.0714,
      "step": 1665
    },
    {
      "epoch": 3.5607675906183367,
      "grad_norm": 1.4798794984817505,
      "learning_rate": 1.439232409381663e-05,
      "loss": 0.0727,
      "step": 1670
    },
    {
      "epoch": 3.571428571428571,
      "grad_norm": 7.409952163696289,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.0458,
      "step": 1675
    },
    {
      "epoch": 3.582089552238806,
      "grad_norm": 1.0499883890151978,
      "learning_rate": 1.417910447761194e-05,
      "loss": 0.0521,
      "step": 1680
    },
    {
      "epoch": 3.5927505330490406,
      "grad_norm": 2.103320360183716,
      "learning_rate": 1.4072494669509595e-05,
      "loss": 0.0513,
      "step": 1685
    },
    {
      "epoch": 3.603411513859275,
      "grad_norm": 1.1079938411712646,
      "learning_rate": 1.396588486140725e-05,
      "loss": 0.0513,
      "step": 1690
    },
    {
      "epoch": 3.6140724946695095,
      "grad_norm": 4.392083644866943,
      "learning_rate": 1.3859275053304904e-05,
      "loss": 0.0315,
      "step": 1695
    },
    {
      "epoch": 3.624733475479744,
      "grad_norm": 4.584433555603027,
      "learning_rate": 1.3752665245202559e-05,
      "loss": 0.0852,
      "step": 1700
    },
    {
      "epoch": 3.635394456289979,
      "grad_norm": 0.029997576028108597,
      "learning_rate": 1.3646055437100214e-05,
      "loss": 0.047,
      "step": 1705
    },
    {
      "epoch": 3.6460554371002134,
      "grad_norm": 1.1480836868286133,
      "learning_rate": 1.3539445628997869e-05,
      "loss": 0.0234,
      "step": 1710
    },
    {
      "epoch": 3.656716417910448,
      "grad_norm": 0.664803147315979,
      "learning_rate": 1.3432835820895523e-05,
      "loss": 0.0317,
      "step": 1715
    },
    {
      "epoch": 3.6673773987206824,
      "grad_norm": 0.5053799748420715,
      "learning_rate": 1.3326226012793178e-05,
      "loss": 0.0093,
      "step": 1720
    },
    {
      "epoch": 3.678038379530917,
      "grad_norm": 0.23175647854804993,
      "learning_rate": 1.3219616204690833e-05,
      "loss": 0.0415,
      "step": 1725
    },
    {
      "epoch": 3.6886993603411513,
      "grad_norm": 0.03786076605319977,
      "learning_rate": 1.3113006396588488e-05,
      "loss": 0.0832,
      "step": 1730
    },
    {
      "epoch": 3.699360341151386,
      "grad_norm": 7.109079837799072,
      "learning_rate": 1.3006396588486142e-05,
      "loss": 0.0497,
      "step": 1735
    },
    {
      "epoch": 3.7100213219616203,
      "grad_norm": 1.8452637195587158,
      "learning_rate": 1.2899786780383797e-05,
      "loss": 0.0543,
      "step": 1740
    },
    {
      "epoch": 3.7206823027718547,
      "grad_norm": 0.015753092244267464,
      "learning_rate": 1.2793176972281452e-05,
      "loss": 0.0192,
      "step": 1745
    },
    {
      "epoch": 3.7313432835820897,
      "grad_norm": 0.0742538571357727,
      "learning_rate": 1.2686567164179105e-05,
      "loss": 0.0656,
      "step": 1750
    },
    {
      "epoch": 3.742004264392324,
      "grad_norm": 4.67445707321167,
      "learning_rate": 1.257995735607676e-05,
      "loss": 0.0563,
      "step": 1755
    },
    {
      "epoch": 3.7526652452025586,
      "grad_norm": 0.8840895295143127,
      "learning_rate": 1.2473347547974414e-05,
      "loss": 0.013,
      "step": 1760
    },
    {
      "epoch": 3.763326226012793,
      "grad_norm": 0.5750113725662231,
      "learning_rate": 1.2366737739872069e-05,
      "loss": 0.0253,
      "step": 1765
    },
    {
      "epoch": 3.7739872068230276,
      "grad_norm": 4.264706611633301,
      "learning_rate": 1.2260127931769722e-05,
      "loss": 0.0725,
      "step": 1770
    },
    {
      "epoch": 3.7846481876332625,
      "grad_norm": 0.6944350600242615,
      "learning_rate": 1.2153518123667377e-05,
      "loss": 0.0277,
      "step": 1775
    },
    {
      "epoch": 3.795309168443497,
      "grad_norm": 2.92893385887146,
      "learning_rate": 1.2046908315565032e-05,
      "loss": 0.028,
      "step": 1780
    },
    {
      "epoch": 3.8059701492537314,
      "grad_norm": 0.5488278269767761,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 0.063,
      "step": 1785
    },
    {
      "epoch": 3.816631130063966,
      "grad_norm": 0.5430752635002136,
      "learning_rate": 1.1833688699360341e-05,
      "loss": 0.0567,
      "step": 1790
    },
    {
      "epoch": 3.8272921108742004,
      "grad_norm": 2.9317545890808105,
      "learning_rate": 1.1727078891257996e-05,
      "loss": 0.0592,
      "step": 1795
    },
    {
      "epoch": 3.837953091684435,
      "grad_norm": 0.3377562165260315,
      "learning_rate": 1.162046908315565e-05,
      "loss": 0.0295,
      "step": 1800
    },
    {
      "epoch": 3.8486140724946694,
      "grad_norm": 0.04982152208685875,
      "learning_rate": 1.1513859275053305e-05,
      "loss": 0.0022,
      "step": 1805
    },
    {
      "epoch": 3.859275053304904,
      "grad_norm": 3.149503231048584,
      "learning_rate": 1.140724946695096e-05,
      "loss": 0.0736,
      "step": 1810
    },
    {
      "epoch": 3.8699360341151388,
      "grad_norm": 4.5320844650268555,
      "learning_rate": 1.1300639658848615e-05,
      "loss": 0.0796,
      "step": 1815
    },
    {
      "epoch": 3.8805970149253732,
      "grad_norm": 1.086563229560852,
      "learning_rate": 1.119402985074627e-05
|
"loss": 0.0465, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.8912579957356077, |
|
"grad_norm": 0.1699202060699463, |
|
"learning_rate": 1.1087420042643924e-05, |
|
"loss": 0.0227, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 3.901918976545842, |
|
"grad_norm": 0.12619614601135254, |
|
"learning_rate": 1.0980810234541579e-05, |
|
"loss": 0.0083, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.9125799573560767, |
|
"grad_norm": 6.868366241455078, |
|
"learning_rate": 1.0874200426439234e-05, |
|
"loss": 0.1049, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 3.923240938166311, |
|
"grad_norm": 0.5237112045288086, |
|
"learning_rate": 1.0767590618336887e-05, |
|
"loss": 0.0552, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.933901918976546, |
|
"grad_norm": 4.085588455200195, |
|
"learning_rate": 1.0660980810234541e-05, |
|
"loss": 0.0763, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 3.9445628997867805, |
|
"grad_norm": 6.672009468078613, |
|
"learning_rate": 1.0554371002132196e-05, |
|
"loss": 0.0647, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.955223880597015, |
|
"grad_norm": 16.397506713867188, |
|
"learning_rate": 1.0447761194029851e-05, |
|
"loss": 0.0592, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 3.9658848614072495, |
|
"grad_norm": 0.138535276055336, |
|
"learning_rate": 1.0341151385927506e-05, |
|
"loss": 0.0041, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.976545842217484, |
|
"grad_norm": 4.759294509887695, |
|
"learning_rate": 1.023454157782516e-05, |
|
"loss": 0.0195, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 3.9872068230277184, |
|
"grad_norm": 0.01401366014033556, |
|
"learning_rate": 1.0127931769722815e-05, |
|
"loss": 0.0051, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 3.997867803837953, |
|
"grad_norm": 0.06928014010190964, |
|
"learning_rate": 1.002132196162047e-05, |
|
"loss": 0.043, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.9834666666666667, |
|
"eval_loss": 0.05394299700856209, |
|
"eval_runtime": 19.7074, |
|
"eval_samples_per_second": 190.284, |
|
"eval_steps_per_second": 5.988, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 4.008528784648187, |
|
"grad_norm": 0.03933345526456833, |
|
"learning_rate": 9.914712153518125e-06, |
|
"loss": 0.0925, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.019189765458422, |
|
"grad_norm": 2.53454327583313, |
|
"learning_rate": 9.80810234541578e-06, |
|
"loss": 0.0981, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 4.029850746268656, |
|
"grad_norm": 0.34256383776664734, |
|
"learning_rate": 9.701492537313434e-06, |
|
"loss": 0.0451, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.040511727078891, |
|
"grad_norm": 1.6070563793182373, |
|
"learning_rate": 9.594882729211089e-06, |
|
"loss": 0.0492, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 4.051172707889126, |
|
"grad_norm": 2.9909026622772217, |
|
"learning_rate": 9.488272921108744e-06, |
|
"loss": 0.0427, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.061833688699361, |
|
"grad_norm": 2.7542803287506104, |
|
"learning_rate": 9.381663113006398e-06, |
|
"loss": 0.0263, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 4.072494669509595, |
|
"grad_norm": 5.553555965423584, |
|
"learning_rate": 9.275053304904053e-06, |
|
"loss": 0.0468, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.08315565031983, |
|
"grad_norm": 0.49815893173217773, |
|
"learning_rate": 9.168443496801706e-06, |
|
"loss": 0.0223, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 4.093816631130064, |
|
"grad_norm": 2.076446294784546, |
|
"learning_rate": 9.06183368869936e-06, |
|
"loss": 0.0441, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.104477611940299, |
|
"grad_norm": 2.2500789165496826, |
|
"learning_rate": 8.955223880597016e-06, |
|
"loss": 0.04, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 4.115138592750533, |
|
"grad_norm": 1.9906346797943115, |
|
"learning_rate": 8.84861407249467e-06, |
|
"loss": 0.0705, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.1257995735607675, |
|
"grad_norm": 6.390938758850098, |
|
"learning_rate": 8.742004264392323e-06, |
|
"loss": 0.0391, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 4.136460554371002, |
|
"grad_norm": 4.86877965927124, |
|
"learning_rate": 8.635394456289978e-06, |
|
"loss": 0.0425, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.1471215351812365, |
|
"grad_norm": 0.11859188228845596, |
|
"learning_rate": 8.528784648187633e-06, |
|
"loss": 0.047, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 4.157782515991471, |
|
"grad_norm": 0.048083122819662094, |
|
"learning_rate": 8.422174840085288e-06, |
|
"loss": 0.0443, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.1684434968017055, |
|
"grad_norm": 0.4983248710632324, |
|
"learning_rate": 8.315565031982942e-06, |
|
"loss": 0.0456, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 4.17910447761194, |
|
"grad_norm": 4.853020668029785, |
|
"learning_rate": 8.208955223880597e-06, |
|
"loss": 0.044, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.189765458422174, |
|
"grad_norm": 0.588883101940155, |
|
"learning_rate": 8.102345415778252e-06, |
|
"loss": 0.0079, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 4.20042643923241, |
|
"grad_norm": 0.6782941222190857, |
|
"learning_rate": 7.995735607675907e-06, |
|
"loss": 0.0334, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.211087420042644, |
|
"grad_norm": 0.07805205136537552, |
|
"learning_rate": 7.889125799573561e-06, |
|
"loss": 0.0163, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 4.221748400852879, |
|
"grad_norm": 2.8289687633514404, |
|
"learning_rate": 7.782515991471216e-06, |
|
"loss": 0.0513, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.232409381663113, |
|
"grad_norm": 0.21742869913578033, |
|
"learning_rate": 7.67590618336887e-06, |
|
"loss": 0.0132, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 4.243070362473348, |
|
"grad_norm": 0.01784393936395645, |
|
"learning_rate": 7.5692963752665255e-06, |
|
"loss": 0.0273, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.253731343283582, |
|
"grad_norm": 0.5595592856407166, |
|
"learning_rate": 7.4626865671641785e-06, |
|
"loss": 0.0232, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 4.264392324093817, |
|
"grad_norm": 0.8079865574836731, |
|
"learning_rate": 7.356076759061833e-06, |
|
"loss": 0.0762, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.275053304904051, |
|
"grad_norm": 1.4811580181121826, |
|
"learning_rate": 7.249466950959488e-06, |
|
"loss": 0.0463, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 4.285714285714286, |
|
"grad_norm": 2.164968252182007, |
|
"learning_rate": 7.142857142857143e-06, |
|
"loss": 0.0559, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.29637526652452, |
|
"grad_norm": 6.072133541107178, |
|
"learning_rate": 7.0362473347547975e-06, |
|
"loss": 0.0434, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 4.3070362473347545, |
|
"grad_norm": 0.20288045704364777, |
|
"learning_rate": 6.929637526652452e-06, |
|
"loss": 0.0743, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.317697228144989, |
|
"grad_norm": 5.396198749542236, |
|
"learning_rate": 6.823027718550107e-06, |
|
"loss": 0.0475, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 4.3283582089552235, |
|
"grad_norm": 0.4482716917991638, |
|
"learning_rate": 6.716417910447762e-06, |
|
"loss": 0.1045, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.339019189765459, |
|
"grad_norm": 3.217515707015991, |
|
"learning_rate": 6.609808102345416e-06, |
|
"loss": 0.019, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 4.349680170575693, |
|
"grad_norm": 6.129680633544922, |
|
"learning_rate": 6.503198294243071e-06, |
|
"loss": 0.0738, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.360341151385928, |
|
"grad_norm": 0.23290002346038818, |
|
"learning_rate": 6.396588486140726e-06, |
|
"loss": 0.0115, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 4.371002132196162, |
|
"grad_norm": 0.2791178524494171, |
|
"learning_rate": 6.28997867803838e-06, |
|
"loss": 0.0254, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.381663113006397, |
|
"grad_norm": 1.407891869544983, |
|
"learning_rate": 6.1833688699360345e-06, |
|
"loss": 0.0242, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 4.392324093816631, |
|
"grad_norm": 0.5623234510421753, |
|
"learning_rate": 6.076759061833688e-06, |
|
"loss": 0.0394, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.402985074626866, |
|
"grad_norm": 0.5242185592651367, |
|
"learning_rate": 5.970149253731343e-06, |
|
"loss": 0.0458, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 4.4136460554371, |
|
"grad_norm": 5.725996971130371, |
|
"learning_rate": 5.863539445628998e-06, |
|
"loss": 0.0273, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.424307036247335, |
|
"grad_norm": 2.9313879013061523, |
|
"learning_rate": 5.756929637526653e-06, |
|
"loss": 0.049, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 4.434968017057569, |
|
"grad_norm": 0.013987524434924126, |
|
"learning_rate": 5.650319829424307e-06, |
|
"loss": 0.0054, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.445628997867804, |
|
"grad_norm": 0.3480443060398102, |
|
"learning_rate": 5.543710021321962e-06, |
|
"loss": 0.0207, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 4.456289978678038, |
|
"grad_norm": 0.03678889945149422, |
|
"learning_rate": 5.437100213219617e-06, |
|
"loss": 0.0037, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.466950959488273, |
|
"grad_norm": 0.015881139785051346, |
|
"learning_rate": 5.330490405117271e-06, |
|
"loss": 0.0377, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 4.477611940298507, |
|
"grad_norm": 0.08979633450508118, |
|
"learning_rate": 5.2238805970149255e-06, |
|
"loss": 0.0458, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.4882729211087415, |
|
"grad_norm": 0.4414765536785126, |
|
"learning_rate": 5.11727078891258e-06, |
|
"loss": 0.0105, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 4.498933901918977, |
|
"grad_norm": 0.25397035479545593, |
|
"learning_rate": 5.010660980810235e-06, |
|
"loss": 0.0496, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.509594882729211, |
|
"grad_norm": 1.8180643320083618, |
|
"learning_rate": 4.90405117270789e-06, |
|
"loss": 0.0547, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 4.520255863539446, |
|
"grad_norm": 0.8239295482635498, |
|
"learning_rate": 4.797441364605544e-06, |
|
"loss": 0.0404, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.53091684434968, |
|
"grad_norm": 0.18995444476604462, |
|
"learning_rate": 4.690831556503199e-06, |
|
"loss": 0.0757, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 4.541577825159915, |
|
"grad_norm": 1.689175009727478, |
|
"learning_rate": 4.584221748400853e-06, |
|
"loss": 0.1032, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.552238805970149, |
|
"grad_norm": 3.80082368850708, |
|
"learning_rate": 4.477611940298508e-06, |
|
"loss": 0.0626, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 4.562899786780384, |
|
"grad_norm": 0.15196892619132996, |
|
"learning_rate": 4.371002132196162e-06, |
|
"loss": 0.0132, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.573560767590618, |
|
"grad_norm": 2.260632276535034, |
|
"learning_rate": 4.264392324093816e-06, |
|
"loss": 0.0252, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 4.584221748400853, |
|
"grad_norm": 1.6350780725479126, |
|
"learning_rate": 4.157782515991471e-06, |
|
"loss": 0.0798, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.594882729211087, |
|
"grad_norm": 0.6196228265762329, |
|
"learning_rate": 4.051172707889126e-06, |
|
"loss": 0.0215, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 4.605543710021322, |
|
"grad_norm": 0.4525044560432434, |
|
"learning_rate": 3.944562899786781e-06, |
|
"loss": 0.0282, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.616204690831556, |
|
"grad_norm": 0.921244204044342, |
|
"learning_rate": 3.837953091684435e-06, |
|
"loss": 0.0654, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 4.6268656716417915, |
|
"grad_norm": 1.978655219078064, |
|
"learning_rate": 3.7313432835820893e-06, |
|
"loss": 0.0704, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.637526652452026, |
|
"grad_norm": 0.06893670558929443, |
|
"learning_rate": 3.624733475479744e-06, |
|
"loss": 0.0222, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 4.6481876332622605, |
|
"grad_norm": 0.7820674180984497, |
|
"learning_rate": 3.5181236673773987e-06, |
|
"loss": 0.0142, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.658848614072495, |
|
"grad_norm": 2.677800178527832, |
|
"learning_rate": 3.4115138592750535e-06, |
|
"loss": 0.0724, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 4.669509594882729, |
|
"grad_norm": 3.9743552207946777, |
|
"learning_rate": 3.304904051172708e-06, |
|
"loss": 0.0359, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.680170575692964, |
|
"grad_norm": 3.922999382019043, |
|
"learning_rate": 3.198294243070363e-06, |
|
"loss": 0.0503, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 4.690831556503198, |
|
"grad_norm": 0.27923038601875305, |
|
"learning_rate": 3.0916844349680173e-06, |
|
"loss": 0.0679, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.701492537313433, |
|
"grad_norm": 4.296010494232178, |
|
"learning_rate": 2.9850746268656716e-06, |
|
"loss": 0.1045, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 4.712153518123667, |
|
"grad_norm": 0.062033142894506454, |
|
"learning_rate": 2.8784648187633263e-06, |
|
"loss": 0.0284, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.722814498933902, |
|
"grad_norm": 0.38232874870300293, |
|
"learning_rate": 2.771855010660981e-06, |
|
"loss": 0.0259, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 4.733475479744136, |
|
"grad_norm": 0.5059103965759277, |
|
"learning_rate": 2.6652452025586354e-06, |
|
"loss": 0.0113, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.744136460554371, |
|
"grad_norm": 0.8820269107818604, |
|
"learning_rate": 2.55863539445629e-06, |
|
"loss": 0.0202, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 4.754797441364605, |
|
"grad_norm": 5.611047267913818, |
|
"learning_rate": 2.452025586353945e-06, |
|
"loss": 0.0242, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.76545842217484, |
|
"grad_norm": 1.507293939590454, |
|
"learning_rate": 2.3454157782515996e-06, |
|
"loss": 0.0782, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 4.776119402985074, |
|
"grad_norm": 0.04614684730768204, |
|
"learning_rate": 2.238805970149254e-06, |
|
"loss": 0.0243, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.786780383795309, |
|
"grad_norm": 1.9392746686935425, |
|
"learning_rate": 2.132196162046908e-06, |
|
"loss": 0.0368, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 4.797441364605544, |
|
"grad_norm": 0.11692571640014648, |
|
"learning_rate": 2.025586353944563e-06, |
|
"loss": 0.0271, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.8081023454157785, |
|
"grad_norm": 6.559055805206299, |
|
"learning_rate": 1.9189765458422177e-06, |
|
"loss": 0.061, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 4.818763326226013, |
|
"grad_norm": 0.9836236834526062, |
|
"learning_rate": 1.812366737739872e-06, |
|
"loss": 0.0156, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.8294243070362475, |
|
"grad_norm": 0.14515946805477142, |
|
"learning_rate": 1.7057569296375267e-06, |
|
"loss": 0.0208, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 4.840085287846482, |
|
"grad_norm": 4.400147914886475, |
|
"learning_rate": 1.5991471215351815e-06, |
|
"loss": 0.0342, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.850746268656716, |
|
"grad_norm": 5.292147636413574, |
|
"learning_rate": 1.4925373134328358e-06, |
|
"loss": 0.0603, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 4.861407249466951, |
|
"grad_norm": 0.21063509583473206, |
|
"learning_rate": 1.3859275053304905e-06, |
|
"loss": 0.0137, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.872068230277185, |
|
"grad_norm": 0.039475779980421066, |
|
"learning_rate": 1.279317697228145e-06, |
|
"loss": 0.0175, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 4.88272921108742, |
|
"grad_norm": 0.16149736940860748, |
|
"learning_rate": 1.1727078891257998e-06, |
|
"loss": 0.0132, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.893390191897654, |
|
"grad_norm": 5.232501029968262, |
|
"learning_rate": 1.066098081023454e-06, |
|
"loss": 0.0888, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 4.904051172707889, |
|
"grad_norm": 0.12954987585544586, |
|
"learning_rate": 9.594882729211088e-07, |
|
"loss": 0.0087, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.914712153518123, |
|
"grad_norm": 0.3452926278114319, |
|
"learning_rate": 8.528784648187634e-07, |
|
"loss": 0.0172, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 4.925373134328359, |
|
"grad_norm": 6.652196884155273, |
|
"learning_rate": 7.462686567164179e-07, |
|
"loss": 0.0895, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.936034115138593, |
|
"grad_norm": 1.8307301998138428, |
|
"learning_rate": 6.396588486140725e-07, |
|
"loss": 0.0903, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 4.946695095948828, |
|
"grad_norm": 0.45310544967651367, |
|
"learning_rate": 5.33049040511727e-07, |
|
"loss": 0.0467, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.957356076759062, |
|
"grad_norm": 0.8440231084823608, |
|
"learning_rate": 4.264392324093817e-07, |
|
"loss": 0.0571, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 4.968017057569297, |
|
"grad_norm": 0.1346929669380188, |
|
"learning_rate": 3.1982942430703626e-07, |
|
"loss": 0.0225, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.978678038379531, |
|
"grad_norm": 5.43850040435791, |
|
"learning_rate": 2.1321961620469084e-07, |
|
"loss": 0.0924, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 4.9893390191897655, |
|
"grad_norm": 2.714733600616455, |
|
"learning_rate": 1.0660980810234542e-07, |
|
"loss": 0.0358, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 4.701388359069824, |
|
"learning_rate": 0.0, |
|
"loss": 0.0628, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9861333333333333, |
|
"eval_loss": 0.038588594645261765, |
|
"eval_runtime": 19.6523, |
|
"eval_samples_per_second": 190.817, |
|
"eval_steps_per_second": 6.004, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 2345, |
|
"total_flos": 5.8118992210944e+18, |
|
"train_loss": 0.06377606868886872, |
|
"train_runtime": 869.1015, |
|
"train_samples_per_second": 86.296, |
|
"train_steps_per_second": 2.698 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2345, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.8118992210944e+18, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|