vit_epochs5_batch32_lr5e-05_size224_tiles1_seed1_classic_image_classification_t/trainer_state.json
{
"best_metric": 0.017088035121560097,
"best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles1_seed1_classic_image_classification_t/checkpoint-1876",
"epoch": 5.0,
"eval_steps": 500,
"global_step": 2345,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{ | |
"epoch": 0.010660980810234541, | |
"grad_norm": 1.3116532564163208, | |
"learning_rate": 4.989339019189766e-05, | |
"loss": 0.616, | |
"step": 5 | |
}, | |
{ | |
"epoch": 0.021321961620469083, | |
"grad_norm": 1.3153573274612427, | |
"learning_rate": 4.978678038379531e-05, | |
"loss": 0.4935, | |
"step": 10 | |
}, | |
{ | |
"epoch": 0.031982942430703626, | |
"grad_norm": 1.0578992366790771, | |
"learning_rate": 4.9680170575692967e-05, | |
"loss": 0.3358, | |
"step": 15 | |
}, | |
{ | |
"epoch": 0.042643923240938165, | |
"grad_norm": 0.8229417204856873, | |
"learning_rate": 4.957356076759062e-05, | |
"loss": 0.2276, | |
"step": 20 | |
}, | |
{ | |
"epoch": 0.053304904051172705, | |
"grad_norm": 0.7642258405685425, | |
"learning_rate": 4.9466950959488276e-05, | |
"loss": 0.1588, | |
"step": 25 | |
}, | |
{ | |
"epoch": 0.06396588486140725, | |
"grad_norm": 0.449072927236557, | |
"learning_rate": 4.936034115138593e-05, | |
"loss": 0.1078, | |
"step": 30 | |
}, | |
{ | |
"epoch": 0.07462686567164178, | |
"grad_norm": 0.9152060151100159, | |
"learning_rate": 4.9253731343283586e-05, | |
"loss": 0.125, | |
"step": 35 | |
}, | |
{ | |
"epoch": 0.08528784648187633, | |
"grad_norm": 1.8500392436981201, | |
"learning_rate": 4.914712153518124e-05, | |
"loss": 0.0653, | |
"step": 40 | |
}, | |
{ | |
"epoch": 0.09594882729211088, | |
"grad_norm": 0.4089727997779846, | |
"learning_rate": 4.904051172707889e-05, | |
"loss": 0.0928, | |
"step": 45 | |
}, | |
{ | |
"epoch": 0.10660980810234541, | |
"grad_norm": 2.106147050857544, | |
"learning_rate": 4.893390191897655e-05, | |
"loss": 0.0478, | |
"step": 50 | |
}, | |
{ | |
"epoch": 0.11727078891257996, | |
"grad_norm": 0.23594968020915985, | |
"learning_rate": 4.88272921108742e-05, | |
"loss": 0.0565, | |
"step": 55 | |
}, | |
{ | |
"epoch": 0.1279317697228145, | |
"grad_norm": 0.18873228132724762, | |
"learning_rate": 4.872068230277186e-05, | |
"loss": 0.0541, | |
"step": 60 | |
}, | |
{ | |
"epoch": 0.13859275053304904, | |
"grad_norm": 1.8800818920135498, | |
"learning_rate": 4.861407249466951e-05, | |
"loss": 0.0542, | |
"step": 65 | |
}, | |
{ | |
"epoch": 0.14925373134328357, | |
"grad_norm": 3.5219645500183105, | |
"learning_rate": 4.850746268656717e-05, | |
"loss": 0.0327, | |
"step": 70 | |
}, | |
{ | |
"epoch": 0.15991471215351813, | |
"grad_norm": 1.4127917289733887, | |
"learning_rate": 4.840085287846482e-05, | |
"loss": 0.0672, | |
"step": 75 | |
}, | |
{ | |
"epoch": 0.17057569296375266, | |
"grad_norm": 2.867933511734009, | |
"learning_rate": 4.829424307036248e-05, | |
"loss": 0.0707, | |
"step": 80 | |
}, | |
{ | |
"epoch": 0.1812366737739872, | |
"grad_norm": 1.1417315006256104, | |
"learning_rate": 4.8187633262260126e-05, | |
"loss": 0.035, | |
"step": 85 | |
}, | |
{ | |
"epoch": 0.19189765458422176, | |
"grad_norm": 0.20421718060970306, | |
"learning_rate": 4.808102345415779e-05, | |
"loss": 0.0483, | |
"step": 90 | |
}, | |
{ | |
"epoch": 0.2025586353944563, | |
"grad_norm": 0.2251688688993454, | |
"learning_rate": 4.7974413646055436e-05, | |
"loss": 0.0661, | |
"step": 95 | |
}, | |
{ | |
"epoch": 0.21321961620469082, | |
"grad_norm": 0.550264298915863, | |
"learning_rate": 4.78678038379531e-05, | |
"loss": 0.0228, | |
"step": 100 | |
}, | |
{ | |
"epoch": 0.22388059701492538, | |
"grad_norm": 0.2170877754688263, | |
"learning_rate": 4.7761194029850745e-05, | |
"loss": 0.0467, | |
"step": 105 | |
}, | |
{ | |
"epoch": 0.2345415778251599, | |
"grad_norm": 0.12919823825359344, | |
"learning_rate": 4.765458422174841e-05, | |
"loss": 0.031, | |
"step": 110 | |
}, | |
{ | |
"epoch": 0.24520255863539445, | |
"grad_norm": 4.972256183624268, | |
"learning_rate": 4.7547974413646055e-05, | |
"loss": 0.0313, | |
"step": 115 | |
}, | |
{ | |
"epoch": 0.255863539445629, | |
"grad_norm": 0.2253728061914444, | |
"learning_rate": 4.7441364605543716e-05, | |
"loss": 0.0218, | |
"step": 120 | |
}, | |
{ | |
"epoch": 0.26652452025586354, | |
"grad_norm": 3.4333720207214355, | |
"learning_rate": 4.7334754797441364e-05, | |
"loss": 0.061, | |
"step": 125 | |
}, | |
{ | |
"epoch": 0.2771855010660981, | |
"grad_norm": 1.9168648719787598, | |
"learning_rate": 4.7228144989339026e-05, | |
"loss": 0.0342, | |
"step": 130 | |
}, | |
{ | |
"epoch": 0.2878464818763326, | |
"grad_norm": 0.19792096316814423, | |
"learning_rate": 4.7121535181236674e-05, | |
"loss": 0.0144, | |
"step": 135 | |
}, | |
{ | |
"epoch": 0.29850746268656714, | |
"grad_norm": 0.09157060086727142, | |
"learning_rate": 4.7014925373134335e-05, | |
"loss": 0.0153, | |
"step": 140 | |
}, | |
{ | |
"epoch": 0.3091684434968017, | |
"grad_norm": 0.5903327465057373, | |
"learning_rate": 4.690831556503198e-05, | |
"loss": 0.0184, | |
"step": 145 | |
}, | |
{ | |
"epoch": 0.31982942430703626, | |
"grad_norm": 3.5828375816345215, | |
"learning_rate": 4.6801705756929645e-05, | |
"loss": 0.031, | |
"step": 150 | |
}, | |
{ | |
"epoch": 0.3304904051172708, | |
"grad_norm": 4.411681175231934, | |
"learning_rate": 4.669509594882729e-05, | |
"loss": 0.0666, | |
"step": 155 | |
}, | |
{ | |
"epoch": 0.3411513859275053, | |
"grad_norm": 0.5323250889778137, | |
"learning_rate": 4.658848614072495e-05, | |
"loss": 0.082, | |
"step": 160 | |
}, | |
{ | |
"epoch": 0.35181236673773986, | |
"grad_norm": 4.381761074066162, | |
"learning_rate": 4.64818763326226e-05, | |
"loss": 0.1012, | |
"step": 165 | |
}, | |
{ | |
"epoch": 0.3624733475479744, | |
"grad_norm": 1.8138011693954468, | |
"learning_rate": 4.637526652452026e-05, | |
"loss": 0.1278, | |
"step": 170 | |
}, | |
{ | |
"epoch": 0.373134328358209, | |
"grad_norm": 0.3873082399368286, | |
"learning_rate": 4.626865671641791e-05, | |
"loss": 0.0247, | |
"step": 175 | |
}, | |
{ | |
"epoch": 0.3837953091684435, | |
"grad_norm": 0.1741461157798767, | |
"learning_rate": 4.6162046908315566e-05, | |
"loss": 0.0514, | |
"step": 180 | |
}, | |
{ | |
"epoch": 0.39445628997867804, | |
"grad_norm": 0.09827233850955963, | |
"learning_rate": 4.605543710021322e-05, | |
"loss": 0.0217, | |
"step": 185 | |
}, | |
{ | |
"epoch": 0.4051172707889126, | |
"grad_norm": 0.10961289703845978, | |
"learning_rate": 4.5948827292110876e-05, | |
"loss": 0.0108, | |
"step": 190 | |
}, | |
{ | |
"epoch": 0.4157782515991471, | |
"grad_norm": 0.07937943935394287, | |
"learning_rate": 4.584221748400853e-05, | |
"loss": 0.0278, | |
"step": 195 | |
}, | |
{ | |
"epoch": 0.42643923240938164, | |
"grad_norm": 0.07325994223356247, | |
"learning_rate": 4.5735607675906185e-05, | |
"loss": 0.0121, | |
"step": 200 | |
}, | |
{ | |
"epoch": 0.43710021321961623, | |
"grad_norm": 6.186931133270264, | |
"learning_rate": 4.562899786780384e-05, | |
"loss": 0.0698, | |
"step": 205 | |
}, | |
{ | |
"epoch": 0.44776119402985076, | |
"grad_norm": 0.07154982537031174, | |
"learning_rate": 4.5522388059701495e-05, | |
"loss": 0.0349, | |
"step": 210 | |
}, | |
{ | |
"epoch": 0.4584221748400853, | |
"grad_norm": 2.9962408542633057, | |
"learning_rate": 4.541577825159915e-05, | |
"loss": 0.0249, | |
"step": 215 | |
}, | |
{ | |
"epoch": 0.4690831556503198, | |
"grad_norm": 0.06725240498781204, | |
"learning_rate": 4.5309168443496804e-05, | |
"loss": 0.0616, | |
"step": 220 | |
}, | |
{ | |
"epoch": 0.47974413646055436, | |
"grad_norm": 6.895079135894775, | |
"learning_rate": 4.520255863539446e-05, | |
"loss": 0.0558, | |
"step": 225 | |
}, | |
{ | |
"epoch": 0.4904051172707889, | |
"grad_norm": 0.070187047123909, | |
"learning_rate": 4.5095948827292114e-05, | |
"loss": 0.0231, | |
"step": 230 | |
}, | |
{ | |
"epoch": 0.5010660980810234, | |
"grad_norm": 0.079035185277462, | |
"learning_rate": 4.498933901918977e-05, | |
"loss": 0.0457, | |
"step": 235 | |
}, | |
{ | |
"epoch": 0.511727078891258, | |
"grad_norm": 0.0686585083603859, | |
"learning_rate": 4.488272921108742e-05, | |
"loss": 0.0427, | |
"step": 240 | |
}, | |
{ | |
"epoch": 0.5223880597014925, | |
"grad_norm": 0.1387544721364975, | |
"learning_rate": 4.477611940298508e-05, | |
"loss": 0.0117, | |
"step": 245 | |
}, | |
{ | |
"epoch": 0.5330490405117271, | |
"grad_norm": 0.08245594799518585, | |
"learning_rate": 4.466950959488273e-05, | |
"loss": 0.0247, | |
"step": 250 | |
}, | |
{ | |
"epoch": 0.5437100213219617, | |
"grad_norm": 0.22577756643295288, | |
"learning_rate": 4.456289978678039e-05, | |
"loss": 0.0492, | |
"step": 255 | |
}, | |
{ | |
"epoch": 0.5543710021321961, | |
"grad_norm": 0.2817031741142273, | |
"learning_rate": 4.445628997867804e-05, | |
"loss": 0.0148, | |
"step": 260 | |
}, | |
{ | |
"epoch": 0.5650319829424307, | |
"grad_norm": 1.5719612836837769, | |
"learning_rate": 4.43496801705757e-05, | |
"loss": 0.044, | |
"step": 265 | |
}, | |
{ | |
"epoch": 0.5756929637526652, | |
"grad_norm": 3.860602378845215, | |
"learning_rate": 4.424307036247335e-05, | |
"loss": 0.0391, | |
"step": 270 | |
}, | |
{ | |
"epoch": 0.5863539445628998, | |
"grad_norm": 0.21841102838516235, | |
"learning_rate": 4.4136460554371006e-05, | |
"loss": 0.047, | |
"step": 275 | |
}, | |
{ | |
"epoch": 0.5970149253731343, | |
"grad_norm": 2.9997661113739014, | |
"learning_rate": 4.402985074626866e-05, | |
"loss": 0.0148, | |
"step": 280 | |
}, | |
{ | |
"epoch": 0.6076759061833689, | |
"grad_norm": 0.22736194729804993, | |
"learning_rate": 4.3923240938166316e-05, | |
"loss": 0.014, | |
"step": 285 | |
}, | |
{ | |
"epoch": 0.6183368869936035, | |
"grad_norm": 3.9116787910461426, | |
"learning_rate": 4.381663113006397e-05, | |
"loss": 0.0335, | |
"step": 290 | |
}, | |
{ | |
"epoch": 0.6289978678038379, | |
"grad_norm": 0.04943195357918739, | |
"learning_rate": 4.3710021321961625e-05, | |
"loss": 0.0174, | |
"step": 295 | |
}, | |
{ | |
"epoch": 0.6396588486140725, | |
"grad_norm": 0.09343817830085754, | |
"learning_rate": 4.360341151385928e-05, | |
"loss": 0.0121, | |
"step": 300 | |
}, | |
{ | |
"epoch": 0.650319829424307, | |
"grad_norm": 0.5552846789360046, | |
"learning_rate": 4.3496801705756935e-05, | |
"loss": 0.0079, | |
"step": 305 | |
}, | |
{ | |
"epoch": 0.6609808102345416, | |
"grad_norm": 2.6768391132354736, | |
"learning_rate": 4.339019189765459e-05, | |
"loss": 0.0171, | |
"step": 310 | |
}, | |
{ | |
"epoch": 0.6716417910447762, | |
"grad_norm": 7.584292411804199, | |
"learning_rate": 4.328358208955224e-05, | |
"loss": 0.0194, | |
"step": 315 | |
}, | |
{ | |
"epoch": 0.6823027718550106, | |
"grad_norm": 0.04962082579731941, | |
"learning_rate": 4.31769722814499e-05, | |
"loss": 0.0058, | |
"step": 320 | |
}, | |
{ | |
"epoch": 0.6929637526652452, | |
"grad_norm": 8.337027549743652, | |
"learning_rate": 4.307036247334755e-05, | |
"loss": 0.0466, | |
"step": 325 | |
}, | |
{ | |
"epoch": 0.7036247334754797, | |
"grad_norm": 1.1544479131698608, | |
"learning_rate": 4.29637526652452e-05, | |
"loss": 0.0479, | |
"step": 330 | |
}, | |
{ | |
"epoch": 0.7142857142857143, | |
"grad_norm": 0.6436145901679993, | |
"learning_rate": 4.2857142857142856e-05, | |
"loss": 0.0322, | |
"step": 335 | |
}, | |
{ | |
"epoch": 0.7249466950959488, | |
"grad_norm": 1.4992705583572388, | |
"learning_rate": 4.275053304904051e-05, | |
"loss": 0.1019, | |
"step": 340 | |
}, | |
{ | |
"epoch": 0.7356076759061834, | |
"grad_norm": 6.25881290435791, | |
"learning_rate": 4.2643923240938166e-05, | |
"loss": 0.0278, | |
"step": 345 | |
}, | |
{ | |
"epoch": 0.746268656716418, | |
"grad_norm": 1.1255793571472168, | |
"learning_rate": 4.253731343283582e-05, | |
"loss": 0.0945, | |
"step": 350 | |
}, | |
{ | |
"epoch": 0.7569296375266524, | |
"grad_norm": 1.4046266078948975, | |
"learning_rate": 4.2430703624733475e-05, | |
"loss": 0.03, | |
"step": 355 | |
}, | |
{ | |
"epoch": 0.767590618336887, | |
"grad_norm": 0.2621874511241913, | |
"learning_rate": 4.232409381663113e-05, | |
"loss": 0.0108, | |
"step": 360 | |
}, | |
{ | |
"epoch": 0.7782515991471215, | |
"grad_norm": 0.10603990405797958, | |
"learning_rate": 4.2217484008528785e-05, | |
"loss": 0.009, | |
"step": 365 | |
}, | |
{ | |
"epoch": 0.7889125799573561, | |
"grad_norm": 0.26428526639938354, | |
"learning_rate": 4.211087420042644e-05, | |
"loss": 0.073, | |
"step": 370 | |
}, | |
{ | |
"epoch": 0.7995735607675906, | |
"grad_norm": 0.1072433590888977, | |
"learning_rate": 4.2004264392324094e-05, | |
"loss": 0.0099, | |
"step": 375 | |
}, | |
{ | |
"epoch": 0.8102345415778252, | |
"grad_norm": 0.051769137382507324, | |
"learning_rate": 4.189765458422175e-05, | |
"loss": 0.0232, | |
"step": 380 | |
}, | |
{ | |
"epoch": 0.8208955223880597, | |
"grad_norm": 2.6303951740264893, | |
"learning_rate": 4.1791044776119404e-05, | |
"loss": 0.0119, | |
"step": 385 | |
}, | |
{ | |
"epoch": 0.8315565031982942, | |
"grad_norm": 0.8637888431549072, | |
"learning_rate": 4.168443496801706e-05, | |
"loss": 0.0278, | |
"step": 390 | |
}, | |
{ | |
"epoch": 0.8422174840085288, | |
"grad_norm": 0.05808399245142937, | |
"learning_rate": 4.157782515991471e-05, | |
"loss": 0.0305, | |
"step": 395 | |
}, | |
{ | |
"epoch": 0.8528784648187633, | |
"grad_norm": 0.043234869837760925, | |
"learning_rate": 4.147121535181237e-05, | |
"loss": 0.027, | |
"step": 400 | |
}, | |
{ | |
"epoch": 0.8635394456289979, | |
"grad_norm": 0.04670003056526184, | |
"learning_rate": 4.136460554371002e-05, | |
"loss": 0.0077, | |
"step": 405 | |
}, | |
{ | |
"epoch": 0.8742004264392325, | |
"grad_norm": 0.22551722824573517, | |
"learning_rate": 4.125799573560768e-05, | |
"loss": 0.0101, | |
"step": 410 | |
}, | |
{ | |
"epoch": 0.8848614072494669, | |
"grad_norm": 0.037985723465681076, | |
"learning_rate": 4.115138592750533e-05, | |
"loss": 0.0098, | |
"step": 415 | |
}, | |
{ | |
"epoch": 0.8955223880597015, | |
"grad_norm": 0.036151234060525894, | |
"learning_rate": 4.104477611940299e-05, | |
"loss": 0.0596, | |
"step": 420 | |
}, | |
{ | |
"epoch": 0.906183368869936, | |
"grad_norm": 0.10200171917676926, | |
"learning_rate": 4.093816631130064e-05, | |
"loss": 0.0551, | |
"step": 425 | |
}, | |
{ | |
"epoch": 0.9168443496801706, | |
"grad_norm": 0.04176831990480423, | |
"learning_rate": 4.0831556503198296e-05, | |
"loss": 0.0058, | |
"step": 430 | |
}, | |
{ | |
"epoch": 0.9275053304904051, | |
"grad_norm": 0.03623997047543526, | |
"learning_rate": 4.072494669509595e-05, | |
"loss": 0.0333, | |
"step": 435 | |
}, | |
{ | |
"epoch": 0.9381663113006397, | |
"grad_norm": 4.799571514129639, | |
"learning_rate": 4.0618336886993606e-05, | |
"loss": 0.0852, | |
"step": 440 | |
}, | |
{ | |
"epoch": 0.9488272921108742, | |
"grad_norm": 0.05819183215498924, | |
"learning_rate": 4.051172707889126e-05, | |
"loss": 0.0184, | |
"step": 445 | |
}, | |
{ | |
"epoch": 0.9594882729211087, | |
"grad_norm": 0.0419892892241478, | |
"learning_rate": 4.0405117270788915e-05, | |
"loss": 0.0138, | |
"step": 450 | |
}, | |
{ | |
"epoch": 0.9701492537313433, | |
"grad_norm": 1.6589030027389526, | |
"learning_rate": 4.029850746268657e-05, | |
"loss": 0.0887, | |
"step": 455 | |
}, | |
{ | |
"epoch": 0.9808102345415778, | |
"grad_norm": 0.04107643663883209, | |
"learning_rate": 4.0191897654584225e-05, | |
"loss": 0.0073, | |
"step": 460 | |
}, | |
{ | |
"epoch": 0.9914712153518124, | |
"grad_norm": 0.06751642376184464, | |
"learning_rate": 4.008528784648188e-05, | |
"loss": 0.006, | |
"step": 465 | |
}, | |
{
"epoch": 1.0,
"eval_accuracy": 0.9925333333333334,
"eval_loss": 0.024789396673440933,
"eval_runtime": 52.4044,
"eval_samples_per_second": 71.559,
"eval_steps_per_second": 2.252,
"step": 469
},
{ | |
"epoch": 1.0021321961620469, | |
"grad_norm": 0.1655988246202469, | |
"learning_rate": 3.997867803837953e-05, | |
"loss": 0.007, | |
"step": 470 | |
}, | |
{ | |
"epoch": 1.0127931769722816, | |
"grad_norm": 0.04232338070869446, | |
"learning_rate": 3.987206823027719e-05, | |
"loss": 0.009, | |
"step": 475 | |
}, | |
{ | |
"epoch": 1.023454157782516, | |
"grad_norm": 0.03478037565946579, | |
"learning_rate": 3.976545842217484e-05, | |
"loss": 0.0049, | |
"step": 480 | |
}, | |
{ | |
"epoch": 1.0341151385927505, | |
"grad_norm": 0.03219752386212349, | |
"learning_rate": 3.96588486140725e-05, | |
"loss": 0.0042, | |
"step": 485 | |
}, | |
{ | |
"epoch": 1.044776119402985, | |
"grad_norm": 0.0352039709687233, | |
"learning_rate": 3.9552238805970146e-05, | |
"loss": 0.0091, | |
"step": 490 | |
}, | |
{ | |
"epoch": 1.0554371002132197, | |
"grad_norm": 0.03213835135102272, | |
"learning_rate": 3.944562899786781e-05, | |
"loss": 0.0042, | |
"step": 495 | |
}, | |
{ | |
"epoch": 1.0660980810234542, | |
"grad_norm": 6.631697177886963, | |
"learning_rate": 3.9339019189765456e-05, | |
"loss": 0.0172, | |
"step": 500 | |
}, | |
{ | |
"epoch": 1.0767590618336886, | |
"grad_norm": 0.03505675867199898, | |
"learning_rate": 3.923240938166312e-05, | |
"loss": 0.0177, | |
"step": 505 | |
}, | |
{ | |
"epoch": 1.0874200426439233, | |
"grad_norm": 0.03205326944589615, | |
"learning_rate": 3.9125799573560765e-05, | |
"loss": 0.0046, | |
"step": 510 | |
}, | |
{ | |
"epoch": 1.0980810234541578, | |
"grad_norm": 0.03313703089952469, | |
"learning_rate": 3.901918976545843e-05, | |
"loss": 0.0048, | |
"step": 515 | |
}, | |
{ | |
"epoch": 1.1087420042643923, | |
"grad_norm": 0.06457573175430298, | |
"learning_rate": 3.8912579957356075e-05, | |
"loss": 0.0043, | |
"step": 520 | |
}, | |
{ | |
"epoch": 1.1194029850746268, | |
"grad_norm": 0.029396317899227142, | |
"learning_rate": 3.8805970149253736e-05, | |
"loss": 0.0486, | |
"step": 525 | |
}, | |
{ | |
"epoch": 1.1300639658848615, | |
"grad_norm": 0.030403027310967445, | |
"learning_rate": 3.8699360341151384e-05, | |
"loss": 0.0284, | |
"step": 530 | |
}, | |
{ | |
"epoch": 1.140724946695096, | |
"grad_norm": 0.6405311822891235, | |
"learning_rate": 3.8592750533049046e-05, | |
"loss": 0.0086, | |
"step": 535 | |
}, | |
{ | |
"epoch": 1.1513859275053304, | |
"grad_norm": 0.029770322144031525, | |
"learning_rate": 3.8486140724946694e-05, | |
"loss": 0.0041, | |
"step": 540 | |
}, | |
{ | |
"epoch": 1.1620469083155651, | |
"grad_norm": 0.07603688538074493, | |
"learning_rate": 3.8379530916844355e-05, | |
"loss": 0.0041, | |
"step": 545 | |
}, | |
{ | |
"epoch": 1.1727078891257996, | |
"grad_norm": 0.13620473444461823, | |
"learning_rate": 3.8272921108742e-05, | |
"loss": 0.004, | |
"step": 550 | |
}, | |
{ | |
"epoch": 1.183368869936034, | |
"grad_norm": 0.04136044904589653, | |
"learning_rate": 3.8166311300639665e-05, | |
"loss": 0.0522, | |
"step": 555 | |
}, | |
{ | |
"epoch": 1.1940298507462686, | |
"grad_norm": 0.029707202687859535, | |
"learning_rate": 3.805970149253731e-05, | |
"loss": 0.0039, | |
"step": 560 | |
}, | |
{ | |
"epoch": 1.2046908315565032, | |
"grad_norm": 0.027011705562472343, | |
"learning_rate": 3.7953091684434974e-05, | |
"loss": 0.0207, | |
"step": 565 | |
}, | |
{ | |
"epoch": 1.2153518123667377, | |
"grad_norm": 0.027442237362265587, | |
"learning_rate": 3.784648187633262e-05, | |
"loss": 0.004, | |
"step": 570 | |
}, | |
{ | |
"epoch": 1.2260127931769722, | |
"grad_norm": 0.029470104724168777, | |
"learning_rate": 3.7739872068230284e-05, | |
"loss": 0.0372, | |
"step": 575 | |
}, | |
{ | |
"epoch": 1.236673773987207, | |
"grad_norm": 0.31320253014564514, | |
"learning_rate": 3.763326226012793e-05, | |
"loss": 0.0334, | |
"step": 580 | |
}, | |
{ | |
"epoch": 1.2473347547974414, | |
"grad_norm": 0.041305042803287506, | |
"learning_rate": 3.752665245202559e-05, | |
"loss": 0.0192, | |
"step": 585 | |
}, | |
{ | |
"epoch": 1.2579957356076759, | |
"grad_norm": 0.027358947321772575, | |
"learning_rate": 3.742004264392324e-05, | |
"loss": 0.0035, | |
"step": 590 | |
}, | |
{ | |
"epoch": 1.2686567164179103, | |
"grad_norm": 3.598153829574585, | |
"learning_rate": 3.73134328358209e-05, | |
"loss": 0.0427, | |
"step": 595 | |
}, | |
{ | |
"epoch": 1.279317697228145, | |
"grad_norm": 0.0319012813270092, | |
"learning_rate": 3.720682302771855e-05, | |
"loss": 0.0039, | |
"step": 600 | |
}, | |
{ | |
"epoch": 1.2899786780383795, | |
"grad_norm": 0.24696660041809082, | |
"learning_rate": 3.710021321961621e-05, | |
"loss": 0.0041, | |
"step": 605 | |
}, | |
{ | |
"epoch": 1.3006396588486142, | |
"grad_norm": 0.39110004901885986, | |
"learning_rate": 3.699360341151386e-05, | |
"loss": 0.0068, | |
"step": 610 | |
}, | |
{ | |
"epoch": 1.3113006396588487, | |
"grad_norm": 2.6746857166290283, | |
"learning_rate": 3.6886993603411515e-05, | |
"loss": 0.018, | |
"step": 615 | |
}, | |
{ | |
"epoch": 1.3219616204690832, | |
"grad_norm": 0.052029743790626526, | |
"learning_rate": 3.678038379530917e-05, | |
"loss": 0.0034, | |
"step": 620 | |
}, | |
{ | |
"epoch": 1.3326226012793176, | |
"grad_norm": 2.32309627532959, | |
"learning_rate": 3.6673773987206824e-05, | |
"loss": 0.0064, | |
"step": 625 | |
}, | |
{ | |
"epoch": 1.3432835820895521, | |
"grad_norm": 0.02732570469379425, | |
"learning_rate": 3.656716417910448e-05, | |
"loss": 0.0035, | |
"step": 630 | |
}, | |
{ | |
"epoch": 1.3539445628997868, | |
"grad_norm": 0.026114502921700478, | |
"learning_rate": 3.6460554371002134e-05, | |
"loss": 0.0035, | |
"step": 635 | |
}, | |
{ | |
"epoch": 1.3646055437100213, | |
"grad_norm": 1.3989812135696411, | |
"learning_rate": 3.635394456289979e-05, | |
"loss": 0.0087, | |
"step": 640 | |
}, | |
{ | |
"epoch": 1.375266524520256, | |
"grad_norm": 0.02258048579096794, | |
"learning_rate": 3.624733475479744e-05, | |
"loss": 0.0329, | |
"step": 645 | |
}, | |
{ | |
"epoch": 1.3859275053304905, | |
"grad_norm": 0.02210250124335289, | |
"learning_rate": 3.61407249466951e-05, | |
"loss": 0.0035, | |
"step": 650 | |
}, | |
{ | |
"epoch": 1.396588486140725, | |
"grad_norm": 0.024187836796045303, | |
"learning_rate": 3.603411513859275e-05, | |
"loss": 0.006, | |
"step": 655 | |
}, | |
{ | |
"epoch": 1.4072494669509594, | |
"grad_norm": 0.6303499341011047, | |
"learning_rate": 3.592750533049041e-05, | |
"loss": 0.0069, | |
"step": 660 | |
}, | |
{ | |
"epoch": 1.417910447761194, | |
"grad_norm": 0.023536095395684242, | |
"learning_rate": 3.582089552238806e-05, | |
"loss": 0.0056, | |
"step": 665 | |
}, | |
{ | |
"epoch": 1.4285714285714286, | |
"grad_norm": 0.29033660888671875, | |
"learning_rate": 3.571428571428572e-05, | |
"loss": 0.0033, | |
"step": 670 | |
}, | |
{ | |
"epoch": 1.439232409381663, | |
"grad_norm": 9.409659385681152, | |
"learning_rate": 3.560767590618337e-05, | |
"loss": 0.0466, | |
"step": 675 | |
}, | |
{ | |
"epoch": 1.4498933901918978, | |
"grad_norm": 0.02146943286061287, | |
"learning_rate": 3.5501066098081026e-05, | |
"loss": 0.0275, | |
"step": 680 | |
}, | |
{ | |
"epoch": 1.4605543710021323, | |
"grad_norm": 0.020682044327259064, | |
"learning_rate": 3.539445628997868e-05, | |
"loss": 0.0027, | |
"step": 685 | |
}, | |
{ | |
"epoch": 1.4712153518123667, | |
"grad_norm": 0.022130822762846947, | |
"learning_rate": 3.5287846481876336e-05, | |
"loss": 0.0028, | |
"step": 690 | |
}, | |
{ | |
"epoch": 1.4818763326226012, | |
"grad_norm": 0.02127368561923504, | |
"learning_rate": 3.518123667377399e-05, | |
"loss": 0.0026, | |
"step": 695 | |
}, | |
{ | |
"epoch": 1.4925373134328357, | |
"grad_norm": 0.02118576318025589, | |
"learning_rate": 3.5074626865671645e-05, | |
"loss": 0.0311, | |
"step": 700 | |
}, | |
{ | |
"epoch": 1.5031982942430704, | |
"grad_norm": 0.020313868299126625, | |
"learning_rate": 3.496801705756929e-05, | |
"loss": 0.0167, | |
"step": 705 | |
}, | |
{ | |
"epoch": 1.5138592750533049, | |
"grad_norm": 0.020809153094887733, | |
"learning_rate": 3.4861407249466955e-05, | |
"loss": 0.0044, | |
"step": 710 | |
}, | |
{ | |
"epoch": 1.5245202558635396, | |
"grad_norm": 0.02015788108110428, | |
"learning_rate": 3.47547974413646e-05, | |
"loss": 0.0026, | |
"step": 715 | |
}, | |
{ | |
"epoch": 1.535181236673774, | |
"grad_norm": 0.043719809502363205, | |
"learning_rate": 3.4648187633262264e-05, | |
"loss": 0.0106, | |
"step": 720 | |
}, | |
{ | |
"epoch": 1.5458422174840085, | |
"grad_norm": 1.7854561805725098, | |
"learning_rate": 3.454157782515991e-05, | |
"loss": 0.027, | |
"step": 725 | |
}, | |
{ | |
"epoch": 1.556503198294243, | |
"grad_norm": 0.023914912715554237, | |
"learning_rate": 3.4434968017057574e-05, | |
"loss": 0.0036, | |
"step": 730 | |
}, | |
{ | |
"epoch": 1.5671641791044775, | |
"grad_norm": 0.024362361058592796, | |
"learning_rate": 3.432835820895522e-05, | |
"loss": 0.0026, | |
"step": 735 | |
}, | |
{ | |
"epoch": 1.5778251599147122, | |
"grad_norm": 0.04256792366504669, | |
"learning_rate": 3.422174840085288e-05, | |
"loss": 0.0026, | |
"step": 740 | |
}, | |
{ | |
"epoch": 1.5884861407249466, | |
"grad_norm": 0.018862731754779816, | |
"learning_rate": 3.411513859275053e-05, | |
"loss": 0.0024, | |
"step": 745 | |
}, | |
{ | |
"epoch": 1.5991471215351813, | |
"grad_norm": 0.02639196068048477, | |
"learning_rate": 3.400852878464819e-05, | |
"loss": 0.0031, | |
"step": 750 | |
}, | |
{ | |
"epoch": 1.6098081023454158, | |
"grad_norm": 0.02296234667301178, | |
"learning_rate": 3.390191897654584e-05, | |
"loss": 0.0038, | |
"step": 755 | |
}, | |
{ | |
"epoch": 1.6204690831556503, | |
"grad_norm": 0.04922598972916603, | |
"learning_rate": 3.37953091684435e-05, | |
"loss": 0.0049, | |
"step": 760 | |
}, | |
{ | |
"epoch": 1.6311300639658848, | |
"grad_norm": 6.4008708000183105, | |
"learning_rate": 3.368869936034115e-05, | |
"loss": 0.0075, | |
"step": 765 | |
}, | |
{ | |
"epoch": 1.6417910447761193, | |
"grad_norm": 0.017683830112218857, | |
"learning_rate": 3.358208955223881e-05, | |
"loss": 0.0048, | |
"step": 770 | |
}, | |
{ | |
"epoch": 1.652452025586354, | |
"grad_norm": 0.02495339699089527, | |
"learning_rate": 3.347547974413646e-05, | |
"loss": 0.0024, | |
"step": 775 | |
}, | |
{ | |
"epoch": 1.6631130063965884, | |
"grad_norm": 0.050526976585388184, | |
"learning_rate": 3.336886993603412e-05, | |
"loss": 0.0024, | |
"step": 780 | |
}, | |
{ | |
"epoch": 1.6737739872068231, | |
"grad_norm": 0.01730433478951454, | |
"learning_rate": 3.326226012793177e-05, | |
"loss": 0.0023, | |
"step": 785 | |
}, | |
{ | |
"epoch": 1.6844349680170576, | |
"grad_norm": 0.2560845911502838, | |
"learning_rate": 3.3155650319829424e-05, | |
"loss": 0.0165, | |
"step": 790 | |
}, | |
{ | |
"epoch": 1.695095948827292, | |
"grad_norm": 0.017180945724248886, | |
"learning_rate": 3.304904051172708e-05, | |
"loss": 0.0021, | |
"step": 795 | |
}, | |
{ | |
"epoch": 1.7057569296375266, | |
"grad_norm": 0.01699437014758587, | |
"learning_rate": 3.294243070362473e-05, | |
"loss": 0.0024, | |
"step": 800 | |
}, | |
{ | |
"epoch": 1.716417910447761, | |
"grad_norm": 0.016817433759570122, | |
"learning_rate": 3.283582089552239e-05, | |
"loss": 0.0161, | |
"step": 805 | |
}, | |
{ | |
"epoch": 1.7270788912579957, | |
"grad_norm": 0.018291587010025978, | |
"learning_rate": 3.272921108742004e-05, | |
"loss": 0.0021, | |
"step": 810 | |
}, | |
{ | |
"epoch": 1.7377398720682304, | |
"grad_norm": 0.08288798481225967, | |
"learning_rate": 3.26226012793177e-05, | |
"loss": 0.0408, | |
"step": 815 | |
}, | |
{ | |
"epoch": 1.748400852878465, | |
"grad_norm": 0.01669316366314888, | |
"learning_rate": 3.251599147121535e-05, | |
"loss": 0.0171, | |
"step": 820 | |
}, | |
{ | |
"epoch": 1.7590618336886994, | |
"grad_norm": 6.584457874298096, | |
"learning_rate": 3.240938166311301e-05, | |
"loss": 0.0815, | |
"step": 825 | |
}, | |
{ | |
"epoch": 1.7697228144989339, | |
"grad_norm": 0.0167829766869545, | |
"learning_rate": 3.230277185501066e-05, | |
"loss": 0.02, | |
"step": 830 | |
}, | |
{ | |
"epoch": 1.7803837953091683, | |
"grad_norm": 0.5150222778320312, | |
"learning_rate": 3.2196162046908317e-05, | |
"loss": 0.0027, | |
"step": 835 | |
}, | |
{ | |
"epoch": 1.7910447761194028, | |
"grad_norm": 0.016286632046103477, | |
"learning_rate": 3.208955223880597e-05, | |
"loss": 0.0029, | |
"step": 840 | |
}, | |
{ | |
"epoch": 1.8017057569296375, | |
"grad_norm": 5.640036106109619, | |
"learning_rate": 3.1982942430703626e-05, | |
"loss": 0.0149, | |
"step": 845 | |
}, | |
{ | |
"epoch": 1.8123667377398722, | |
"grad_norm": 0.018778517842292786, | |
"learning_rate": 3.187633262260128e-05, | |
"loss": 0.0029, | |
"step": 850 | |
}, | |
{ | |
"epoch": 1.8230277185501067, | |
"grad_norm": 0.02453577145934105, | |
"learning_rate": 3.1769722814498935e-05, | |
"loss": 0.0295, | |
"step": 855 | |
}, | |
{ | |
"epoch": 1.8336886993603412, | |
"grad_norm": 0.11220049858093262, | |
"learning_rate": 3.166311300639659e-05, | |
"loss": 0.0023, | |
"step": 860 | |
}, | |
{ | |
"epoch": 1.8443496801705757, | |
"grad_norm": 0.01793779991567135, | |
"learning_rate": 3.1556503198294245e-05, | |
"loss": 0.0021, | |
"step": 865 | |
}, | |
{ | |
"epoch": 1.8550106609808101, | |
"grad_norm": 0.07333528250455856, | |
"learning_rate": 3.14498933901919e-05, | |
"loss": 0.0024, | |
"step": 870 | |
}, | |
{ | |
"epoch": 1.8656716417910446, | |
"grad_norm": 0.015899455174803734, | |
"learning_rate": 3.1343283582089554e-05, | |
"loss": 0.0058, | |
"step": 875 | |
}, | |
{ | |
"epoch": 1.8763326226012793, | |
"grad_norm": 7.5077223777771, | |
"learning_rate": 3.123667377398721e-05, | |
"loss": 0.027, | |
"step": 880 | |
}, | |
{ | |
"epoch": 1.886993603411514, | |
"grad_norm": 0.018155360594391823, | |
"learning_rate": 3.1130063965884864e-05, | |
"loss": 0.0047, | |
"step": 885 | |
}, | |
{ | |
"epoch": 1.8976545842217485, | |
"grad_norm": 0.016275795176625252, | |
"learning_rate": 3.102345415778252e-05, | |
"loss": 0.0018, | |
"step": 890 | |
}, | |
{ | |
"epoch": 1.908315565031983, | |
"grad_norm": 0.015320072881877422, | |
"learning_rate": 3.0916844349680173e-05, | |
"loss": 0.0018, | |
"step": 895 | |
}, | |
{ | |
"epoch": 1.9189765458422174, | |
"grad_norm": 0.015198267996311188, | |
"learning_rate": 3.081023454157783e-05, | |
"loss": 0.0019, | |
"step": 900 | |
}, | |
{ | |
"epoch": 1.929637526652452, | |
"grad_norm": 0.017301984131336212, | |
"learning_rate": 3.070362473347548e-05, | |
"loss": 0.0021, | |
"step": 905 | |
}, | |
{ | |
"epoch": 1.9402985074626866, | |
"grad_norm": 0.5218965411186218, | |
"learning_rate": 3.059701492537314e-05, | |
"loss": 0.0123, | |
"step": 910 | |
}, | |
{ | |
"epoch": 1.950959488272921, | |
"grad_norm": 0.015347805805504322, | |
"learning_rate": 3.0490405117270792e-05, | |
"loss": 0.0353, | |
"step": 915 | |
}, | |
{ | |
"epoch": 1.9616204690831558, | |
"grad_norm": 0.10156101733446121, | |
"learning_rate": 3.0383795309168444e-05, | |
"loss": 0.002, | |
"step": 920 | |
}, | |
{ | |
"epoch": 1.9722814498933903, | |
"grad_norm": 0.2850308418273926, | |
"learning_rate": 3.0277185501066102e-05, | |
"loss": 0.0029, | |
"step": 925 | |
}, | |
{ | |
"epoch": 1.9829424307036247, | |
"grad_norm": 0.015180164948105812, | |
"learning_rate": 3.0170575692963753e-05, | |
"loss": 0.048, | |
"step": 930 | |
}, | |
{ | |
"epoch": 1.9936034115138592, | |
"grad_norm": 0.014642196707427502, | |
"learning_rate": 3.006396588486141e-05, | |
"loss": 0.0019, | |
"step": 935 | |
}, | |
{
"epoch": 2.0,
"eval_accuracy": 0.9930666666666667,
"eval_loss": 0.027498725801706314,
"eval_runtime": 52.2356,
"eval_samples_per_second": 71.79,
"eval_steps_per_second": 2.259,
"step": 938
},
{ | |
"epoch": 2.0042643923240937, | |
"grad_norm": 0.014993283897638321, | |
"learning_rate": 2.9957356076759063e-05, | |
"loss": 0.0166, | |
"step": 940 | |
}, | |
{ | |
"epoch": 2.014925373134328, | |
"grad_norm": 0.21592961251735687, | |
"learning_rate": 2.9850746268656714e-05, | |
"loss": 0.0019, | |
"step": 945 | |
}, | |
{ | |
"epoch": 2.025586353944563, | |
"grad_norm": 0.014774264767765999, | |
"learning_rate": 2.9744136460554372e-05, | |
"loss": 0.0018, | |
"step": 950 | |
}, | |
{ | |
"epoch": 2.0362473347547976, | |
"grad_norm": 0.014509981498122215, | |
"learning_rate": 2.9637526652452023e-05, | |
"loss": 0.0017, | |
"step": 955 | |
}, | |
{ | |
"epoch": 2.046908315565032, | |
"grad_norm": 0.268341988325119, | |
"learning_rate": 2.953091684434968e-05, | |
"loss": 0.0042, | |
"step": 960 | |
}, | |
{ | |
"epoch": 2.0575692963752665, | |
"grad_norm": 0.019052350893616676, | |
"learning_rate": 2.9424307036247333e-05, | |
"loss": 0.0019, | |
"step": 965 | |
}, | |
{ | |
"epoch": 2.068230277185501, | |
"grad_norm": 0.01463211327791214, | |
"learning_rate": 2.931769722814499e-05, | |
"loss": 0.0414, | |
"step": 970 | |
}, | |
{ | |
"epoch": 2.0788912579957355, | |
"grad_norm": 0.014595979824662209, | |
"learning_rate": 2.9211087420042642e-05, | |
"loss": 0.0018, | |
"step": 975 | |
}, | |
{ | |
"epoch": 2.08955223880597, | |
"grad_norm": 0.02617252804338932, | |
"learning_rate": 2.91044776119403e-05, | |
"loss": 0.0018, | |
"step": 980 | |
}, | |
{ | |
"epoch": 2.100213219616205, | |
"grad_norm": 0.014648554846644402, | |
"learning_rate": 2.8997867803837952e-05, | |
"loss": 0.002, | |
"step": 985 | |
}, | |
{ | |
"epoch": 2.1108742004264394, | |
"grad_norm": 0.014886678196489811, | |
"learning_rate": 2.889125799573561e-05, | |
"loss": 0.0018, | |
"step": 990 | |
}, | |
{ | |
"epoch": 2.121535181236674, | |
"grad_norm": 0.02294674888253212, | |
"learning_rate": 2.878464818763326e-05, | |
"loss": 0.0019, | |
"step": 995 | |
}, | |
{ | |
"epoch": 2.1321961620469083, | |
"grad_norm": 0.014405577443540096, | |
"learning_rate": 2.867803837953092e-05, | |
"loss": 0.0053, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 2.142857142857143, | |
"grad_norm": 0.013990418054163456, | |
"learning_rate": 2.857142857142857e-05, | |
"loss": 0.0017, | |
"step": 1005 | |
}, | |
{ | |
"epoch": 2.1535181236673773, | |
"grad_norm": 0.014723999425768852, | |
"learning_rate": 2.846481876332623e-05, | |
"loss": 0.0017, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 2.1641791044776117, | |
"grad_norm": 0.01395417656749487, | |
"learning_rate": 2.835820895522388e-05, | |
"loss": 0.0022, | |
"step": 1015 | |
}, | |
{ | |
"epoch": 2.1748400852878467, | |
"grad_norm": 0.014218118973076344, | |
"learning_rate": 2.825159914712154e-05, | |
"loss": 0.0016, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 2.185501066098081, | |
"grad_norm": 0.013465196825563908, | |
"learning_rate": 2.814498933901919e-05, | |
"loss": 0.0016, | |
"step": 1025 | |
}, | |
{ | |
"epoch": 2.1961620469083156, | |
"grad_norm": 0.23096726834774017, | |
"learning_rate": 2.8038379530916848e-05, | |
"loss": 0.0017, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 2.20682302771855, | |
"grad_norm": 0.01287803053855896, | |
"learning_rate": 2.79317697228145e-05, | |
"loss": 0.0016, | |
"step": 1035 | |
}, | |
{ | |
"epoch": 2.2174840085287846, | |
"grad_norm": 0.012949486263096333, | |
"learning_rate": 2.7825159914712157e-05, | |
"loss": 0.0018, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 2.228144989339019, | |
"grad_norm": 0.014992132782936096, | |
"learning_rate": 2.771855010660981e-05, | |
"loss": 0.0017, | |
"step": 1045 | |
}, | |
{ | |
"epoch": 2.2388059701492535, | |
"grad_norm": 0.012656531296670437, | |
"learning_rate": 2.7611940298507467e-05, | |
"loss": 0.0015, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 2.2494669509594885, | |
"grad_norm": 0.013797188177704811, | |
"learning_rate": 2.7505330490405118e-05, | |
"loss": 0.0287, | |
"step": 1055 | |
}, | |
{ | |
"epoch": 2.260127931769723, | |
"grad_norm": 0.01297246664762497, | |
"learning_rate": 2.7398720682302776e-05, | |
"loss": 0.0061, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 2.2707889125799574, | |
"grad_norm": 0.01339882705360651, | |
"learning_rate": 2.7292110874200428e-05, | |
"loss": 0.0022, | |
"step": 1065 | |
}, | |
{ | |
"epoch": 2.281449893390192, | |
"grad_norm": 0.06925658881664276, | |
"learning_rate": 2.7185501066098086e-05, | |
"loss": 0.0016, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 2.2921108742004264, | |
"grad_norm": 0.012873906642198563, | |
"learning_rate": 2.7078891257995737e-05, | |
"loss": 0.0016, | |
"step": 1075 | |
}, | |
{ | |
"epoch": 2.302771855010661, | |
"grad_norm": 0.3088153898715973, | |
"learning_rate": 2.6972281449893395e-05, | |
"loss": 0.0021, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 2.3134328358208958, | |
"grad_norm": 0.01247407216578722, | |
"learning_rate": 2.6865671641791047e-05, | |
"loss": 0.0015, | |
"step": 1085 | |
}, | |
{ | |
"epoch": 2.3240938166311302, | |
"grad_norm": 0.7272318601608276, | |
"learning_rate": 2.6759061833688705e-05, | |
"loss": 0.0023, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 2.3347547974413647, | |
"grad_norm": 0.02615715190768242, | |
"learning_rate": 2.6652452025586356e-05, | |
"loss": 0.0015, | |
"step": 1095 | |
}, | |
{ | |
"epoch": 2.345415778251599, | |
"grad_norm": 0.01222276221960783, | |
"learning_rate": 2.6545842217484007e-05, | |
"loss": 0.0062, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 2.3560767590618337, | |
"grad_norm": 0.01195189356803894, | |
"learning_rate": 2.6439232409381666e-05, | |
"loss": 0.0015, | |
"step": 1105 | |
}, | |
{ | |
"epoch": 2.366737739872068, | |
"grad_norm": 0.01233593001961708, | |
"learning_rate": 2.6332622601279317e-05, | |
"loss": 0.0014, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 2.3773987206823026, | |
"grad_norm": 0.013035369105637074, | |
"learning_rate": 2.6226012793176975e-05, | |
"loss": 0.0278, | |
"step": 1115 | |
}, | |
{ | |
"epoch": 2.388059701492537, | |
"grad_norm": 0.016300667077302933, | |
"learning_rate": 2.6119402985074626e-05, | |
"loss": 0.0024, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 2.398720682302772, | |
"grad_norm": 0.01196726318448782, | |
"learning_rate": 2.6012793176972285e-05, | |
"loss": 0.0016, | |
"step": 1125 | |
}, | |
{ | |
"epoch": 2.4093816631130065, | |
"grad_norm": 0.012641930021345615, | |
"learning_rate": 2.5906183368869936e-05, | |
"loss": 0.0014, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 2.420042643923241, | |
"grad_norm": 0.011776166968047619, | |
"learning_rate": 2.5799573560767594e-05, | |
"loss": 0.0016, | |
"step": 1135 | |
}, | |
{ | |
"epoch": 2.4307036247334755, | |
"grad_norm": 0.012702585197985172, | |
"learning_rate": 2.5692963752665245e-05, | |
"loss": 0.0014, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 2.44136460554371, | |
"grad_norm": 0.029008181765675545, | |
"learning_rate": 2.5586353944562904e-05, | |
"loss": 0.0035, | |
"step": 1145 | |
}, | |
{ | |
"epoch": 2.4520255863539444, | |
"grad_norm": 0.012029062025249004, | |
"learning_rate": 2.5479744136460555e-05, | |
"loss": 0.0043, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 2.4626865671641793, | |
"grad_norm": 0.012926764786243439, | |
"learning_rate": 2.537313432835821e-05, | |
"loss": 0.0034, | |
"step": 1155 | |
}, | |
{ | |
"epoch": 2.473347547974414, | |
"grad_norm": 0.013837242498993874, | |
"learning_rate": 2.5266524520255864e-05, | |
"loss": 0.0177, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 2.4840085287846483, | |
"grad_norm": 0.011425266973674297, | |
"learning_rate": 2.515991471215352e-05, | |
"loss": 0.0014, | |
"step": 1165 | |
}, | |
{ | |
"epoch": 2.4946695095948828, | |
"grad_norm": 0.011828986927866936, | |
"learning_rate": 2.5053304904051174e-05, | |
"loss": 0.0014, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 2.5053304904051172, | |
"grad_norm": 0.01125127449631691, | |
"learning_rate": 2.494669509594883e-05, | |
"loss": 0.0014, | |
"step": 1175 | |
}, | |
{ | |
"epoch": 2.5159914712153517, | |
"grad_norm": 0.011239953339099884, | |
"learning_rate": 2.4840085287846483e-05, | |
"loss": 0.0014, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 2.526652452025586, | |
"grad_norm": 0.015625856816768646, | |
"learning_rate": 2.4733475479744138e-05, | |
"loss": 0.0014, | |
"step": 1185 | |
}, | |
{ | |
"epoch": 2.5373134328358207, | |
"grad_norm": 0.01832733303308487, | |
"learning_rate": 2.4626865671641793e-05, | |
"loss": 0.0015, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 2.5479744136460556, | |
"grad_norm": 0.012323500588536263, | |
"learning_rate": 2.4520255863539444e-05, | |
"loss": 0.0013, | |
"step": 1195 | |
}, | |
{ | |
"epoch": 2.55863539445629, | |
"grad_norm": 0.010705026797950268, | |
"learning_rate": 2.44136460554371e-05, | |
"loss": 0.0013, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 2.5692963752665245, | |
"grad_norm": 0.01072059664875269, | |
"learning_rate": 2.4307036247334754e-05, | |
"loss": 0.0013, | |
"step": 1205 | |
}, | |
{ | |
"epoch": 2.579957356076759, | |
"grad_norm": 0.010688871145248413, | |
"learning_rate": 2.420042643923241e-05, | |
"loss": 0.0013, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 2.5906183368869935, | |
"grad_norm": 0.010716568678617477, | |
"learning_rate": 2.4093816631130063e-05, | |
"loss": 0.0017, | |
"step": 1215 | |
}, | |
{ | |
"epoch": 2.6012793176972284, | |
"grad_norm": 0.010534852743148804, | |
"learning_rate": 2.3987206823027718e-05, | |
"loss": 0.0013, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 2.611940298507463, | |
"grad_norm": 0.14711648225784302, | |
"learning_rate": 2.3880597014925373e-05, | |
"loss": 0.0035, | |
"step": 1225 | |
}, | |
{ | |
"epoch": 2.6226012793176974, | |
"grad_norm": 0.018022488802671432, | |
"learning_rate": 2.3773987206823027e-05, | |
"loss": 0.0015, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 2.633262260127932, | |
"grad_norm": 0.1834811568260193, | |
"learning_rate": 2.3667377398720682e-05, | |
"loss": 0.0014, | |
"step": 1235 | |
}, | |
{ | |
"epoch": 2.6439232409381663, | |
"grad_norm": 0.011194993741810322, | |
"learning_rate": 2.3560767590618337e-05, | |
"loss": 0.0012, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 2.654584221748401, | |
"grad_norm": 6.7584357261657715, | |
"learning_rate": 2.345415778251599e-05, | |
"loss": 0.0104, | |
"step": 1245 | |
}, | |
{ | |
"epoch": 2.6652452025586353, | |
"grad_norm": 0.010337951593101025, | |
"learning_rate": 2.3347547974413646e-05, | |
"loss": 0.0013, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 2.6759061833688698, | |
"grad_norm": 0.010701274499297142, | |
"learning_rate": 2.32409381663113e-05, | |
"loss": 0.0013, | |
"step": 1255 | |
}, | |
{ | |
"epoch": 2.6865671641791042, | |
"grad_norm": 0.010305657051503658, | |
"learning_rate": 2.3134328358208956e-05, | |
"loss": 0.0013, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 2.697228144989339, | |
"grad_norm": 0.01426158007234335, | |
"learning_rate": 2.302771855010661e-05, | |
"loss": 0.0013, | |
"step": 1265 | |
}, | |
{ | |
"epoch": 2.7078891257995736, | |
"grad_norm": 0.010147635824978352, | |
"learning_rate": 2.2921108742004265e-05, | |
"loss": 0.0012, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 2.718550106609808, | |
"grad_norm": 0.010234709829092026, | |
"learning_rate": 2.281449893390192e-05, | |
"loss": 0.0012, | |
"step": 1275 | |
}, | |
{ | |
"epoch": 2.7292110874200426, | |
"grad_norm": 0.009930884465575218, | |
"learning_rate": 2.2707889125799575e-05, | |
"loss": 0.0101, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 2.739872068230277, | |
"grad_norm": 0.01083667203783989, | |
"learning_rate": 2.260127931769723e-05, | |
"loss": 0.0012, | |
"step": 1285 | |
}, | |
{ | |
"epoch": 2.750533049040512, | |
"grad_norm": 0.010491997934877872, | |
"learning_rate": 2.2494669509594884e-05, | |
"loss": 0.0012, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 2.7611940298507465, | |
"grad_norm": 0.009906476363539696, | |
"learning_rate": 2.238805970149254e-05, | |
"loss": 0.0015, | |
"step": 1295 | |
}, | |
{ | |
"epoch": 2.771855010660981, | |
"grad_norm": 0.009655443951487541, | |
"learning_rate": 2.2281449893390194e-05, | |
"loss": 0.0018, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 2.7825159914712154, | |
"grad_norm": 0.010156660340726376, | |
"learning_rate": 2.217484008528785e-05, | |
"loss": 0.0012, | |
"step": 1305 | |
}, | |
{ | |
"epoch": 2.79317697228145, | |
"grad_norm": 0.016699986532330513, | |
"learning_rate": 2.2068230277185503e-05, | |
"loss": 0.0033, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 2.8038379530916844, | |
"grad_norm": 0.019313324242830276, | |
"learning_rate": 2.1961620469083158e-05, | |
"loss": 0.0013, | |
"step": 1315 | |
}, | |
{ | |
"epoch": 2.814498933901919, | |
"grad_norm": 0.009878424927592278, | |
"learning_rate": 2.1855010660980813e-05, | |
"loss": 0.0011, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 2.8251599147121533, | |
"grad_norm": 0.1228569820523262, | |
"learning_rate": 2.1748400852878467e-05, | |
"loss": 0.0013, | |
"step": 1325 | |
}, | |
{ | |
"epoch": 2.835820895522388, | |
"grad_norm": 0.010684937238693237, | |
"learning_rate": 2.164179104477612e-05, | |
"loss": 0.0013, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 2.8464818763326227, | |
"grad_norm": 0.01632467471063137, | |
"learning_rate": 2.1535181236673773e-05, | |
"loss": 0.0019, | |
"step": 1335 | |
}, | |
{ | |
"epoch": 2.857142857142857, | |
"grad_norm": 0.0252471212297678, | |
"learning_rate": 2.1428571428571428e-05, | |
"loss": 0.0016, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 2.8678038379530917, | |
"grad_norm": 0.00938443187624216, | |
"learning_rate": 2.1321961620469083e-05, | |
"loss": 0.0011, | |
"step": 1345 | |
}, | |
{ | |
"epoch": 2.878464818763326, | |
"grad_norm": 0.01014285534620285, | |
"learning_rate": 2.1215351812366738e-05, | |
"loss": 0.0019, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 2.8891257995735606, | |
"grad_norm": 0.009207502007484436, | |
"learning_rate": 2.1108742004264392e-05, | |
"loss": 0.0011, | |
"step": 1355 | |
}, | |
{ | |
"epoch": 2.8997867803837956, | |
"grad_norm": 2.751433849334717, | |
"learning_rate": 2.1002132196162047e-05, | |
"loss": 0.0034, | |
"step": 1360 | |
}, | |
{ | |
"epoch": 2.91044776119403, | |
"grad_norm": 0.010619159787893295, | |
"learning_rate": 2.0895522388059702e-05, | |
"loss": 0.0012, | |
"step": 1365 | |
}, | |
{ | |
"epoch": 2.9211087420042645, | |
"grad_norm": 0.00943779107183218, | |
"learning_rate": 2.0788912579957357e-05, | |
"loss": 0.0011, | |
"step": 1370 | |
}, | |
{ | |
"epoch": 2.931769722814499, | |
"grad_norm": 0.014201086014509201, | |
"learning_rate": 2.068230277185501e-05, | |
"loss": 0.0012, | |
"step": 1375 | |
}, | |
{ | |
"epoch": 2.9424307036247335, | |
"grad_norm": 0.009301646612584591, | |
"learning_rate": 2.0575692963752666e-05, | |
"loss": 0.0013, | |
"step": 1380 | |
}, | |
{ | |
"epoch": 2.953091684434968, | |
"grad_norm": 0.033786121755838394, | |
"learning_rate": 2.046908315565032e-05, | |
"loss": 0.0258, | |
"step": 1385 | |
}, | |
{ | |
"epoch": 2.9637526652452024, | |
"grad_norm": 0.008908209390938282, | |
"learning_rate": 2.0362473347547976e-05, | |
"loss": 0.0098, | |
"step": 1390 | |
}, | |
{ | |
"epoch": 2.974413646055437, | |
"grad_norm": 0.015553606674075127, | |
"learning_rate": 2.025586353944563e-05, | |
"loss": 0.0011, | |
"step": 1395 | |
}, | |
{ | |
"epoch": 2.9850746268656714, | |
"grad_norm": 15.196128845214844, | |
"learning_rate": 2.0149253731343285e-05, | |
"loss": 0.013, | |
"step": 1400 | |
}, | |
{ | |
"epoch": 2.9957356076759063, | |
"grad_norm": 0.04082879796624184, | |
"learning_rate": 2.004264392324094e-05, | |
"loss": 0.0012, | |
"step": 1405 | |
}, | |
{
"epoch": 3.0,
"eval_accuracy": 0.9952,
"eval_loss": 0.02067788876593113,
"eval_runtime": 52.2509,
"eval_samples_per_second": 71.769,
"eval_steps_per_second": 2.258,
"step": 1407
},
{ | |
"epoch": 3.0063965884861408, | |
"grad_norm": 0.008844832889735699, | |
"learning_rate": 1.9936034115138594e-05, | |
"loss": 0.0011, | |
"step": 1410 | |
}, | |
{ | |
"epoch": 3.0170575692963753, | |
"grad_norm": 0.00872724037617445, | |
"learning_rate": 1.982942430703625e-05, | |
"loss": 0.0011, | |
"step": 1415 | |
}, | |
{ | |
"epoch": 3.0277185501066097, | |
"grad_norm": 0.009082204662263393, | |
"learning_rate": 1.9722814498933904e-05, | |
"loss": 0.0011, | |
"step": 1420 | |
}, | |
{ | |
"epoch": 3.038379530916844, | |
"grad_norm": 0.010032808408141136, | |
"learning_rate": 1.961620469083156e-05, | |
"loss": 0.0016, | |
"step": 1425 | |
}, | |
{ | |
"epoch": 3.0490405117270787, | |
"grad_norm": 0.008777284994721413, | |
"learning_rate": 1.9509594882729213e-05, | |
"loss": 0.0012, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 3.0597014925373136, | |
"grad_norm": 0.008815384469926357, | |
"learning_rate": 1.9402985074626868e-05, | |
"loss": 0.0011, | |
"step": 1435 | |
}, | |
{ | |
"epoch": 3.070362473347548, | |
"grad_norm": 0.008484862744808197, | |
"learning_rate": 1.9296375266524523e-05, | |
"loss": 0.001, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 3.0810234541577826, | |
"grad_norm": 1.3054378032684326, | |
"learning_rate": 1.9189765458422178e-05, | |
"loss": 0.0033, | |
"step": 1445 | |
}, | |
{ | |
"epoch": 3.091684434968017, | |
"grad_norm": 0.008614038117229939, | |
"learning_rate": 1.9083155650319832e-05, | |
"loss": 0.001, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 3.1023454157782515, | |
"grad_norm": 0.015650274232029915, | |
"learning_rate": 1.8976545842217487e-05, | |
"loss": 0.0011, | |
"step": 1455 | |
}, | |
{ | |
"epoch": 3.113006396588486, | |
"grad_norm": 0.018463708460330963, | |
"learning_rate": 1.8869936034115142e-05, | |
"loss": 0.0011, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 3.1236673773987205, | |
"grad_norm": 0.008458460681140423, | |
"learning_rate": 1.8763326226012797e-05, | |
"loss": 0.0011, | |
"step": 1465 | |
}, | |
{ | |
"epoch": 3.1343283582089554, | |
"grad_norm": 0.009100639261305332, | |
"learning_rate": 1.865671641791045e-05, | |
"loss": 0.001, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 3.14498933901919, | |
"grad_norm": 0.008369974792003632, | |
"learning_rate": 1.8550106609808106e-05, | |
"loss": 0.001, | |
"step": 1475 | |
}, | |
{ | |
"epoch": 3.1556503198294243, | |
"grad_norm": 0.008374682627618313, | |
"learning_rate": 1.8443496801705757e-05, | |
"loss": 0.001, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 3.166311300639659, | |
"grad_norm": 0.008124297484755516, | |
"learning_rate": 1.8336886993603412e-05, | |
"loss": 0.0011, | |
"step": 1485 | |
}, | |
{ | |
"epoch": 3.1769722814498933, | |
"grad_norm": 0.008286545984447002, | |
"learning_rate": 1.8230277185501067e-05, | |
"loss": 0.001, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 3.1876332622601278, | |
"grad_norm": 0.019535064697265625, | |
"learning_rate": 1.812366737739872e-05, | |
"loss": 0.0011, | |
"step": 1495 | |
}, | |
{ | |
"epoch": 3.1982942430703627, | |
"grad_norm": 0.00818800088018179, | |
"learning_rate": 1.8017057569296376e-05, | |
"loss": 0.001, | |
"step": 1500 | |
}, | |
{ | |
"epoch": 3.208955223880597, | |
"grad_norm": 0.008247826248407364, | |
"learning_rate": 1.791044776119403e-05, | |
"loss": 0.001, | |
"step": 1505 | |
}, | |
{ | |
"epoch": 3.2196162046908317, | |
"grad_norm": 0.00814014207571745, | |
"learning_rate": 1.7803837953091686e-05, | |
"loss": 0.0009, | |
"step": 1510 | |
}, | |
{ | |
"epoch": 3.230277185501066, | |
"grad_norm": 0.00809234194457531, | |
"learning_rate": 1.769722814498934e-05, | |
"loss": 0.0009, | |
"step": 1515 | |
}, | |
{ | |
"epoch": 3.2409381663113006, | |
"grad_norm": 0.008812040090560913, | |
"learning_rate": 1.7590618336886995e-05, | |
"loss": 0.0009, | |
"step": 1520 | |
}, | |
{ | |
"epoch": 3.251599147121535, | |
"grad_norm": 0.00842578150331974, | |
"learning_rate": 1.7484008528784647e-05, | |
"loss": 0.001, | |
"step": 1525 | |
}, | |
{ | |
"epoch": 3.2622601279317696, | |
"grad_norm": 0.007962945848703384, | |
"learning_rate": 1.73773987206823e-05, | |
"loss": 0.0009, | |
"step": 1530 | |
}, | |
{ | |
"epoch": 3.272921108742004, | |
"grad_norm": 0.048308584839105606, | |
"learning_rate": 1.7270788912579956e-05, | |
"loss": 0.0016, | |
"step": 1535 | |
}, | |
{ | |
"epoch": 3.283582089552239, | |
"grad_norm": 0.008920993655920029, | |
"learning_rate": 1.716417910447761e-05, | |
"loss": 0.001, | |
"step": 1540 | |
}, | |
{ | |
"epoch": 3.2942430703624734, | |
"grad_norm": 0.00801799912005663, | |
"learning_rate": 1.7057569296375266e-05, | |
"loss": 0.001, | |
"step": 1545 | |
}, | |
{ | |
"epoch": 3.304904051172708, | |
"grad_norm": 0.008710336871445179, | |
"learning_rate": 1.695095948827292e-05, | |
"loss": 0.0011, | |
"step": 1550 | |
}, | |
{ | |
"epoch": 3.3155650319829424, | |
"grad_norm": 0.007848365232348442, | |
"learning_rate": 1.6844349680170575e-05, | |
"loss": 0.001, | |
"step": 1555 | |
}, | |
{ | |
"epoch": 3.326226012793177, | |
"grad_norm": 0.008013375103473663, | |
"learning_rate": 1.673773987206823e-05, | |
"loss": 0.0009, | |
"step": 1560 | |
}, | |
{ | |
"epoch": 3.3368869936034113, | |
"grad_norm": 0.00788216944783926, | |
"learning_rate": 1.6631130063965885e-05, | |
"loss": 0.0009, | |
"step": 1565 | |
}, | |
{ | |
"epoch": 3.3475479744136463, | |
"grad_norm": 0.007833545096218586, | |
"learning_rate": 1.652452025586354e-05, | |
"loss": 0.001, | |
"step": 1570 | |
}, | |
{ | |
"epoch": 3.3582089552238807, | |
"grad_norm": 0.0081242760643363, | |
"learning_rate": 1.6417910447761194e-05, | |
"loss": 0.001, | |
"step": 1575 | |
}, | |
{ | |
"epoch": 3.368869936034115, | |
"grad_norm": 0.00763153750449419, | |
"learning_rate": 1.631130063965885e-05, | |
"loss": 0.001, | |
"step": 1580 | |
}, | |
{ | |
"epoch": 3.3795309168443497, | |
"grad_norm": 0.00935972761362791, | |
"learning_rate": 1.6204690831556504e-05, | |
"loss": 0.001, | |
"step": 1585 | |
}, | |
{ | |
"epoch": 3.390191897654584, | |
"grad_norm": 0.017936240881681442, | |
"learning_rate": 1.6098081023454158e-05, | |
"loss": 0.0011, | |
"step": 1590 | |
}, | |
{ | |
"epoch": 3.4008528784648187, | |
"grad_norm": 1.1449828147888184, | |
"learning_rate": 1.5991471215351813e-05, | |
"loss": 0.0014, | |
"step": 1595 | |
}, | |
{ | |
"epoch": 3.411513859275053, | |
"grad_norm": 0.007699308451265097, | |
"learning_rate": 1.5884861407249468e-05, | |
"loss": 0.0009, | |
"step": 1600 | |
}, | |
{ | |
"epoch": 3.4221748400852876, | |
"grad_norm": 0.00793137215077877, | |
"learning_rate": 1.5778251599147122e-05, | |
"loss": 0.0009, | |
"step": 1605 | |
}, | |
{ | |
"epoch": 3.4328358208955225, | |
"grad_norm": 0.007621863391250372, | |
"learning_rate": 1.5671641791044777e-05, | |
"loss": 0.0009, | |
"step": 1610 | |
}, | |
{ | |
"epoch": 3.443496801705757, | |
"grad_norm": 0.007543520070612431, | |
"learning_rate": 1.5565031982942432e-05, | |
"loss": 0.0009, | |
"step": 1615 | |
}, | |
{ | |
"epoch": 3.4541577825159915, | |
"grad_norm": 0.0081236083060503, | |
"learning_rate": 1.5458422174840087e-05, | |
"loss": 0.0009, | |
"step": 1620 | |
}, | |
{ | |
"epoch": 3.464818763326226, | |
"grad_norm": 0.007582434453070164, | |
"learning_rate": 1.535181236673774e-05, | |
"loss": 0.0009, | |
"step": 1625 | |
}, | |
{ | |
"epoch": 3.4754797441364604, | |
"grad_norm": 0.008041488006711006, | |
"learning_rate": 1.5245202558635396e-05, | |
"loss": 0.0009, | |
"step": 1630 | |
}, | |
{ | |
"epoch": 3.486140724946695, | |
"grad_norm": 0.09129466116428375, | |
"learning_rate": 1.5138592750533051e-05, | |
"loss": 0.001, | |
"step": 1635 | |
}, | |
{ | |
"epoch": 3.49680170575693, | |
"grad_norm": 0.012182862497866154, | |
"learning_rate": 1.5031982942430706e-05, | |
"loss": 0.0009, | |
"step": 1640 | |
}, | |
{ | |
"epoch": 3.5074626865671643, | |
"grad_norm": 0.008511735126376152, | |
"learning_rate": 1.4925373134328357e-05, | |
"loss": 0.002, | |
"step": 1645 | |
}, | |
{ | |
"epoch": 3.518123667377399, | |
"grad_norm": 0.00730988709256053, | |
"learning_rate": 1.4818763326226012e-05, | |
"loss": 0.0009, | |
"step": 1650 | |
}, | |
{ | |
"epoch": 3.5287846481876333, | |
"grad_norm": 0.007615935988724232, | |
"learning_rate": 1.4712153518123666e-05, | |
"loss": 0.0009, | |
"step": 1655 | |
}, | |
{ | |
"epoch": 3.5394456289978677, | |
"grad_norm": 0.024800151586532593, | |
"learning_rate": 1.4605543710021321e-05, | |
"loss": 0.001, | |
"step": 1660 | |
}, | |
{ | |
"epoch": 3.550106609808102, | |
"grad_norm": 0.07143282890319824, | |
"learning_rate": 1.4498933901918976e-05, | |
"loss": 0.0012, | |
"step": 1665 | |
}, | |
{ | |
"epoch": 3.5607675906183367, | |
"grad_norm": 0.008246131241321564, | |
"learning_rate": 1.439232409381663e-05, | |
"loss": 0.0009, | |
"step": 1670 | |
}, | |
{ | |
"epoch": 3.571428571428571, | |
"grad_norm": 0.007491565775126219, | |
"learning_rate": 1.4285714285714285e-05, | |
"loss": 0.0009, | |
"step": 1675 | |
}, | |
{ | |
"epoch": 3.582089552238806, | |
"grad_norm": 0.007407619617879391, | |
"learning_rate": 1.417910447761194e-05, | |
"loss": 0.0009, | |
"step": 1680 | |
}, | |
{ | |
"epoch": 3.5927505330490406, | |
"grad_norm": 0.007541383150964975, | |
"learning_rate": 1.4072494669509595e-05, | |
"loss": 0.0009, | |
"step": 1685 | |
}, | |
{ | |
"epoch": 3.603411513859275, | |
"grad_norm": 0.007285376079380512, | |
"learning_rate": 1.396588486140725e-05, | |
"loss": 0.0008, | |
"step": 1690 | |
}, | |
{ | |
"epoch": 3.6140724946695095, | |
"grad_norm": 0.007131835911422968, | |
"learning_rate": 1.3859275053304904e-05, | |
"loss": 0.0008, | |
"step": 1695 | |
}, | |
{ | |
"epoch": 3.624733475479744, | |
"grad_norm": 0.007167383097112179, | |
"learning_rate": 1.3752665245202559e-05, | |
"loss": 0.0008, | |
"step": 1700 | |
}, | |
{ | |
"epoch": 3.635394456289979, | |
"grad_norm": 0.007131422404199839, | |
"learning_rate": 1.3646055437100214e-05, | |
"loss": 0.0008, | |
"step": 1705 | |
}, | |
{ | |
"epoch": 3.6460554371002134, | |
"grad_norm": 0.006981381215155125, | |
"learning_rate": 1.3539445628997869e-05, | |
"loss": 0.0008, | |
"step": 1710 | |
}, | |
{ | |
"epoch": 3.656716417910448, | |
"grad_norm": 0.007055275607854128, | |
"learning_rate": 1.3432835820895523e-05, | |
"loss": 0.0009, | |
"step": 1715 | |
}, | |
{ | |
"epoch": 3.6673773987206824, | |
"grad_norm": 0.007165293674916029, | |
"learning_rate": 1.3326226012793178e-05, | |
"loss": 0.0008, | |
"step": 1720 | |
}, | |
{ | |
"epoch": 3.678038379530917, | |
"grad_norm": 0.007502025458961725, | |
"learning_rate": 1.3219616204690833e-05, | |
"loss": 0.0008, | |
"step": 1725 | |
}, | |
{ | |
"epoch": 3.6886993603411513, | |
"grad_norm": 0.00726438919082284, | |
"learning_rate": 1.3113006396588488e-05, | |
"loss": 0.0009, | |
"step": 1730 | |
}, | |
{ | |
"epoch": 3.699360341151386, | |
"grad_norm": 0.007199421990662813, | |
"learning_rate": 1.3006396588486142e-05, | |
"loss": 0.0009, | |
"step": 1735 | |
}, | |
{ | |
"epoch": 3.7100213219616203, | |
"grad_norm": 0.007022072095423937, | |
"learning_rate": 1.2899786780383797e-05, | |
"loss": 0.0008, | |
"step": 1740 | |
}, | |
{ | |
"epoch": 3.7206823027718547, | |
"grad_norm": 0.007612633518874645, | |
"learning_rate": 1.2793176972281452e-05, | |
"loss": 0.0008, | |
"step": 1745 | |
}, | |
{ | |
"epoch": 3.7313432835820897, | |
"grad_norm": 0.007145090028643608, | |
"learning_rate": 1.2686567164179105e-05, | |
"loss": 0.0008, | |
"step": 1750 | |
}, | |
{ | |
"epoch": 3.742004264392324, | |
"grad_norm": 0.007034282200038433, | |
"learning_rate": 1.257995735607676e-05, | |
"loss": 0.0008, | |
"step": 1755 | |
}, | |
{ | |
"epoch": 3.7526652452025586, | |
"grad_norm": 0.007143386639654636, | |
"learning_rate": 1.2473347547974414e-05, | |
"loss": 0.0008, | |
"step": 1760 | |
}, | |
{ | |
"epoch": 3.763326226012793, | |
"grad_norm": 0.007086001802235842, | |
"learning_rate": 1.2366737739872069e-05, | |
"loss": 0.0008, | |
"step": 1765 | |
}, | |
{ | |
"epoch": 3.7739872068230276, | |
"grad_norm": 0.007285911124199629, | |
"learning_rate": 1.2260127931769722e-05, | |
"loss": 0.0008, | |
"step": 1770 | |
}, | |
{ | |
"epoch": 3.7846481876332625, | |
"grad_norm": 0.0068399906158447266, | |
"learning_rate": 1.2153518123667377e-05, | |
"loss": 0.001, | |
"step": 1775 | |
}, | |
{ | |
"epoch": 3.795309168443497, | |
"grad_norm": 0.026388036087155342, | |
"learning_rate": 1.2046908315565032e-05, | |
"loss": 0.0008, | |
"step": 1780 | |
}, | |
{ | |
"epoch": 3.8059701492537314, | |
"grad_norm": 0.006789471488445997, | |
"learning_rate": 1.1940298507462686e-05, | |
"loss": 0.0012, | |
"step": 1785 | |
}, | |
{ | |
"epoch": 3.816631130063966, | |
"grad_norm": 0.006734826602041721, | |
"learning_rate": 1.1833688699360341e-05, | |
"loss": 0.0008, | |
"step": 1790 | |
}, | |
{ | |
"epoch": 3.8272921108742004, | |
"grad_norm": 0.008945029228925705, | |
"learning_rate": 1.1727078891257996e-05, | |
"loss": 0.0008, | |
"step": 1795 | |
}, | |
{ | |
"epoch": 3.837953091684435, | |
"grad_norm": 0.006997235585004091, | |
"learning_rate": 1.162046908315565e-05, | |
"loss": 0.0008, | |
"step": 1800 | |
}, | |
{ | |
"epoch": 3.8486140724946694, | |
"grad_norm": 0.006867801304906607, | |
"learning_rate": 1.1513859275053305e-05, | |
"loss": 0.0008, | |
"step": 1805 | |
}, | |
{ | |
"epoch": 3.859275053304904, | |
"grad_norm": 0.006951611954718828, | |
"learning_rate": 1.140724946695096e-05, | |
"loss": 0.0008, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 3.8699360341151388, | |
"grad_norm": 0.007590449880808592, | |
"learning_rate": 1.1300639658848615e-05, | |
"loss": 0.0008, | |
"step": 1815 | |
}, | |
{ | |
"epoch": 3.8805970149253732, | |
"grad_norm": 0.00673929275944829, | |
"learning_rate": 1.119402985074627e-05, | |
"loss": 0.0008, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 3.8912579957356077, | |
"grad_norm": 0.00712326355278492, | |
"learning_rate": 1.1087420042643924e-05, | |
"loss": 0.0021, | |
"step": 1825 | |
}, | |
{ | |
"epoch": 3.901918976545842, | |
"grad_norm": 0.006742220371961594, | |
"learning_rate": 1.0980810234541579e-05, | |
"loss": 0.0008, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 3.9125799573560767, | |
"grad_norm": 0.006702127400785685, | |
"learning_rate": 1.0874200426439234e-05, | |
"loss": 0.0008, | |
"step": 1835 | |
}, | |
{ | |
"epoch": 3.923240938166311, | |
"grad_norm": 0.023729166015982628, | |
"learning_rate": 1.0767590618336887e-05, | |
"loss": 0.0016, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 3.933901918976546, | |
"grad_norm": 0.007211573887616396, | |
"learning_rate": 1.0660980810234541e-05, | |
"loss": 0.0008, | |
"step": 1845 | |
}, | |
{ | |
"epoch": 3.9445628997867805, | |
"grad_norm": 0.0069965398870408535, | |
"learning_rate": 1.0554371002132196e-05, | |
"loss": 0.0008, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 3.955223880597015, | |
"grad_norm": 0.008190443739295006, | |
"learning_rate": 1.0447761194029851e-05, | |
"loss": 0.0008, | |
"step": 1855 | |
}, | |
{ | |
"epoch": 3.9658848614072495, | |
"grad_norm": 0.00799752026796341, | |
"learning_rate": 1.0341151385927506e-05, | |
"loss": 0.0008, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 3.976545842217484, | |
"grad_norm": 0.00671280175447464, | |
"learning_rate": 1.023454157782516e-05, | |
"loss": 0.0008, | |
"step": 1865 | |
}, | |
{ | |
"epoch": 3.9872068230277184, | |
"grad_norm": 0.006478813476860523, | |
"learning_rate": 1.0127931769722815e-05, | |
"loss": 0.001, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 3.997867803837953, | |
"grad_norm": 0.006525525823235512, | |
"learning_rate": 1.002132196162047e-05, | |
"loss": 0.0008, | |
"step": 1875 | |
}, | |
{ | |
"epoch": 4.0, | |
"eval_accuracy": 0.9954666666666667, | |
"eval_loss": 0.017088035121560097, | |
"eval_runtime": 52.5222, | |
"eval_samples_per_second": 71.398, | |
"eval_steps_per_second": 2.247, | |
"step": 1876 | |
}, | |
{ | |
"epoch": 4.008528784648187, | |
"grad_norm": 0.006642326712608337, | |
"learning_rate": 9.914712153518125e-06, | |
"loss": 0.0599, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 4.019189765458422, | |
"grad_norm": 0.01126047968864441, | |
"learning_rate": 9.80810234541578e-06, | |
"loss": 0.0008, | |
"step": 1885 | |
}, | |
{ | |
"epoch": 4.029850746268656, | |
"grad_norm": 0.01390018593519926, | |
"learning_rate": 9.701492537313434e-06, | |
"loss": 0.0009, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 4.040511727078891, | |
"grad_norm": 0.006929301656782627, | |
"learning_rate": 9.594882729211089e-06, | |
"loss": 0.0008, | |
"step": 1895 | |
}, | |
{ | |
"epoch": 4.051172707889126, | |
"grad_norm": 0.0066797733306884766, | |
"learning_rate": 9.488272921108744e-06, | |
"loss": 0.0008, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 4.061833688699361, | |
"grad_norm": 0.006948092486709356, | |
"learning_rate": 9.381663113006398e-06, | |
"loss": 0.0008, | |
"step": 1905 | |
}, | |
{ | |
"epoch": 4.072494669509595, | |
"grad_norm": 0.006819820497184992, | |
"learning_rate": 9.275053304904053e-06, | |
"loss": 0.0008, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 4.08315565031983, | |
"grad_norm": 0.0069790855050086975, | |
"learning_rate": 9.168443496801706e-06, | |
"loss": 0.0008, | |
"step": 1915 | |
}, | |
{ | |
"epoch": 4.093816631130064, | |
"grad_norm": 0.006708220578730106, | |
"learning_rate": 9.06183368869936e-06, | |
"loss": 0.0008, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 4.104477611940299, | |
"grad_norm": 0.006948885973542929, | |
"learning_rate": 8.955223880597016e-06, | |
"loss": 0.0008, | |
"step": 1925 | |
}, | |
{ | |
"epoch": 4.115138592750533, | |
"grad_norm": 0.007034693378955126, | |
"learning_rate": 8.84861407249467e-06, | |
"loss": 0.0008, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 4.1257995735607675, | |
"grad_norm": 0.006624700501561165, | |
"learning_rate": 8.742004264392323e-06, | |
"loss": 0.0008, | |
"step": 1935 | |
}, | |
{ | |
"epoch": 4.136460554371002, | |
"grad_norm": 0.006907845381647348, | |
"learning_rate": 8.635394456289978e-06, | |
"loss": 0.0008, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 4.1471215351812365, | |
"grad_norm": 0.00695022800937295, | |
"learning_rate": 8.528784648187633e-06, | |
"loss": 0.001, | |
"step": 1945 | |
}, | |
{ | |
"epoch": 4.157782515991471, | |
"grad_norm": 0.006543528288602829, | |
"learning_rate": 8.422174840085288e-06, | |
"loss": 0.0008, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 4.1684434968017055, | |
"grad_norm": 0.006766157690435648, | |
"learning_rate": 8.315565031982942e-06, | |
"loss": 0.0008, | |
"step": 1955 | |
}, | |
{ | |
"epoch": 4.17910447761194, | |
"grad_norm": 0.006587805692106485, | |
"learning_rate": 8.208955223880597e-06, | |
"loss": 0.0008, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 4.189765458422174, | |
"grad_norm": 0.0068255639635026455, | |
"learning_rate": 8.102345415778252e-06, | |
"loss": 0.0008, | |
"step": 1965 | |
}, | |
{ | |
"epoch": 4.20042643923241, | |
"grad_norm": 0.00648807967081666, | |
"learning_rate": 7.995735607675907e-06, | |
"loss": 0.0008, | |
"step": 1970 | |
}, | |
{ | |
"epoch": 4.211087420042644, | |
"grad_norm": 0.030271295458078384, | |
"learning_rate": 7.889125799573561e-06, | |
"loss": 0.0011, | |
"step": 1975 | |
}, | |
{ | |
"epoch": 4.221748400852879, | |
"grad_norm": 0.006809168960899115, | |
"learning_rate": 7.782515991471216e-06, | |
"loss": 0.0008, | |
"step": 1980 | |
}, | |
{ | |
"epoch": 4.232409381663113, | |
"grad_norm": 0.006493579130619764, | |
"learning_rate": 7.67590618336887e-06, | |
"loss": 0.0008, | |
"step": 1985 | |
}, | |
{ | |
"epoch": 4.243070362473348, | |
"grad_norm": 0.00649282755330205, | |
"learning_rate": 7.5692963752665255e-06, | |
"loss": 0.0008, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 4.253731343283582, | |
"grad_norm": 0.0064952559769153595, | |
"learning_rate": 7.4626865671641785e-06, | |
"loss": 0.0007, | |
"step": 1995 | |
}, | |
{ | |
"epoch": 4.264392324093817, | |
"grad_norm": 0.006957747973501682, | |
"learning_rate": 7.356076759061833e-06, | |
"loss": 0.0008, | |
"step": 2000 | |
}, | |
{ | |
"epoch": 4.275053304904051, | |
"grad_norm": 0.0065071796998381615, | |
"learning_rate": 7.249466950959488e-06, | |
"loss": 0.001, | |
"step": 2005 | |
}, | |
{ | |
"epoch": 4.285714285714286, | |
"grad_norm": 0.0067489007487893105, | |
"learning_rate": 7.142857142857143e-06, | |
"loss": 0.0008, | |
"step": 2010 | |
}, | |
{ | |
"epoch": 4.29637526652452, | |
"grad_norm": 0.006357221864163876, | |
"learning_rate": 7.0362473347547975e-06, | |
"loss": 0.0008, | |
"step": 2015 | |
}, | |
{ | |
"epoch": 4.3070362473347545, | |
"grad_norm": 0.006479720119386911, | |
"learning_rate": 6.929637526652452e-06, | |
"loss": 0.0008, | |
"step": 2020 | |
}, | |
{ | |
"epoch": 4.317697228144989, | |
"grad_norm": 0.006618973799049854, | |
"learning_rate": 6.823027718550107e-06, | |
"loss": 0.0008, | |
"step": 2025 | |
}, | |
{ | |
"epoch": 4.3283582089552235, | |
"grad_norm": 0.006500880233943462, | |
"learning_rate": 6.716417910447762e-06, | |
"loss": 0.0441, | |
"step": 2030 | |
}, | |
{ | |
"epoch": 4.339019189765459, | |
"grad_norm": 0.006383256521075964, | |
"learning_rate": 6.609808102345416e-06, | |
"loss": 0.0008, | |
"step": 2035 | |
}, | |
{ | |
"epoch": 4.349680170575693, | |
"grad_norm": 0.008115286007523537, | |
"learning_rate": 6.503198294243071e-06, | |
"loss": 0.0008, | |
"step": 2040 | |
}, | |
{ | |
"epoch": 4.360341151385928, | |
"grad_norm": 0.006482505239546299, | |
"learning_rate": 6.396588486140726e-06, | |
"loss": 0.0008, | |
"step": 2045 | |
}, | |
{ | |
"epoch": 4.371002132196162, | |
"grad_norm": 0.00693723326548934, | |
"learning_rate": 6.28997867803838e-06, | |
"loss": 0.0008, | |
"step": 2050 | |
}, | |
{ | |
"epoch": 4.381663113006397, | |
"grad_norm": 0.006512152962386608, | |
"learning_rate": 6.1833688699360345e-06, | |
"loss": 0.0008, | |
"step": 2055 | |
}, | |
{ | |
"epoch": 4.392324093816631, | |
"grad_norm": 0.013108141720294952, | |
"learning_rate": 6.076759061833688e-06, | |
"loss": 0.0008, | |
"step": 2060 | |
}, | |
{ | |
"epoch": 4.402985074626866, | |
"grad_norm": 0.0065595065243542194, | |
"learning_rate": 5.970149253731343e-06, | |
"loss": 0.0008, | |
"step": 2065 | |
}, | |
{ | |
"epoch": 4.4136460554371, | |
"grad_norm": 0.00672136340290308, | |
"learning_rate": 5.863539445628998e-06, | |
"loss": 0.0008, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 4.424307036247335, | |
"grad_norm": 0.006383867468684912, | |
"learning_rate": 5.756929637526653e-06, | |
"loss": 0.0008, | |
"step": 2075 | |
}, | |
{ | |
"epoch": 4.434968017057569, | |
"grad_norm": 0.006399655714631081, | |
"learning_rate": 5.650319829424307e-06, | |
"loss": 0.0008, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 4.445628997867804, | |
"grad_norm": 0.006387556903064251, | |
"learning_rate": 5.543710021321962e-06, | |
"loss": 0.0008, | |
"step": 2085 | |
}, | |
{ | |
"epoch": 4.456289978678038, | |
"grad_norm": 0.006342255510389805, | |
"learning_rate": 5.437100213219617e-06, | |
"loss": 0.0008, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 4.466950959488273, | |
"grad_norm": 0.006512415129691362, | |
"learning_rate": 5.330490405117271e-06, | |
"loss": 0.0008, | |
"step": 2095 | |
}, | |
{ | |
"epoch": 4.477611940298507, | |
"grad_norm": 0.006460919044911861, | |
"learning_rate": 5.2238805970149255e-06, | |
"loss": 0.0008, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 4.4882729211087415, | |
"grad_norm": 0.006401466205716133, | |
"learning_rate": 5.11727078891258e-06, | |
"loss": 0.0007, | |
"step": 2105 | |
}, | |
{ | |
"epoch": 4.498933901918977, | |
"grad_norm": 0.007441829890012741, | |
"learning_rate": 5.010660980810235e-06, | |
"loss": 0.0008, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 4.509594882729211, | |
"grad_norm": 0.00697102677077055, | |
"learning_rate": 4.90405117270789e-06, | |
"loss": 0.0008, | |
"step": 2115 | |
}, | |
{ | |
"epoch": 4.520255863539446, | |
"grad_norm": 0.006417451426386833, | |
"learning_rate": 4.797441364605544e-06, | |
"loss": 0.0008, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 4.53091684434968, | |
"grad_norm": 0.006671324372291565, | |
"learning_rate": 4.690831556503199e-06, | |
"loss": 0.0009, | |
"step": 2125 | |
}, | |
{ | |
"epoch": 4.541577825159915, | |
"grad_norm": 0.006363190710544586, | |
"learning_rate": 4.584221748400853e-06, | |
"loss": 0.0008, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 4.552238805970149, | |
"grad_norm": 0.006648124195635319, | |
"learning_rate": 4.477611940298508e-06, | |
"loss": 0.0008, | |
"step": 2135 | |
}, | |
{ | |
"epoch": 4.562899786780384, | |
"grad_norm": 0.006491978652775288, | |
"learning_rate": 4.371002132196162e-06, | |
"loss": 0.0008, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 4.573560767590618, | |
"grad_norm": 0.006693553179502487, | |
"learning_rate": 4.264392324093816e-06, | |
"loss": 0.0007, | |
"step": 2145 | |
}, | |
{ | |
"epoch": 4.584221748400853, | |
"grad_norm": 0.00642005680128932, | |
"learning_rate": 4.157782515991471e-06, | |
"loss": 0.001, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 4.594882729211087, | |
"grad_norm": 0.006223942618817091, | |
"learning_rate": 4.051172707889126e-06, | |
"loss": 0.0008, | |
"step": 2155 | |
}, | |
{ | |
"epoch": 4.605543710021322, | |
"grad_norm": 0.006367682479321957, | |
"learning_rate": 3.944562899786781e-06, | |
"loss": 0.0007, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 4.616204690831556, | |
"grad_norm": 0.006506779231131077, | |
"learning_rate": 3.837953091684435e-06, | |
"loss": 0.0008, | |
"step": 2165 | |
}, | |
{ | |
"epoch": 4.6268656716417915, | |
"grad_norm": 0.006613300181925297, | |
"learning_rate": 3.7313432835820893e-06, | |
"loss": 0.0007, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 4.637526652452026, | |
"grad_norm": 0.006388398353010416, | |
"learning_rate": 3.624733475479744e-06, | |
"loss": 0.0008, | |
"step": 2175 | |
}, | |
{ | |
"epoch": 4.6481876332622605, | |
"grad_norm": 0.006248796824365854, | |
"learning_rate": 3.5181236673773987e-06, | |
"loss": 0.0008, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 4.658848614072495, | |
"grad_norm": 0.006196764763444662, | |
"learning_rate": 3.4115138592750535e-06, | |
"loss": 0.0007, | |
"step": 2185 | |
}, | |
{ | |
"epoch": 4.669509594882729, | |
"grad_norm": 0.006270730402320623, | |
"learning_rate": 3.304904051172708e-06, | |
"loss": 0.0007, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 4.680170575692964, | |
"grad_norm": 0.0064180390909314156, | |
"learning_rate": 3.198294243070363e-06, | |
"loss": 0.0007, | |
"step": 2195 | |
}, | |
{ | |
"epoch": 4.690831556503198, | |
"grad_norm": 0.0064066615886986256, | |
"learning_rate": 3.0916844349680173e-06, | |
"loss": 0.0007, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 4.701492537313433, | |
"grad_norm": 0.006128617562353611, | |
"learning_rate": 2.9850746268656716e-06, | |
"loss": 0.0007, | |
"step": 2205 | |
}, | |
{ | |
"epoch": 4.712153518123667, | |
"grad_norm": 0.006440497003495693, | |
"learning_rate": 2.8784648187633263e-06, | |
"loss": 0.0007, | |
"step": 2210 | |
}, | |
{ | |
"epoch": 4.722814498933902, | |
"grad_norm": 0.0062287976033985615, | |
"learning_rate": 2.771855010660981e-06, | |
"loss": 0.0007, | |
"step": 2215 | |
}, | |
{ | |
"epoch": 4.733475479744136, | |
"grad_norm": 0.006312989629805088, | |
"learning_rate": 2.6652452025586354e-06, | |
"loss": 0.0007, | |
"step": 2220 | |
}, | |
{ | |
"epoch": 4.744136460554371, | |
"grad_norm": 0.006661479827016592, | |
"learning_rate": 2.55863539445629e-06, | |
"loss": 0.0007, | |
"step": 2225 | |
}, | |
{ | |
"epoch": 4.754797441364605, | |
"grad_norm": 0.006829616613686085, | |
"learning_rate": 2.452025586353945e-06, | |
"loss": 0.0007, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 4.76545842217484, | |
"grad_norm": 0.006615268532186747, | |
"learning_rate": 2.3454157782515996e-06, | |
"loss": 0.0007, | |
"step": 2235 | |
}, | |
{ | |
"epoch": 4.776119402985074, | |
"grad_norm": 0.006259176414459944, | |
"learning_rate": 2.238805970149254e-06, | |
"loss": 0.0011, | |
"step": 2240 | |
}, | |
{ | |
"epoch": 4.786780383795309, | |
"grad_norm": 0.006526503246277571, | |
"learning_rate": 2.132196162046908e-06, | |
"loss": 0.0008, | |
"step": 2245 | |
}, | |
{ | |
"epoch": 4.797441364605544, | |
"grad_norm": 0.006221674848347902, | |
"learning_rate": 2.025586353944563e-06, | |
"loss": 0.0007, | |
"step": 2250 | |
}, | |
{ | |
"epoch": 4.8081023454157785, | |
"grad_norm": 0.006503719836473465, | |
"learning_rate": 1.9189765458422177e-06, | |
"loss": 0.0007, | |
"step": 2255 | |
}, | |
{ | |
"epoch": 4.818763326226013, | |
"grad_norm": 0.006352351978421211, | |
"learning_rate": 1.812366737739872e-06, | |
"loss": 0.0007, | |
"step": 2260 | |
}, | |
{ | |
"epoch": 4.8294243070362475, | |
"grad_norm": 0.006284451112151146, | |
"learning_rate": 1.7057569296375267e-06, | |
"loss": 0.0007, | |
"step": 2265 | |
}, | |
{ | |
"epoch": 4.840085287846482, | |
"grad_norm": 0.006210811901837587, | |
"learning_rate": 1.5991471215351815e-06, | |
"loss": 0.0007, | |
"step": 2270 | |
}, | |
{ | |
"epoch": 4.850746268656716, | |
"grad_norm": 0.006394446827471256, | |
"learning_rate": 1.4925373134328358e-06, | |
"loss": 0.0007, | |
"step": 2275 | |
}, | |
{ | |
"epoch": 4.861407249466951, | |
"grad_norm": 0.006252561695873737, | |
"learning_rate": 1.3859275053304905e-06, | |
"loss": 0.0007, | |
"step": 2280 | |
}, | |
{ | |
"epoch": 4.872068230277185, | |
"grad_norm": 0.00647474592551589, | |
"learning_rate": 1.279317697228145e-06, | |
"loss": 0.0007, | |
"step": 2285 | |
}, | |
{ | |
"epoch": 4.88272921108742, | |
"grad_norm": 0.0062627485021948814, | |
"learning_rate": 1.1727078891257998e-06, | |
"loss": 0.0007, | |
"step": 2290 | |
}, | |
{ | |
"epoch": 4.893390191897654, | |
"grad_norm": 0.008225005120038986, | |
"learning_rate": 1.066098081023454e-06, | |
"loss": 0.0007, | |
"step": 2295 | |
}, | |
{ | |
"epoch": 4.904051172707889, | |
"grad_norm": 0.006463615223765373, | |
"learning_rate": 9.594882729211088e-07, | |
"loss": 0.0009, | |
"step": 2300 | |
}, | |
{ | |
"epoch": 4.914712153518123, | |
"grad_norm": 0.006971514318138361, | |
"learning_rate": 8.528784648187634e-07, | |
"loss": 0.0007, | |
"step": 2305 | |
}, | |
{ | |
"epoch": 4.925373134328359, | |
"grad_norm": 0.006189883686602116, | |
"learning_rate": 7.462686567164179e-07, | |
"loss": 0.0007, | |
"step": 2310 | |
}, | |
{ | |
"epoch": 4.936034115138593, | |
"grad_norm": 0.006787551566958427, | |
"learning_rate": 6.396588486140725e-07, | |
"loss": 0.0007, | |
"step": 2315 | |
}, | |
{ | |
"epoch": 4.946695095948828, | |
"grad_norm": 0.00628741504624486, | |
"learning_rate": 5.33049040511727e-07, | |
"loss": 0.0007, | |
"step": 2320 | |
}, | |
{ | |
"epoch": 4.957356076759062, | |
"grad_norm": 0.006164039950817823, | |
"learning_rate": 4.264392324093817e-07, | |
"loss": 0.0008, | |
"step": 2325 | |
}, | |
{ | |
"epoch": 4.968017057569297, | |
"grad_norm": 0.006166805978864431, | |
"learning_rate": 3.1982942430703626e-07, | |
"loss": 0.0007, | |
"step": 2330 | |
}, | |
{ | |
"epoch": 4.978678038379531, | |
"grad_norm": 0.006152087822556496, | |
"learning_rate": 2.1321961620469084e-07, | |
"loss": 0.0007, | |
"step": 2335 | |
}, | |
{ | |
"epoch": 4.9893390191897655, | |
"grad_norm": 0.006514523644000292, | |
"learning_rate": 1.0660980810234542e-07, | |
"loss": 0.0007, | |
"step": 2340 | |
}, | |
{ | |
"epoch": 5.0, | |
"grad_norm": 0.006427063141018152, | |
"learning_rate": 0.0, | |
"loss": 0.0007, | |
"step": 2345 | |
}, | |
{ | |
"epoch": 5.0, | |
"eval_accuracy": 0.9954666666666667, | |
"eval_loss": 0.018111038953065872, | |
"eval_runtime": 52.1039, | |
"eval_samples_per_second": 71.972, | |
"eval_steps_per_second": 2.265, | |
"step": 2345 | |
}, | |
{ | |
"epoch": 5.0, | |
"step": 2345, | |
"total_flos": 5.8118992210944e+18, | |
"train_loss": 0.015165097594682151, | |
"train_runtime": 2989.946, | |
"train_samples_per_second": 25.084, | |
"train_steps_per_second": 0.784 | |
} | |
], | |
"logging_steps": 5, | |
"max_steps": 2345, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 5, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": true | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 5.8118992210944e+18, | |
"train_batch_size": 32, | |
"trial_name": null, | |
"trial_params": null | |
} | |