{
  "best_metric": 0.022856945171952248,
  "best_model_checkpoint": "vit_epochs5_batch32_lr5e-05_size224_tiles1_seed1_classic_image_classification_local\\checkpoint-1407",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 2345,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.010660980810234541, "grad_norm": 1.3030591011047363, "learning_rate": 4.989339019189766e-05, "loss": 0.6439, "step": 5 },
    { "epoch": 0.021321961620469083, "grad_norm": 1.8761012554168701, "learning_rate": 4.978678038379531e-05, "loss": 0.488, "step": 10 },
    { "epoch": 0.031982942430703626, "grad_norm": 1.1793400049209595, "learning_rate": 4.9680170575692967e-05, "loss": 0.337, "step": 15 },
    { "epoch": 0.042643923240938165, "grad_norm": 1.3804587125778198, "learning_rate": 4.957356076759062e-05, "loss": 0.2404, "step": 20 },
    { "epoch": 0.053304904051172705, "grad_norm": 0.987789511680603, "learning_rate": 4.9466950959488276e-05, "loss": 0.1539, "step": 25 },
    { "epoch": 0.06396588486140725, "grad_norm": 2.2496049404144287, "learning_rate": 4.936034115138593e-05, "loss": 0.1444, "step": 30 },
    { "epoch": 0.07462686567164178, "grad_norm": 0.39667999744415283, "learning_rate": 4.9253731343283586e-05, "loss": 0.079, "step": 35 },
    { "epoch": 0.08528784648187633, "grad_norm": 1.3956135511398315, "learning_rate": 4.914712153518124e-05, "loss": 0.0746, "step": 40 },
    { "epoch": 0.09594882729211088, "grad_norm": 4.631561756134033, "learning_rate": 4.904051172707889e-05, "loss": 0.0829, "step": 45 },
    { "epoch": 0.10660980810234541, "grad_norm": 2.049582004547119, "learning_rate": 4.893390191897655e-05, "loss": 0.0496, "step": 50 },
    { "epoch": 0.11727078891257996, "grad_norm": 3.339857816696167, "learning_rate": 4.88272921108742e-05, "loss": 0.0542, "step": 55 },
    { "epoch": 0.1279317697228145, "grad_norm": 3.415274143218994, "learning_rate": 4.872068230277186e-05, "loss": 0.1034, "step": 60 },
    { "epoch": 0.13859275053304904, "grad_norm": 0.7254932522773743, "learning_rate": 4.861407249466951e-05, "loss": 0.1021, "step": 65 },
    { "epoch": 0.14925373134328357, "grad_norm": 0.7911928296089172, "learning_rate": 4.850746268656717e-05, "loss": 0.0581, "step": 70 },
    { "epoch": 0.15991471215351813, "grad_norm": 2.9554224014282227, "learning_rate": 4.840085287846482e-05, "loss": 0.0626, "step": 75 },
    { "epoch": 0.17057569296375266, "grad_norm": 0.17032524943351746, "learning_rate": 4.829424307036248e-05, "loss": 0.0366, "step": 80 },
    { "epoch": 0.1812366737739872, "grad_norm": 4.795769214630127, "learning_rate": 4.8187633262260126e-05, "loss": 0.0359, "step": 85 },
    { "epoch": 0.19189765458422176, "grad_norm": 0.13845813274383545, "learning_rate": 4.808102345415779e-05, "loss": 0.0231, "step": 90 },
    { "epoch": 0.2025586353944563, "grad_norm": 0.14712730050086975, "learning_rate": 4.7974413646055436e-05, "loss": 0.046, "step": 95 },
    { "epoch": 0.21321961620469082, "grad_norm": 0.1327529400587082, "learning_rate": 4.78678038379531e-05, "loss": 0.0522, "step": 100 },
    { "epoch": 0.22388059701492538, "grad_norm": 0.12169656902551651, "learning_rate": 4.7761194029850745e-05, "loss": 0.039, "step": 105 },
    { "epoch": 0.2345415778251599, "grad_norm": 4.029494762420654, "learning_rate": 4.765458422174841e-05, "loss": 0.0986, "step": 110 },
    { "epoch": 0.24520255863539445, "grad_norm": 0.569814145565033, "learning_rate": 4.7547974413646055e-05, "loss": 0.0501, "step": 115 },
    { "epoch": 0.255863539445629, "grad_norm": 2.2544350624084473, "learning_rate": 4.7441364605543716e-05, "loss": 0.0229, "step": 120 },
    { "epoch": 0.26652452025586354, "grad_norm": 1.0173734426498413, "learning_rate": 4.7334754797441364e-05, "loss": 0.0453, "step": 125 },
    { "epoch": 0.2771855010660981, "grad_norm": 0.32047396898269653, "learning_rate": 4.7228144989339026e-05, "loss": 0.0371, "step": 130 },
    { "epoch": 0.2878464818763326, "grad_norm": 2.2969608306884766, "learning_rate": 4.7121535181236674e-05, "loss": 0.0379, "step": 135 },
    { "epoch": 0.29850746268656714, "grad_norm": 0.11387304216623306, "learning_rate": 4.7014925373134335e-05, "loss": 0.0304, "step": 140 },
    { "epoch": 0.3091684434968017, "grad_norm": 0.2686951756477356, "learning_rate": 4.690831556503198e-05, "loss": 0.0677, "step": 145 },
    { "epoch": 0.31982942430703626, "grad_norm": 0.10119659453630447, "learning_rate": 4.6801705756929645e-05, "loss": 0.0423, "step": 150 },
    { "epoch": 0.3304904051172708, "grad_norm": 3.7759270668029785, "learning_rate": 4.669509594882729e-05, "loss": 0.0497, "step": 155 },
    { "epoch": 0.3411513859275053, "grad_norm": 0.09366951137781143, "learning_rate": 4.658848614072495e-05, "loss": 0.0196, "step": 160 },
    { "epoch": 0.35181236673773986, "grad_norm": 0.3446415364742279, "learning_rate": 4.64818763326226e-05, "loss": 0.0411, "step": 165 },
    { "epoch": 0.3624733475479744, "grad_norm": 0.0982455462217331, "learning_rate": 4.637526652452026e-05, "loss": 0.0206, "step": 170 },
    { "epoch": 0.373134328358209, "grad_norm": 2.6638343334198, "learning_rate": 4.626865671641791e-05, "loss": 0.0489, "step": 175 },
    { "epoch": 0.3837953091684435, "grad_norm": 0.2727707326412201, "learning_rate": 4.6162046908315566e-05, "loss": 0.0153, "step": 180 },
    { "epoch": 0.39445628997867804, "grad_norm": 0.17455758154392242, "learning_rate": 4.605543710021322e-05, "loss": 0.0446, "step": 185 },
    { "epoch": 0.4051172707889126, "grad_norm": 0.10797569900751114, "learning_rate": 4.5948827292110876e-05, "loss": 0.0139, "step": 190 },
    { "epoch": 0.4157782515991471, "grad_norm": 0.802625298500061, "learning_rate": 4.584221748400853e-05, "loss": 0.0366, "step": 195 },
    { "epoch": 0.42643923240938164, "grad_norm": 0.33289510011672974, "learning_rate": 4.5735607675906185e-05, "loss": 0.0832, "step": 200 },
    { "epoch": 0.43710021321961623, "grad_norm": 0.07750815153121948, "learning_rate": 4.562899786780384e-05, "loss": 0.017, "step": 205 },
    { "epoch": 0.44776119402985076, "grad_norm": 0.07023430615663528, "learning_rate": 4.5522388059701495e-05, "loss": 0.0474, "step": 210 },
    { "epoch": 0.4584221748400853, "grad_norm": 7.5608625411987305, "learning_rate": 4.541577825159915e-05, "loss": 0.0238, "step": 215 },
    { "epoch": 0.4690831556503198, "grad_norm": 0.07039786130189896, "learning_rate": 4.5309168443496804e-05, "loss": 0.0177, "step": 220 },
    { "epoch": 0.47974413646055436, "grad_norm": 1.5955876111984253, "learning_rate": 4.520255863539446e-05, "loss": 0.0529, "step": 225 },
    { "epoch": 0.4904051172707889, "grad_norm": 0.29084673523902893, "learning_rate": 4.5095948827292114e-05, "loss": 0.0389, "step": 230 },
    { "epoch": 0.5010660980810234, "grad_norm": 4.458634853363037, "learning_rate": 4.498933901918977e-05, "loss": 0.028, "step": 235 },
    { "epoch": 0.511727078891258, "grad_norm": 0.24322013556957245, "learning_rate": 4.488272921108742e-05, "loss": 0.014, "step": 240 },
    { "epoch": 0.5223880597014925, "grad_norm": 1.5862743854522705, "learning_rate": 4.477611940298508e-05, "loss": 0.0273, "step": 245 },
    { "epoch": 0.5330490405117271, "grad_norm": 2.472909927368164, "learning_rate": 4.466950959488273e-05, "loss": 0.0572, "step": 250 },
    { "epoch": 0.5437100213219617, "grad_norm": 0.1901959925889969, "learning_rate": 4.456289978678039e-05, "loss": 0.0095, "step": 255 },
    { "epoch": 0.5543710021321961, "grad_norm": 1.9824448823928833, "learning_rate": 4.445628997867804e-05, "loss": 0.0374, "step": 260 },
    { "epoch": 0.5650319829424307, "grad_norm": 0.0593295581638813, "learning_rate": 4.43496801705757e-05, "loss": 0.0357, "step": 265 },
    { "epoch": 0.5756929637526652, "grad_norm": 0.060530565679073334, "learning_rate": 4.424307036247335e-05, "loss": 0.0222, "step": 270 },
    { "epoch": 0.5863539445628998, "grad_norm": 1.5108375549316406, "learning_rate": 4.4136460554371006e-05, "loss": 0.0104, "step": 275 },
    { "epoch": 0.5970149253731343, "grad_norm": 1.9284145832061768, "learning_rate": 4.402985074626866e-05, "loss": 0.0312, "step": 280 },
    { "epoch": 0.6076759061833689, "grad_norm": 0.061266589909791946, "learning_rate": 4.3923240938166316e-05, "loss": 0.0625, "step": 285 },
    { "epoch": 0.6183368869936035, "grad_norm": 0.05914849042892456, "learning_rate": 4.381663113006397e-05, "loss": 0.0446, "step": 290 },
    { "epoch": 0.6289978678038379, "grad_norm": 0.4006780982017517, "learning_rate": 4.3710021321961625e-05, "loss": 0.0497, "step": 295 },
    { "epoch": 0.6396588486140725, "grad_norm": 0.0631353035569191, "learning_rate": 4.360341151385928e-05, "loss": 0.0098, "step": 300 },
    { "epoch": 0.650319829424307, "grad_norm": 0.2760462760925293, "learning_rate": 4.3496801705756935e-05, "loss": 0.0444, "step": 305 },
    { "epoch": 0.6609808102345416, "grad_norm": 0.05796327069401741, "learning_rate": 4.339019189765459e-05, "loss": 0.0107, "step": 310 },
    { "epoch": 0.6716417910447762, "grad_norm": 5.437504291534424, "learning_rate": 4.328358208955224e-05, "loss": 0.027, "step": 315 },
    { "epoch": 0.6823027718550106, "grad_norm": 0.05112091079354286, "learning_rate": 4.31769722814499e-05, "loss": 0.0296, "step": 320 },
    { "epoch": 0.6929637526652452, "grad_norm": 0.0484708808362484, "learning_rate": 4.307036247334755e-05, "loss": 0.0419, "step": 325 },
    { "epoch": 0.7036247334754797, "grad_norm": 0.05781492963433266, "learning_rate": 4.29637526652452e-05, "loss": 0.0191, "step": 330 },
    { "epoch": 0.7142857142857143, "grad_norm": 0.07626837491989136, "learning_rate": 4.2857142857142856e-05, "loss": 0.032, "step": 335 },
    { "epoch": 0.7249466950959488, "grad_norm": 0.046046238392591476, "learning_rate": 4.275053304904051e-05, "loss": 0.0315, "step": 340 },
    { "epoch": 0.7356076759061834, "grad_norm": 0.07111696898937225, "learning_rate": 4.2643923240938166e-05, "loss": 0.0617, "step": 345 },
    { "epoch": 0.746268656716418, "grad_norm": 0.2557612359523773, "learning_rate": 4.253731343283582e-05, "loss": 0.0183, "step": 350 },
    { "epoch": 0.7569296375266524, "grad_norm": 0.20927634835243225, "learning_rate": 4.2430703624733475e-05, "loss": 0.0275, "step": 355 },
    { "epoch": 0.767590618336887, "grad_norm": 1.5735142230987549, "learning_rate": 4.232409381663113e-05, "loss": 0.0319, "step": 360 },
    { "epoch": 0.7782515991471215, "grad_norm": 3.6991448402404785, "learning_rate": 4.2217484008528785e-05, "loss": 0.0247, "step": 365 },
    { "epoch": 0.7889125799573561, "grad_norm": 0.05717187374830246, "learning_rate": 4.211087420042644e-05, "loss": 0.0127, "step": 370 },
    { "epoch": 0.7995735607675906, "grad_norm": 0.07145973294973373, "learning_rate": 4.2004264392324094e-05, "loss": 0.0091, "step": 375 },
    { "epoch": 0.8102345415778252, "grad_norm": 4.079801082611084, "learning_rate": 4.189765458422175e-05, "loss": 0.0135, "step": 380 },
    { "epoch": 0.8208955223880597, "grad_norm": 0.5012163519859314, "learning_rate": 4.1791044776119404e-05, "loss": 0.0447, "step": 385 },
    { "epoch": 0.8315565031982942, "grad_norm": 0.07103706151247025, "learning_rate": 4.168443496801706e-05, "loss": 0.0522, "step": 390 },
    { "epoch": 0.8422174840085288, "grad_norm": 0.045256324112415314, "learning_rate": 4.157782515991471e-05, "loss": 0.0238, "step": 395 },
    { "epoch": 0.8528784648187633, "grad_norm": 1.065915584564209, "learning_rate": 4.147121535181237e-05, "loss": 0.0762, "step": 400 },
    { "epoch": 0.8635394456289979, "grad_norm": 7.5331597328186035, "learning_rate": 4.136460554371002e-05, "loss": 0.0476, "step": 405 },
    { "epoch": 0.8742004264392325, "grad_norm": 3.337735176086426, "learning_rate": 4.125799573560768e-05, "loss": 0.0595, "step": 410 },
    { "epoch": 0.8848614072494669, "grad_norm": 0.09289413690567017, "learning_rate": 4.115138592750533e-05, "loss": 0.0096, "step": 415 },
    { "epoch": 0.8955223880597015, "grad_norm": 0.04636334627866745, "learning_rate": 4.104477611940299e-05, "loss": 0.0262, "step": 420 },
    { "epoch": 0.906183368869936, "grad_norm": 5.231013298034668, "learning_rate": 4.093816631130064e-05, "loss": 0.0344, "step": 425 },
    { "epoch": 0.9168443496801706, "grad_norm": 0.558976948261261, "learning_rate": 4.0831556503198296e-05, "loss": 0.0635, "step": 430 },
    { "epoch": 0.9275053304904051, "grad_norm": 0.05399622395634651, "learning_rate": 4.072494669509595e-05, "loss": 0.0208, "step": 435 },
    { "epoch": 0.9381663113006397, "grad_norm": 0.04404484108090401, "learning_rate": 4.0618336886993606e-05, "loss": 0.0312, "step": 440 },
    { "epoch": 0.9488272921108742, "grad_norm": 0.3174332082271576, "learning_rate": 4.051172707889126e-05, "loss": 0.0447, "step": 445 },
    { "epoch": 0.9594882729211087, "grad_norm": 0.06034679710865021, "learning_rate": 4.0405117270788915e-05, "loss": 0.006, "step": 450 },
    { "epoch": 0.9701492537313433, "grad_norm": 2.384519577026367, "learning_rate": 4.029850746268657e-05, "loss": 0.0217, "step": 455 },
    { "epoch": 0.9808102345415778, "grad_norm": 2.445675849914551, "learning_rate": 4.0191897654584225e-05, "loss": 0.0107, "step": 460 },
    { "epoch": 0.9914712153518124, "grad_norm": 1.2337150573730469, "learning_rate": 4.008528784648188e-05, "loss": 0.0735, "step": 465 },
    { "epoch": 1.0, "eval_accuracy": 0.9762666666666666, "eval_loss": 0.07783844321966171, "eval_runtime": 16.5274, "eval_samples_per_second": 226.896, "eval_steps_per_second": 7.14, "step": 469 },
    { "epoch": 1.0021321961620469, "grad_norm": 0.03896041214466095, "learning_rate": 3.997867803837953e-05, "loss": 0.0095, "step": 470 },
    { "epoch": 1.0127931769722816, "grad_norm": 0.30038806796073914, "learning_rate": 3.987206823027719e-05, "loss": 0.0497, "step": 475 },
    { "epoch": 1.023454157782516, "grad_norm": 0.04247400909662247, "learning_rate": 3.976545842217484e-05, "loss": 0.0644, "step": 480 },
    { "epoch": 1.0341151385927505, "grad_norm": 0.1584388017654419, "learning_rate": 3.96588486140725e-05, "loss": 0.0136, "step": 485 },
    { "epoch": 1.044776119402985, "grad_norm": 1.492022156715393, "learning_rate": 3.9552238805970146e-05, "loss": 0.0354, "step": 490 },
    { "epoch": 1.0554371002132197, "grad_norm": 0.07293384522199631, "learning_rate": 3.944562899786781e-05, "loss": 0.0095, "step": 495 },
    { "epoch": 1.0660980810234542, "grad_norm": 0.12515315413475037, "learning_rate": 3.9339019189765456e-05, "loss": 0.0061, "step": 500 },
    { "epoch": 1.0767590618336886, "grad_norm": 0.04049481078982353, "learning_rate": 3.923240938166312e-05, "loss": 0.0048, "step": 505 },
    { "epoch": 1.0874200426439233, "grad_norm": 0.3493141531944275, "learning_rate": 3.9125799573560765e-05, "loss": 0.0072, "step": 510 },
    { "epoch": 1.0980810234541578, "grad_norm": 0.040050432085990906, "learning_rate": 3.901918976545843e-05, "loss": 0.0062, "step": 515 },
    { "epoch": 1.1087420042643923, "grad_norm": 0.03947841376066208, "learning_rate": 3.8912579957356075e-05, "loss": 0.0043, "step": 520 },
    { "epoch": 1.1194029850746268, "grad_norm": 0.03871216997504234, "learning_rate": 3.8805970149253736e-05, "loss": 0.0341, "step": 525 },
    { "epoch": 1.1300639658848615, "grad_norm": 0.03848418593406677, "learning_rate": 3.8699360341151384e-05, "loss": 0.0061, "step": 530 },
    { "epoch": 1.140724946695096, "grad_norm": 0.03081074170768261, "learning_rate": 3.8592750533049046e-05, "loss": 0.0044, "step": 535 },
    { "epoch": 1.1513859275053304, "grad_norm": 0.036050815135240555, "learning_rate": 3.8486140724946694e-05, "loss": 0.0044, "step": 540 },
    { "epoch": 1.1620469083155651, "grad_norm": 0.27078014612197876, "learning_rate": 3.8379530916844355e-05, "loss": 0.0193, "step": 545 },
    { "epoch": 1.1727078891257996, "grad_norm": 0.03798219561576843, "learning_rate": 3.8272921108742e-05, "loss": 0.0042, "step": 550 },
    { "epoch": 1.183368869936034, "grad_norm": 0.03544671833515167, "learning_rate": 3.8166311300639665e-05, "loss": 0.0074, "step": 555 },
    { "epoch": 1.1940298507462686, "grad_norm": 0.028461942449212074, "learning_rate": 3.805970149253731e-05, "loss": 0.007, "step": 560 },
    { "epoch": 1.2046908315565032, "grad_norm": 0.03148175776004791, "learning_rate": 3.7953091684434974e-05, "loss": 0.0044, "step": 565 },
    { "epoch": 1.2153518123667377, "grad_norm": 0.027858976274728775, "learning_rate": 3.784648187633262e-05, "loss": 0.0038, "step": 570 },
    { "epoch": 1.2260127931769722, "grad_norm": 3.376032590866089, "learning_rate": 3.7739872068230284e-05, "loss": 0.0057, "step": 575 },
    { "epoch": 1.236673773987207, "grad_norm": 0.026746030896902084, "learning_rate": 3.763326226012793e-05, "loss": 0.0035, "step": 580 },
    { "epoch": 1.2473347547974414, "grad_norm": 0.03644810989499092, "learning_rate": 3.752665245202559e-05, "loss": 0.0047, "step": 585 },
    { "epoch": 1.2579957356076759, "grad_norm": 0.7633171081542969, "learning_rate": 3.742004264392324e-05, "loss": 0.0039, "step": 590 },
    { "epoch": 1.2686567164179103, "grad_norm": 0.027774915099143982, "learning_rate": 3.73134328358209e-05, "loss": 0.0036, "step": 595 },
    { "epoch": 1.279317697228145, "grad_norm": 1.0888952016830444, "learning_rate": 3.720682302771855e-05, "loss": 0.0112, "step": 600 },
    { "epoch": 1.2899786780383795, "grad_norm": 0.030503449961543083, "learning_rate": 3.710021321961621e-05, "loss": 0.0082, "step": 605 },
    { "epoch": 1.3006396588486142, "grad_norm": 0.025810692459344864, "learning_rate": 3.699360341151386e-05, "loss": 0.0036, "step": 610 },
    { "epoch": 1.3113006396588487, "grad_norm": 0.024351516738533974, "learning_rate": 3.6886993603411515e-05, "loss": 0.0336, "step": 615 },
    { "epoch": 1.3219616204690832, "grad_norm": 0.0555342361330986, "learning_rate": 3.678038379530917e-05, "loss": 0.0036, "step": 620 },
    { "epoch": 1.3326226012793176, "grad_norm": 0.06257825344800949, "learning_rate": 3.6673773987206824e-05, "loss": 0.0084, "step": 625 },
    { "epoch": 1.3432835820895521, "grad_norm": 0.02445884980261326, "learning_rate": 3.656716417910448e-05, "loss": 0.0045, "step": 630 },
    { "epoch": 1.3539445628997868, "grad_norm": 0.025071511045098305, "learning_rate": 3.6460554371002134e-05, "loss": 0.003, "step": 635 },
    { "epoch": 1.3646055437100213, "grad_norm": 0.15359072387218475, "learning_rate": 3.635394456289979e-05, "loss": 0.0034, "step": 640 },
    { "epoch": 1.375266524520256, "grad_norm": 0.631420373916626, "learning_rate": 3.624733475479744e-05, "loss": 0.0035, "step": 645 },
    { "epoch": 1.3859275053304905, "grad_norm": 0.024551689624786377, "learning_rate": 3.61407249466951e-05, "loss": 0.0029, "step": 650 },
    { "epoch": 1.396588486140725, "grad_norm": 0.02399507351219654, "learning_rate": 3.603411513859275e-05, "loss": 0.0057, "step": 655 },
    { "epoch": 1.4072494669509594, "grad_norm": 6.411247730255127, "learning_rate": 3.592750533049041e-05, "loss": 0.0067, "step": 660 },
    { "epoch": 1.417910447761194, "grad_norm": 0.022543814033269882, "learning_rate": 3.582089552238806e-05, "loss": 0.027, "step": 665 },
    { "epoch": 1.4285714285714286, "grad_norm": 0.021749133244156837, "learning_rate": 3.571428571428572e-05, "loss": 0.0116, "step": 670 },
    { "epoch": 1.439232409381663, "grad_norm": 0.022446081042289734, "learning_rate": 3.560767590618337e-05, "loss": 0.0032, "step": 675 },
    { "epoch": 1.4498933901918978, "grad_norm": 0.14957591891288757, "learning_rate": 3.5501066098081026e-05, "loss": 0.0227, "step": 680 },
    { "epoch": 1.4605543710021323, "grad_norm": 0.2059992253780365, "learning_rate": 3.539445628997868e-05, "loss": 0.0042, "step": 685 },
    { "epoch": 1.4712153518123667, "grad_norm": 0.07327228784561157, "learning_rate": 3.5287846481876336e-05, "loss": 0.0033, "step": 690 },
    { "epoch": 1.4818763326226012, "grad_norm": 0.022074097767472267, "learning_rate": 3.518123667377399e-05, "loss": 0.0031, "step": 695 },
    { "epoch": 1.4925373134328357, "grad_norm": 0.035878945142030716, "learning_rate": 3.5074626865671645e-05, "loss": 0.0348, "step": 700 },
    { "epoch": 1.5031982942430704, "grad_norm": 0.020859839394688606, "learning_rate": 3.496801705756929e-05, "loss": 0.0816, "step": 705 },
    { "epoch": 1.5138592750533049, "grad_norm": 0.024958930909633636, "learning_rate": 3.4861407249466955e-05, "loss": 0.0028, "step": 710 },
    { "epoch": 1.5245202558635396, "grad_norm": 0.02834257483482361, "learning_rate": 3.47547974413646e-05, "loss": 0.0027, "step": 715 },
    { "epoch": 1.535181236673774, "grad_norm": 0.022769009694457054, "learning_rate": 3.4648187633262264e-05, "loss": 0.0174, "step": 720 },
    { "epoch": 1.5458422174840085, "grad_norm": 0.02601616457104683, "learning_rate": 3.454157782515991e-05, "loss": 0.0027, "step": 725 },
    { "epoch": 1.556503198294243, "grad_norm": 0.24697370827198029, "learning_rate": 3.4434968017057574e-05, "loss": 0.0056, "step": 730 },
    { "epoch": 1.5671641791044775, "grad_norm": 0.01992082968354225, "learning_rate": 3.432835820895522e-05, "loss": 0.0056, "step": 735 },
    { "epoch": 1.5778251599147122, "grad_norm": 0.020425518974661827, "learning_rate": 3.422174840085288e-05, "loss": 0.0026, "step": 740 },
    { "epoch": 1.5884861407249466, "grad_norm": 0.021747827529907227, "learning_rate": 3.411513859275053e-05, "loss": 0.0034, "step": 745 },
    { "epoch": 1.5991471215351813, "grad_norm": 0.02213311195373535, "learning_rate": 3.400852878464819e-05, "loss": 0.0389, "step": 750 },
    { "epoch": 1.6098081023454158, "grad_norm": 0.01999504491686821, "learning_rate": 3.390191897654584e-05, "loss": 0.0043, "step": 755 },
    { "epoch": 1.6204690831556503, "grad_norm": 0.019559355452656746, "learning_rate": 3.37953091684435e-05, "loss": 0.0026, "step": 760 },
    { "epoch": 1.6311300639658848, "grad_norm": 0.019753890112042427, "learning_rate": 3.368869936034115e-05, "loss": 0.0024, "step": 765 },
    { "epoch": 1.6417910447761193, "grad_norm": 0.024971093982458115, "learning_rate": 3.358208955223881e-05, "loss": 0.0026, "step": 770 },
    { "epoch": 1.652452025586354, "grad_norm": 0.109837107360363, "learning_rate": 3.347547974413646e-05, "loss": 0.0296, "step": 775 },
    { "epoch": 1.6631130063965884, "grad_norm": 0.01920832134783268, "learning_rate": 3.336886993603412e-05, "loss": 0.0024, "step": 780 },
    { "epoch": 1.6737739872068231, "grad_norm": 0.019149158149957657, "learning_rate": 3.326226012793177e-05, "loss": 0.0026, "step": 785 },
    { "epoch": 1.6844349680170576, "grad_norm": 0.01869184523820877, "learning_rate": 3.3155650319829424e-05, "loss": 0.0034, "step": 790 },
    { "epoch": 1.695095948827292, "grad_norm": 0.22865666449069977, "learning_rate": 3.304904051172708e-05, "loss": 0.0027, "step": 795 },
    { "epoch": 1.7057569296375266, "grad_norm": 0.020090360194444656, "learning_rate": 3.294243070362473e-05, "loss": 0.0024, "step": 800 },
    { "epoch": 1.716417910447761, "grad_norm": 0.018748512491583824, "learning_rate": 3.283582089552239e-05, "loss": 0.0022, "step": 805 },
    { "epoch": 1.7270788912579957, "grad_norm": 0.020859690383076668, "learning_rate": 3.272921108742004e-05, "loss": 0.0265, "step": 810 },
    { "epoch": 1.7377398720682304, "grad_norm": 0.020721951499581337, "learning_rate": 3.26226012793177e-05, "loss": 0.0025, "step": 815 },
    { "epoch": 1.748400852878465, "grad_norm": 0.019519254565238953, "learning_rate": 3.251599147121535e-05, "loss": 0.0022, "step": 820 },
    { "epoch": 1.7590618336886994, "grad_norm": 0.019015362486243248, "learning_rate": 3.240938166311301e-05, "loss": 0.0276, "step": 825 },
    { "epoch": 1.7697228144989339, "grad_norm": 7.688128471374512, "learning_rate": 3.230277185501066e-05, "loss": 0.0149, "step": 830 },
    { "epoch": 1.7803837953091683, "grad_norm": 0.017749857157468796, "learning_rate": 3.2196162046908317e-05, "loss": 0.0024, "step": 835 },
    { "epoch": 1.7910447761194028, "grad_norm": 0.017808672040700912, "learning_rate": 3.208955223880597e-05, "loss": 0.0023, "step": 840 },
    { "epoch": 1.8017057569296375, "grad_norm": 0.0173553004860878, "learning_rate": 3.1982942430703626e-05, "loss": 0.0021, "step": 845 },
    { "epoch": 1.8123667377398722, "grad_norm": 0.018168503418564796, "learning_rate": 3.187633262260128e-05, "loss": 0.0026, "step": 850 },
    { "epoch": 1.8230277185501067, "grad_norm": 0.016489172354340553, "learning_rate": 3.1769722814498935e-05, "loss": 0.0045, "step": 855 },
    { "epoch": 1.8336886993603412, "grad_norm": 0.018007785081863403, "learning_rate": 3.166311300639659e-05, "loss": 0.0021, "step": 860 },
    { "epoch": 1.8443496801705757, "grad_norm": 0.017267387360334396, "learning_rate": 3.1556503198294245e-05, "loss": 0.0023, "step": 865 },
    { "epoch": 1.8550106609808101, "grad_norm": 0.016420844942331314, "learning_rate": 3.14498933901919e-05, "loss": 0.0104, "step": 870 },
    { "epoch": 1.8656716417910446, "grad_norm": 0.016852952539920807, "learning_rate": 3.1343283582089554e-05, "loss": 0.002, "step": 875 },
    { "epoch": 1.8763326226012793, "grad_norm": 0.017920179292559624, "learning_rate": 3.123667377398721e-05, "loss": 0.002, "step": 880 },
    { "epoch": 1.886993603411514, "grad_norm": 0.016802307218313217, "learning_rate": 3.1130063965884864e-05, "loss": 0.0019, "step": 885 },
    { "epoch": 1.8976545842217485, "grad_norm": 0.016610046848654747, "learning_rate": 3.102345415778252e-05, "loss": 0.002, "step": 890 },
    { "epoch": 1.908315565031983, "grad_norm": 0.019902439787983894, "learning_rate": 3.0916844349680173e-05, "loss": 0.0021, "step": 895 },
    { "epoch": 1.9189765458422174, "grad_norm": 0.01531369797885418, "learning_rate": 3.081023454157783e-05, "loss": 0.0025, "step": 900 },
    { "epoch": 1.929637526652452, "grad_norm": 0.015508510172367096, "learning_rate": 3.070362473347548e-05, "loss": 0.0019, "step": 905 },
    { "epoch": 1.9402985074626866, "grad_norm": 0.5575276613235474, "learning_rate": 3.059701492537314e-05, "loss": 0.0122, "step": 910 },
    { "epoch": 1.950959488272921, "grad_norm": 0.015613945201039314, "learning_rate": 3.0490405117270792e-05, "loss": 0.0019, "step": 915 },
    { "epoch": 1.9616204690831558, "grad_norm": 0.015503398142755032, "learning_rate": 3.0383795309168444e-05, "loss": 0.0055, "step": 920 },
    { "epoch": 1.9722814498933903, "grad_norm": 3.0477633476257324, "learning_rate": 3.0277185501066102e-05, "loss": 0.0294, "step": 925 },
    { "epoch": 1.9829424307036247, "grad_norm": 0.015289404429495335, "learning_rate": 3.0170575692963753e-05, "loss": 0.0018, "step": 930 },
    { "epoch": 1.9936034115138592, "grad_norm": 6.750314235687256, "learning_rate": 3.006396588486141e-05, "loss": 0.0205, "step": 935 },
    { "epoch": 2.0, "eval_accuracy": 0.9896, "eval_loss": 0.04148600623011589, "eval_runtime": 17.7849, "eval_samples_per_second": 210.852, "eval_steps_per_second": 6.635, "step": 938 },
    { "epoch": 2.0042643923240937, "grad_norm": 0.08804287761449814, "learning_rate": 2.9957356076759063e-05, "loss": 0.0022, "step": 940 },
    { "epoch": 2.014925373134328, "grad_norm": 0.015377379953861237, "learning_rate": 2.9850746268656714e-05, "loss": 0.0326, "step": 945 },
    { "epoch": 2.025586353944563, "grad_norm": 0.024583661928772926, "learning_rate": 2.9744136460554372e-05, "loss": 0.0019, "step": 950 },
    { "epoch": 2.0362473347547976, "grad_norm": 0.015179556794464588, "learning_rate": 2.9637526652452023e-05, "loss": 0.0019, "step": 955 },
    { "epoch": 2.046908315565032, "grad_norm": 0.014737558551132679, "learning_rate": 2.953091684434968e-05, "loss": 0.0018, "step": 960 },
    { "epoch": 2.0575692963752665, "grad_norm": 0.059039849787950516, "learning_rate": 2.9424307036247333e-05, "loss": 0.0022, "step": 965 },
    { "epoch": 2.068230277185501, "grad_norm": 0.014863478019833565, "learning_rate": 2.931769722814499e-05, "loss": 0.0018, "step": 970 },
    { "epoch": 2.0788912579957355, "grad_norm": 0.015474963933229446, "learning_rate": 2.9211087420042642e-05, "loss": 0.0018, "step": 975 },
    { "epoch": 2.08955223880597, "grad_norm": 0.015029369853436947, "learning_rate": 2.91044776119403e-05, "loss": 0.0017, "step": 980 },
    { "epoch": 2.100213219616205, "grad_norm": 0.014501337893307209, "learning_rate": 2.8997867803837952e-05, "loss": 0.0017, "step": 985 },
    { "epoch": 2.1108742004264394, "grad_norm": 0.014287133701145649, "learning_rate": 2.889125799573561e-05, "loss": 0.0017, "step": 990 },
    { "epoch": 2.121535181236674, "grad_norm": 0.013947279192507267, "learning_rate": 2.878464818763326e-05, "loss": 0.0032, "step": 995 },
    { "epoch": 2.1321961620469083, "grad_norm": 0.0139941880479455, "learning_rate": 2.867803837953092e-05, "loss": 0.002, "step": 1000 },
    { "epoch": 2.142857142857143, "grad_norm": 0.014081170782446861, "learning_rate": 2.857142857142857e-05, "loss": 0.0017, "step": 1005 },
    { "epoch": 2.1535181236673773, "grad_norm": 0.015363846905529499, "learning_rate": 2.846481876332623e-05, "loss": 0.007, "step": 1010 },
    { "epoch": 2.1641791044776117, "grad_norm": 0.01426735706627369, "learning_rate": 2.835820895522388e-05, "loss": 0.0017, "step": 1015 },
    { "epoch": 2.1748400852878467, "grad_norm": 0.04286054149270058, "learning_rate": 2.825159914712154e-05, "loss": 0.0018, "step": 1020 },
    { "epoch": 2.185501066098081, "grad_norm": 0.013738875277340412, "learning_rate": 2.814498933901919e-05, "loss": 0.0026, "step": 1025 },
    { "epoch": 2.1961620469083156, "grad_norm": 0.4904980957508087, "learning_rate": 2.8038379530916848e-05, "loss": 0.0051, "step": 1030 },
    { "epoch": 2.20682302771855, "grad_norm": 0.013008911162614822, "learning_rate": 2.79317697228145e-05, "loss": 0.0016, "step": 1035 },
    { "epoch": 2.2174840085287846, "grad_norm": 0.013201174326241016, "learning_rate": 2.7825159914712157e-05, "loss": 0.0016, "step": 1040 },
    { "epoch": 2.228144989339019, "grad_norm": 0.03662689030170441, "learning_rate": 2.771855010660981e-05, "loss": 0.0019, "step": 1045 },
    { "epoch": 2.2388059701492535, "grad_norm": 0.012772216461598873, "learning_rate": 2.7611940298507467e-05, "loss": 0.0015, "step": 1050 },
    { "epoch": 2.2494669509594885, "grad_norm": 0.012919310480356216, "learning_rate": 2.7505330490405118e-05, "loss": 0.0019, "step": 1055 },
    { "epoch": 2.260127931769723, "grad_norm": 0.01390413660556078, "learning_rate": 2.7398720682302776e-05, "loss": 0.0017, "step": 1060 },
    { "epoch": 2.2707889125799574, "grad_norm": 0.013280141167342663, "learning_rate": 2.7292110874200428e-05, "loss": 0.0015, "step": 1065 },
    { "epoch": 2.281449893390192, "grad_norm": 0.013000053353607655, "learning_rate": 2.7185501066098086e-05, "loss": 0.0015, "step": 1070 },
    { "epoch": 2.2921108742004264, "grad_norm": 0.012426944449543953, "learning_rate": 2.7078891257995737e-05, "loss": 0.0016, "step": 1075 },
    { "epoch": 2.302771855010661, "grad_norm": 0.012249843217432499, "learning_rate": 2.6972281449893395e-05, "loss": 0.0015, "step": 1080 },
    { "epoch": 2.3134328358208958, "grad_norm": 0.012140439823269844, "learning_rate": 2.6865671641791047e-05, "loss": 0.0014, "step": 1085 },
    { "epoch": 2.3240938166311302, "grad_norm": 0.01209353469312191, "learning_rate": 2.6759061833688705e-05, "loss": 0.0015, "step": 1090 },
    { "epoch": 2.3347547974413647, "grad_norm": 0.01500478945672512, "learning_rate": 2.6652452025586356e-05, "loss": 0.0024, "step": 1095 },
    { "epoch": 2.345415778251599, "grad_norm": 0.012158229947090149, "learning_rate": 2.6545842217484007e-05, "loss": 0.0029, "step": 1100 },
    { "epoch": 2.3560767590618337, "grad_norm": 0.012071085162460804, "learning_rate": 2.6439232409381666e-05, "loss": 0.0014, "step": 1105 },
    { "epoch": 2.366737739872068, "grad_norm": 0.012218043208122253, "learning_rate": 2.6332622601279317e-05, "loss": 0.0049, "step": 1110 },
    { "epoch": 2.3773987206823026, "grad_norm": 0.01214613951742649, "learning_rate": 2.6226012793176975e-05, "loss": 0.0016, "step": 1115 },
    { "epoch": 2.388059701492537, "grad_norm": 0.012283292599022388, "learning_rate": 2.6119402985074626e-05, "loss": 0.0015, "step": 1120 },
    { "epoch": 2.398720682302772, "grad_norm": 0.011755337938666344, "learning_rate": 2.6012793176972285e-05, "loss": 0.0014, "step": 1125 },
    { "epoch": 2.4093816631130065, "grad_norm": 0.01584736444056034, "learning_rate": 2.5906183368869936e-05, "loss": 0.0014, "step": 1130 },
    { "epoch": 2.420042643923241, "grad_norm": 0.01156493742018938, "learning_rate": 2.5799573560767594e-05, "loss": 0.0014, "step": 1135 },
    { "epoch": 2.4307036247334755, "grad_norm": 0.02171207033097744, "learning_rate": 2.5692963752665245e-05, "loss": 0.0015, "step": 1140 },
    { "epoch": 2.44136460554371, "grad_norm": 0.019722815603017807, "learning_rate": 2.5586353944562904e-05, "loss": 0.0014, "step": 1145 },
    { "epoch": 2.4520255863539444, "grad_norm": 0.013081517070531845, "learning_rate": 2.5479744136460555e-05, "loss": 0.0014, "step": 1150 },
    { "epoch": 2.4626865671641793, "grad_norm": 0.011529254727065563, "learning_rate": 2.537313432835821e-05, "loss": 0.0014, "step": 1155 },
    { "epoch": 2.473347547974414, "grad_norm": 0.011184630915522575, "learning_rate": 2.5266524520255864e-05, "loss": 0.0058, "step": 1160 },
    { "epoch": 2.4840085287846483, "grad_norm": 0.014834458939731121, "learning_rate": 2.515991471215352e-05, "loss": 0.0013, "step": 1165 },
    { "epoch": 2.4946695095948828, "grad_norm": 0.011027764528989792, "learning_rate": 2.5053304904051174e-05, "loss": 0.0014, "step": 1170 },
    { "epoch": 2.5053304904051172, "grad_norm": 0.030367983505129814, "learning_rate": 2.494669509594883e-05, "loss": 0.0015, "step": 1175 },
    { "epoch": 2.5159914712153517, "grad_norm": 0.011093689128756523, "learning_rate": 2.4840085287846483e-05, "loss": 0.0013, "step": 1180 },
    { "epoch": 2.526652452025586, "grad_norm": 0.010750957764685154, "learning_rate": 2.4733475479744138e-05, "loss": 0.0029, "step": 1185 },
    { "epoch": 2.5373134328358207, "grad_norm": 0.010756141506135464, "learning_rate": 2.4626865671641793e-05, "loss": 0.0013, "step": 1190 },
    { "epoch": 2.5479744136460556, "grad_norm": 0.011194289661943913, "learning_rate": 2.4520255863539444e-05, "loss": 0.0013, "step": 1195 },
    { "epoch": 2.55863539445629, "grad_norm": 0.010614649392664433, "learning_rate": 2.44136460554371e-05, "loss": 0.0013, "step": 1200 },
    { "epoch": 2.5692963752665245, "grad_norm": 0.01083033811300993, "learning_rate": 2.4307036247334754e-05, "loss": 0.0013, "step": 1205 },
    { "epoch": 2.579957356076759, "grad_norm": 0.010807648301124573, "learning_rate": 2.420042643923241e-05, "loss": 0.0013, "step": 1210 },
    { "epoch": 2.5906183368869935, "grad_norm": 0.022627221420407295, "learning_rate": 2.4093816631130063e-05, "loss": 0.0013, "step": 1215 },
    { "epoch": 2.6012793176972284, "grad_norm": 0.010628443211317062, "learning_rate": 2.3987206823027718e-05, "loss": 0.0048, "step": 1220 },
    { "epoch": 2.611940298507463, "grad_norm": 0.010282629169523716, "learning_rate": 2.3880597014925373e-05, "loss": 0.0012, "step": 1225 },
    { "epoch": 2.6226012793176974, "grad_norm": 0.01436874084174633, "learning_rate": 2.3773987206823027e-05, "loss": 0.0017, "step": 1230 },
    { "epoch": 2.633262260127932, "grad_norm": 0.010025793686509132, "learning_rate": 2.3667377398720682e-05, "loss": 0.0012, "step": 1235 },
    { "epoch": 2.6439232409381663, "grad_norm": 0.2057594358921051, "learning_rate": 2.3560767590618337e-05, "loss": 0.0067, "step": 1240 },
    { "epoch": 2.654584221748401, "grad_norm": 0.010275648906826973, "learning_rate": 2.345415778251599e-05, "loss": 0.0012, "step": 1245 },
    { "epoch": 2.6652452025586353, "grad_norm": 0.029614688828587532, "learning_rate": 2.3347547974413646e-05, "loss": 0.0015, "step": 1250 },
    { "epoch": 2.6759061833688698, "grad_norm": 0.01017677504569292, "learning_rate": 2.32409381663113e-05, "loss": 0.0012, "step": 1255 },
    { "epoch": 2.6865671641791042, "grad_norm": 0.010082006454467773, "learning_rate": 2.3134328358208956e-05, "loss": 0.0012, "step": 1260 },
    { "epoch": 2.697228144989339, "grad_norm": 0.01118564885109663, "learning_rate": 2.302771855010661e-05, "loss": 0.0012, "step": 1265 },
    { "epoch": 2.7078891257995736, "grad_norm": 0.009883342310786247, "learning_rate": 2.2921108742004265e-05, "loss": 0.0015, "step": 1270 },
    { "epoch": 2.718550106609808, "grad_norm": 0.010351916775107384, "learning_rate": 2.281449893390192e-05, "loss": 0.0012, "step": 1275 },
    { "epoch": 2.7292110874200426, "grad_norm": 0.011892825365066528, "learning_rate": 2.2707889125799575e-05, "loss": 0.0027, "step": 1280 },
    { "epoch": 2.739872068230277, "grad_norm": 0.010221291333436966, "learning_rate": 2.260127931769723e-05, "loss": 0.0012, "step": 1285 },
    { "epoch": 2.750533049040512, "grad_norm": 0.010303476825356483, "learning_rate": 2.2494669509594884e-05, "loss": 0.0012, "step": 1290 },
    { "epoch": 2.7611940298507465, "grad_norm": 0.019318614155054092, "learning_rate": 2.238805970149254e-05, "loss": 0.0013, "step": 1295 },
    { "epoch": 2.771855010660981, "grad_norm": 0.009472822770476341, "learning_rate": 2.2281449893390194e-05, "loss": 0.0011, "step": 1300 },
    { "epoch": 2.7825159914712154, "grad_norm": 0.009815825149416924, "learning_rate": 2.217484008528785e-05, "loss": 0.0012, "step": 1305 },
    { "epoch": 2.79317697228145, "grad_norm": 0.009688694961369038, "learning_rate": 2.2068230277185503e-05, "loss": 0.0011, "step": 1310 },
    { "epoch": 2.8038379530916844, "grad_norm": 0.28238606452941895, "learning_rate": 2.1961620469083158e-05, "loss": 0.0435, "step": 1315 },
    { "epoch": 2.814498933901919, "grad_norm": 0.01650206558406353, "learning_rate": 2.1855010660980813e-05, "loss": 0.0011, "step": 1320 },
    { "epoch": 2.8251599147121533, "grad_norm": 0.009697536937892437, "learning_rate": 2.1748400852878467e-05, "loss": 0.0012, "step": 1325 },
    { "epoch": 2.835820895522388, "grad_norm": 0.010664798319339752, "learning_rate": 2.164179104477612e-05, "loss": 0.0011, "step": 1330 },
    { "epoch": 2.8464818763326227, "grad_norm": 0.009492074139416218, "learning_rate": 2.1535181236673773e-05, "loss": 0.0012, "step": 1335 },
    { "epoch": 2.857142857142857, "grad_norm": 0.009845663793385029, "learning_rate": 2.1428571428571428e-05, "loss": 0.0011, "step": 1340 },
    { "epoch": 2.8678038379530917, "grad_norm": 0.009600410237908363, "learning_rate": 2.1321961620469083e-05, "loss": 0.0011, "step": 1345 },
    { "epoch": 2.878464818763326, "grad_norm": 0.009601897560060024, "learning_rate": 2.1215351812366738e-05, "loss": 0.0011, "step": 1350 },
    { "epoch": 2.8891257995735606, "grad_norm": 0.010103050619363785, "learning_rate": 2.1108742004264392e-05, "loss": 0.0029, "step": 1355 },
    { "epoch": 2.8997867803837956, "grad_norm": 0.009285909123718739, "learning_rate": 2.1002132196162047e-05, "loss": 0.0131, "step": 1360 },
    { "epoch": 2.91044776119403, "grad_norm": 0.01248752698302269, "learning_rate": 2.0895522388059702e-05, "loss": 0.0011, "step": 1365 },
    { "epoch": 2.9211087420042645, "grad_norm": 0.010123838670551777, "learning_rate": 2.0788912579957357e-05, "loss": 0.0011, "step": 1370 },
    { "epoch": 2.931769722814499, "grad_norm": 0.009308276697993279, "learning_rate": 2.068230277185501e-05, "loss": 0.0011, "step": 1375 },
    { "epoch": 2.9424307036247335, "grad_norm": 0.009351525455713272, "learning_rate": 2.0575692963752666e-05, "loss": 0.0013, "step": 1380 },
    { "epoch": 2.953091684434968, "grad_norm": 0.016460668295621872, "learning_rate": 2.046908315565032e-05, "loss": 0.0012, "step": 1385 },
    { "epoch": 2.9637526652452024, "grad_norm": 0.00919937714934349, "learning_rate": 2.0362473347547976e-05, "loss": 0.0011, "step": 1390 },
    { "epoch": 2.974413646055437, "grad_norm": 0.009609651751816273, "learning_rate": 2.025586353944563e-05, "loss": 0.0011, "step": 1395 },
    { "epoch": 2.9850746268656714, "grad_norm": 0.013855514116585255, "learning_rate": 2.0149253731343285e-05, "loss": 0.0011, "step": 1400 },
    { "epoch": 2.9957356076759063, "grad_norm": 0.009133475832641125, "learning_rate": 2.004264392324094e-05, "loss": 0.0011, "step": 1405 },
    { "epoch": 3.0, "eval_accuracy": 0.9946666666666667, "eval_loss": 0.022856945171952248, "eval_runtime": 17.0183, "eval_samples_per_second": 220.351, "eval_steps_per_second": 6.934, "step": 1407 },
    { "epoch": 3.0063965884861408, "grad_norm": 0.009368853643536568, "learning_rate": 1.9936034115138594e-05, "loss": 0.0011, "step": 1410 },
    { "epoch": 3.0170575692963753, "grad_norm": 0.00928573589771986, "learning_rate": 1.982942430703625e-05, "loss": 0.0011, "step": 1415 },
    { "epoch": 3.0277185501066097, "grad_norm": 0.009126279503107071, "learning_rate": 1.9722814498933904e-05, "loss": 0.0012, "step": 1420 },
    { "epoch": 3.038379530916844, "grad_norm": 0.008992459625005722, "learning_rate": 1.961620469083156e-05, "loss": 0.001, "step": 1425 },
    { "epoch": 3.0490405117270787, "grad_norm": 0.009131360799074173, "learning_rate": 1.9509594882729213e-05, "loss": 0.0011, "step": 1430 },
    { "epoch": 3.0597014925373136, "grad_norm": 0.00903151836246252, "learning_rate": 1.9402985074626868e-05, "loss": 0.0071, "step": 1435 },
    { "epoch": 3.070362473347548, "grad_norm": 0.008744587190449238, "learning_rate": 1.9296375266524523e-05, "loss": 0.001, "step": 1440 },
    { "epoch": 3.0810234541577826, "grad_norm": 0.00879298523068428, "learning_rate": 1.9189765458422178e-05, "loss": 0.001, "step": 1445 },
    { "epoch": 3.091684434968017, "grad_norm": 0.008923621848225594, "learning_rate": 1.9083155650319832e-05, "loss": 0.001, "step": 1450 },
    { "epoch": 3.1023454157782515, "grad_norm": 0.008955532684922218, "learning_rate": 1.8976545842217487e-05, "loss": 0.0016, "step": 1455 },
    { "epoch": 3.113006396588486, "grad_norm": 0.008777831681072712, "learning_rate": 1.8869936034115142e-05, "loss": 0.001, "step": 1460 },
    { "epoch": 3.1236673773987205, "grad_norm": 0.009090494364500046, "learning_rate": 1.8763326226012797e-05, "loss": 0.001, "step": 1465 },
    { "epoch": 3.1343283582089554, "grad_norm": 0.00862858910113573, "learning_rate": 1.865671641791045e-05, "loss": 0.001, "step": 1470 },
    { "epoch": 3.14498933901919, "grad_norm": 0.009000065736472607, "learning_rate": 1.8550106609808106e-05, "loss": 0.001, "step": 1475 },
    { "epoch": 3.1556503198294243, "grad_norm": 0.008759795688092709, "learning_rate": 1.8443496801705757e-05, "loss": 0.0035, "step": 1480 },
    { "epoch": 3.166311300639659, "grad_norm": 0.008655063807964325, "learning_rate": 1.8336886993603412e-05, "loss": 0.001, "step": 1485 },
    { "epoch": 3.1769722814498933, "grad_norm": 0.008448773995041847, "learning_rate": 1.8230277185501067e-05, "loss": 0.001, "step": 1490 },
    { "epoch": 3.1876332622601278, "grad_norm": 0.00870588980615139, "learning_rate": 1.812366737739872e-05, "loss": 0.0011, "step": 1495 },
    { "epoch": 3.1982942430703627, "grad_norm": 0.008393441326916218, "learning_rate": 1.8017057569296376e-05, "loss": 0.001, "step": 1500 },
    { "epoch": 3.208955223880597, "grad_norm": 0.009119479916989803, "learning_rate": 1.791044776119403e-05, "loss": 0.001, "step": 1505 },
    { "epoch": 3.2196162046908317, "grad_norm": 0.008332905359566212, "learning_rate": 1.7803837953091686e-05, "loss": 0.001, "step": 1510 },
    { "epoch": 3.230277185501066, "grad_norm": 0.011142686009407043, "learning_rate": 1.769722814498934e-05, "loss": 0.001, "step": 1515 },
    { "epoch": 3.2409381663113006, "grad_norm": 0.008251124061644077, "learning_rate": 1.7590618336886995e-05, "loss": 0.001, "step": 1520 },
    { "epoch": 3.251599147121535, "grad_norm": 0.008427221328020096, "learning_rate": 1.7484008528784647e-05, "loss": 0.001, "step": 1525 },
    { "epoch": 3.2622601279317696, "grad_norm": 0.00855677668005228, "learning_rate": 1.73773987206823e-05, "loss": 0.001, "step": 1530 },
    { "epoch": 3.272921108742004, "grad_norm": 0.008078392595052719, "learning_rate": 1.7270788912579956e-05, "loss": 0.001, "step": 1535 },
    { "epoch": 3.283582089552239, "grad_norm": 0.008482052944600582, "learning_rate": 1.716417910447761e-05, "loss": 0.001, "step": 1540 },
    { "epoch": 3.2942430703624734, "grad_norm": 0.009341388940811157, "learning_rate": 1.7057569296375266e-05, "loss": 0.001, "step": 1545 },
    { "epoch": 3.304904051172708, "grad_norm": 0.008152157068252563, "learning_rate": 1.695095948827292e-05, "loss": 0.001, "step": 1550 },
    { "epoch": 3.3155650319829424, "grad_norm": 0.008092079311609268, "learning_rate": 1.6844349680170575e-05, "loss": 0.001, "step": 1555 },
    { "epoch": 3.326226012793177, "grad_norm": 0.008420913480222225, "learning_rate": 1.673773987206823e-05, "loss": 0.001, "step": 1560 },
    { "epoch": 3.3368869936034113, "grad_norm": 0.008343126624822617, "learning_rate": 1.6631130063965885e-05, "loss": 0.001, "step": 1565 },
    { "epoch": 3.3475479744136463, "grad_norm": 0.008144230581820011, "learning_rate": 1.652452025586354e-05, "loss": 0.001, "step": 1570 },
    { "epoch": 3.3582089552238807, "grad_norm": 0.007995973341166973, "learning_rate": 1.6417910447761194e-05, "loss": 0.0009, "step": 1575 },
    { "epoch": 3.368869936034115, "grad_norm": 0.007915866561233997, "learning_rate": 1.631130063965885e-05, "loss": 0.0009, "step": 1580 },
    { "epoch": 3.3795309168443497, "grad_norm": 0.007987083867192268, "learning_rate": 1.6204690831556504e-05, "loss": 0.0009, "step": 1585 },
    { "epoch": 3.390191897654584, "grad_norm": 0.008029541000723839, "learning_rate": 1.6098081023454158e-05, "loss": 0.0009, "step": 1590 },
    { "epoch": 3.4008528784648187, "grad_norm": 0.008263756521046162, "learning_rate": 1.5991471215351813e-05, "loss": 0.0009, "step": 1595 },
    { "epoch": 3.411513859275053, "grad_norm": 0.00779961422085762, "learning_rate": 1.5884861407249468e-05, "loss": 0.0009, "step": 1600 },
    { "epoch": 3.4221748400852876, "grad_norm": 0.00786847248673439, "learning_rate": 1.5778251599147122e-05, "loss": 0.0009, "step": 1605 },
    { "epoch": 3.4328358208955225, "grad_norm": 0.007777598220854998, "learning_rate": 1.5671641791044777e-05, "loss": 0.0009, "step": 1610 },
    { "epoch": 3.443496801705757, "grad_norm": 0.007693154737353325, "learning_rate": 1.5565031982942432e-05, "loss": 0.001, "step": 1615 },
    { "epoch": 3.4541577825159915, "grad_norm": 0.00797303393483162, "learning_rate": 1.5458422174840087e-05, "loss": 0.0009, "step": 1620 },
    { "epoch": 3.464818763326226, "grad_norm": 0.007763400208204985, "learning_rate": 1.535181236673774e-05, "loss": 0.0009, "step": 1625 },
    { "epoch": 3.4754797441364604, "grad_norm": 0.007800037506967783, "learning_rate": 1.5245202558635396e-05, "loss": 0.0009, "step": 1630 },
    { "epoch": 3.486140724946695, "grad_norm": 0.007904596626758575, "learning_rate": 1.5138592750533051e-05, "loss": 0.0009, "step": 1635 },
    { "epoch": 3.49680170575693, "grad_norm": 0.008192162029445171, "learning_rate": 1.5031982942430706e-05, "loss": 0.0013, "step": 1640 },
    { "epoch": 3.5074626865671643, "grad_norm": 0.007729103323072195, "learning_rate": 1.4925373134328357e-05, "loss": 0.0009, "step": 1645 },
    { "epoch": 3.518123667377399, "grad_norm": 0.2967156171798706, "learning_rate": 1.4818763326226012e-05, "loss": 0.044, "step": 1650 },
    { "epoch": 3.5287846481876333, "grad_norm": 0.008105948567390442, "learning_rate": 1.4712153518123666e-05, "loss": 0.0009, "step": 1655 },
    { "epoch": 3.5394456289978677, "grad_norm": 0.00775302154943347, "learning_rate": 1.4605543710021321e-05, "loss": 0.0009, "step": 1660 },
    { "epoch": 3.550106609808102, "grad_norm": 0.0077361236326396465, "learning_rate": 1.4498933901918976e-05, "loss": 0.0009, "step": 1665 },
    { "epoch": 3.5607675906183367, "grad_norm": 0.0088364128023386, "learning_rate": 1.439232409381663e-05, "loss": 0.0009, "step": 1670 },
    { "epoch": 3.571428571428571, "grad_norm": 0.009987774305045605, "learning_rate": 1.4285714285714285e-05, "loss": 0.001, "step": 1675 },
    { "epoch": 3.582089552238806, "grad_norm": 0.0083088343963027, "learning_rate": 1.417910447761194e-05, "loss": 0.0009, "step": 1680 },
    { "epoch": 3.5927505330490406, "grad_norm": 0.008930937387049198, "learning_rate": 1.4072494669509595e-05, "loss": 0.0052, "step": 1685 },
    { "epoch": 3.603411513859275, "grad_norm": 0.007975965738296509, "learning_rate": 1.396588486140725e-05, "loss": 0.0009, "step": 1690 },
    { "epoch": 3.6140724946695095, "grad_norm": 0.007811375893652439, "learning_rate": 1.3859275053304904e-05, "loss": 0.001, "step": 1695 },
    { "epoch": 3.624733475479744, "grad_norm": 0.007806049659848213, "learning_rate": 1.3752665245202559e-05, "loss": 0.0009, "step": 1700 },
    { "epoch": 3.635394456289979, "grad_norm": 0.007902558892965317, "learning_rate": 1.3646055437100214e-05, "loss": 0.0009, "step": 1705 },
    { "epoch": 3.6460554371002134, "grad_norm": 0.007543427404016256, "learning_rate": 1.3539445628997869e-05, "loss": 0.0009, "step": 1710 },
    { "epoch": 3.656716417910448, "grad_norm": 0.013299783691763878, "learning_rate": 1.3432835820895523e-05, "loss": 0.001, "step": 1715 },
    { "epoch": 3.6673773987206824, "grad_norm": 0.007508652750402689, "learning_rate": 1.3326226012793178e-05, "loss": 0.0009, "step": 1720 },
    { "epoch": 3.678038379530917, "grad_norm": 0.00801705289632082, "learning_rate": 1.3219616204690833e-05, "loss": 0.0009, "step": 1725 },
    { "epoch": 3.6886993603411513, "grad_norm": 0.007567389402538538, "learning_rate": 1.3113006396588488e-05, "loss": 0.0009, "step": 1730 },
    { "epoch": 3.699360341151386, "grad_norm": 0.007756579201668501, "learning_rate": 1.3006396588486142e-05, "loss": 0.0082, "step": 1735 },
    { "epoch": 3.7100213219616203, "grad_norm": 0.007432934828102589, "learning_rate": 1.2899786780383797e-05, "loss": 0.0009, "step": 1740 },
    { "epoch": 3.7206823027718547, "grad_norm": 0.007582199759781361, "learning_rate": 1.2793176972281452e-05, "loss": 0.0009, "step": 1745 },
    { "epoch": 3.7313432835820897, "grad_norm": 0.007463539484888315, "learning_rate": 1.2686567164179105e-05, "loss": 0.0008, "step": 1750 },
    { "epoch": 3.742004264392324, "grad_norm": 0.00755223399028182, "learning_rate": 1.257995735607676e-05, "loss": 0.0009, "step": 1755 },
    { "epoch": 3.7526652452025586, "grad_norm": 0.007484814617782831, "learning_rate": 1.2473347547974414e-05, "loss": 0.001, "step": 1760 },
    { "epoch": 3.763326226012793, "grad_norm": 0.007422641385346651, "learning_rate": 1.2366737739872069e-05, "loss": 0.0009, "step": 1765 },
    { "epoch": 3.7739872068230276, "grad_norm": 0.007435275241732597, "learning_rate": 1.2260127931769722e-05, "loss": 0.0009, "step": 1770 },
    { "epoch": 3.7846481876332625, "grad_norm": 0.00717807887122035, "learning_rate": 1.2153518123667377e-05, "loss": 0.0009, "step": 1775 },
    { "epoch": 3.795309168443497, "grad_norm": 0.007124255411326885, "learning_rate": 1.2046908315565032e-05, "loss": 0.0008, "step": 1780 },
    { "epoch": 3.8059701492537314, "grad_norm": 0.007450129836797714, "learning_rate": 1.1940298507462686e-05, "loss": 0.0008, "step": 1785 },
    { "epoch": 3.816631130063966, "grad_norm": 0.0072946385480463505, "learning_rate": 1.1833688699360341e-05, "loss": 0.0008, "step": 1790 },
    { "epoch": 3.8272921108742004, "grad_norm": 0.00721264211460948, "learning_rate": 1.1727078891257996e-05, "loss": 0.0008, "step": 1795 },
    { "epoch": 3.837953091684435, "grad_norm": 0.007415530737489462, "learning_rate": 1.162046908315565e-05, "loss": 0.0008, "step": 1800 },
    { "epoch": 3.8486140724946694,
"grad_norm": 0.007288595661520958, | |
"learning_rate": 1.1513859275053305e-05, | |
"loss": 0.0037, | |
"step": 1805 | |
}, | |
{ | |
"epoch": 3.859275053304904, | |
"grad_norm": 0.00771984551101923, | |
"learning_rate": 1.140724946695096e-05, | |
"loss": 0.0008, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 3.8699360341151388, | |
"grad_norm": 0.018424108624458313, | |
"learning_rate": 1.1300639658848615e-05, | |
"loss": 0.0013, | |
"step": 1815 | |
}, | |
{ | |
"epoch": 3.8805970149253732, | |
"grad_norm": 0.03557995706796646, | |
"learning_rate": 1.119402985074627e-05, | |
"loss": 0.0012, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 3.8912579957356077, | |
"grad_norm": 0.007265831809490919, | |
"learning_rate": 1.1087420042643924e-05, | |
"loss": 0.0008, | |
"step": 1825 | |
}, | |
{ | |
"epoch": 3.901918976545842, | |
"grad_norm": 0.00740558747202158, | |
"learning_rate": 1.0980810234541579e-05, | |
"loss": 0.0008, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 3.9125799573560767, | |
"grad_norm": 0.007415784057229757, | |
"learning_rate": 1.0874200426439234e-05, | |
"loss": 0.0008, | |
"step": 1835 | |
}, | |
{ | |
"epoch": 3.923240938166311, | |
"grad_norm": 0.00691164331510663, | |
"learning_rate": 1.0767590618336887e-05, | |
"loss": 0.0013, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 3.933901918976546, | |
"grad_norm": 0.007646833546459675, | |
"learning_rate": 1.0660980810234541e-05, | |
"loss": 0.0008, | |
"step": 1845 | |
}, | |
{ | |
"epoch": 3.9445628997867805, | |
"grad_norm": 0.006887191440910101, | |
"learning_rate": 1.0554371002132196e-05, | |
"loss": 0.0008, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 3.955223880597015, | |
"grad_norm": 0.007158663123846054, | |
"learning_rate": 1.0447761194029851e-05, | |
"loss": 0.0008, | |
"step": 1855 | |
}, | |
{ | |
"epoch": 3.9658848614072495, | |
"grad_norm": 0.007033709902316332, | |
"learning_rate": 1.0341151385927506e-05, | |
"loss": 0.0008, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 3.976545842217484, | |
"grad_norm": 0.00696564232930541, | |
"learning_rate": 1.023454157782516e-05, | |
"loss": 0.0008, | |
"step": 1865 | |
}, | |
{ | |
"epoch": 3.9872068230277184, | |
"grad_norm": 0.006933414377272129, | |
"learning_rate": 1.0127931769722815e-05, | |
"loss": 0.001, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 3.997867803837953, | |
"grad_norm": 0.0068906634114682674, | |
"learning_rate": 1.002132196162047e-05, | |
"loss": 0.0008, | |
"step": 1875 | |
}, | |
{ | |
"epoch": 4.0, | |
"eval_accuracy": 0.9949333333333333, | |
"eval_loss": 0.02470521070063114, | |
"eval_runtime": 20.5328, | |
"eval_samples_per_second": 182.634, | |
"eval_steps_per_second": 5.747, | |
"step": 1876 | |
}, | |
{ | |
"epoch": 4.008528784648187, | |
"grad_norm": 0.007092602085322142, | |
"learning_rate": 9.914712153518125e-06, | |
"loss": 0.0008, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 4.019189765458422, | |
"grad_norm": 0.007114503066986799, | |
"learning_rate": 9.80810234541578e-06, | |
"loss": 0.0009, | |
"step": 1885 | |
}, | |
{ | |
"epoch": 4.029850746268656, | |
"grad_norm": 0.007054983638226986, | |
"learning_rate": 9.701492537313434e-06, | |
"loss": 0.0008, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 4.040511727078891, | |
"grad_norm": 0.028849003836512566, | |
"learning_rate": 9.594882729211089e-06, | |
"loss": 0.001, | |
"step": 1895 | |
}, | |
{ | |
"epoch": 4.051172707889126, | |
"grad_norm": 0.007059966679662466, | |
"learning_rate": 9.488272921108744e-06, | |
"loss": 0.0012, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 4.061833688699361, | |
"grad_norm": 0.006836135871708393, | |
"learning_rate": 9.381663113006398e-06, | |
"loss": 0.0009, | |
"step": 1905 | |
}, | |
{ | |
"epoch": 4.072494669509595, | |
"grad_norm": 0.0068479483015835285, | |
"learning_rate": 9.275053304904053e-06, | |
"loss": 0.0008, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 4.08315565031983, | |
"grad_norm": 0.006866186857223511, | |
"learning_rate": 9.168443496801706e-06, | |
"loss": 0.0008, | |
"step": 1915 | |
}, | |
{ | |
"epoch": 4.093816631130064, | |
"grad_norm": 0.006824200972914696, | |
"learning_rate": 9.06183368869936e-06, | |
"loss": 0.001, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 4.104477611940299, | |
"grad_norm": 0.006811595056205988, | |
"learning_rate": 8.955223880597016e-06, | |
"loss": 0.0008, | |
"step": 1925 | |
}, | |
{ | |
"epoch": 4.115138592750533, | |
"grad_norm": 0.00677255354821682, | |
"learning_rate": 8.84861407249467e-06, | |
"loss": 0.0008, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 4.1257995735607675, | |
"grad_norm": 0.007070044055581093, | |
"learning_rate": 8.742004264392323e-06, | |
"loss": 0.0008, | |
"step": 1935 | |
}, | |
{ | |
"epoch": 4.136460554371002, | |
"grad_norm": 0.0068147857673466206, | |
"learning_rate": 8.635394456289978e-06, | |
"loss": 0.0008, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 4.1471215351812365, | |
"grad_norm": 0.0068505811505019665, | |
"learning_rate": 8.528784648187633e-06, | |
"loss": 0.0008, | |
"step": 1945 | |
}, | |
{ | |
"epoch": 4.157782515991471, | |
"grad_norm": 0.006723499391227961, | |
"learning_rate": 8.422174840085288e-06, | |
"loss": 0.0008, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 4.1684434968017055, | |
"grad_norm": 0.008438466116786003, | |
"learning_rate": 8.315565031982942e-06, | |
"loss": 0.0041, | |
"step": 1955 | |
}, | |
{ | |
"epoch": 4.17910447761194, | |
"grad_norm": 0.006672563962638378, | |
"learning_rate": 8.208955223880597e-06, | |
"loss": 0.0008, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 4.189765458422174, | |
"grad_norm": 0.006688879802823067, | |
"learning_rate": 8.102345415778252e-06, | |
"loss": 0.0008, | |
"step": 1965 | |
}, | |
{ | |
"epoch": 4.20042643923241, | |
"grad_norm": 0.006508278660476208, | |
"learning_rate": 7.995735607675907e-06, | |
"loss": 0.0008, | |
"step": 1970 | |
}, | |
{ | |
"epoch": 4.211087420042644, | |
"grad_norm": 0.006888619624078274, | |
"learning_rate": 7.889125799573561e-06, | |
"loss": 0.0008, | |
"step": 1975 | |
}, | |
{ | |
"epoch": 4.221748400852879, | |
"grad_norm": 0.006600632332265377, | |
"learning_rate": 7.782515991471216e-06, | |
"loss": 0.0008, | |
"step": 1980 | |
}, | |
{ | |
"epoch": 4.232409381663113, | |
"grad_norm": 0.16250911355018616, | |
"learning_rate": 7.67590618336887e-06, | |
"loss": 0.007, | |
"step": 1985 | |
}, | |
{ | |
"epoch": 4.243070362473348, | |
"grad_norm": 0.006614594720304012, | |
"learning_rate": 7.5692963752665255e-06, | |
"loss": 0.0008, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 4.253731343283582, | |
"grad_norm": 0.006743111647665501, | |
"learning_rate": 7.4626865671641785e-06, | |
"loss": 0.0008, | |
"step": 1995 | |
}, | |
{ | |
"epoch": 4.264392324093817, | |
"grad_norm": 0.006710912100970745, | |
"learning_rate": 7.356076759061833e-06, | |
"loss": 0.0008, | |
"step": 2000 | |
}, | |
{ | |
"epoch": 4.275053304904051, | |
"grad_norm": 0.0064783464185893536, | |
"learning_rate": 7.249466950959488e-06, | |
"loss": 0.001, | |
"step": 2005 | |
}, | |
{ | |
"epoch": 4.285714285714286, | |
"grad_norm": 0.006849293597042561, | |
"learning_rate": 7.142857142857143e-06, | |
"loss": 0.0008, | |
"step": 2010 | |
}, | |
{ | |
"epoch": 4.29637526652452, | |
"grad_norm": 0.006612967699766159, | |
"learning_rate": 7.0362473347547975e-06, | |
"loss": 0.0008, | |
"step": 2015 | |
}, | |
{ | |
"epoch": 4.3070362473347545, | |
"grad_norm": 0.006666961126029491, | |
"learning_rate": 6.929637526652452e-06, | |
"loss": 0.0008, | |
"step": 2020 | |
}, | |
{ | |
"epoch": 4.317697228144989, | |
"grad_norm": 0.09179400652647018, | |
"learning_rate": 6.823027718550107e-06, | |
"loss": 0.003, | |
"step": 2025 | |
}, | |
{ | |
"epoch": 4.3283582089552235, | |
"grad_norm": 0.006482471711933613, | |
"learning_rate": 6.716417910447762e-06, | |
"loss": 0.0008, | |
"step": 2030 | |
}, | |
{ | |
"epoch": 4.339019189765459, | |
"grad_norm": 0.006697284523397684, | |
"learning_rate": 6.609808102345416e-06, | |
"loss": 0.0008, | |
"step": 2035 | |
}, | |
{ | |
"epoch": 4.349680170575693, | |
"grad_norm": 0.007023471407592297, | |
"learning_rate": 6.503198294243071e-06, | |
"loss": 0.0008, | |
"step": 2040 | |
}, | |
{ | |
"epoch": 4.360341151385928, | |
"grad_norm": 0.006777188275009394, | |
"learning_rate": 6.396588486140726e-06, | |
"loss": 0.0008, | |
"step": 2045 | |
}, | |
{ | |
"epoch": 4.371002132196162, | |
"grad_norm": 0.006447089836001396, | |
"learning_rate": 6.28997867803838e-06, | |
"loss": 0.0008, | |
"step": 2050 | |
}, | |
{ | |
"epoch": 4.381663113006397, | |
"grad_norm": 0.006505937315523624, | |
"learning_rate": 6.1833688699360345e-06, | |
"loss": 0.0008, | |
"step": 2055 | |
}, | |
{ | |
"epoch": 4.392324093816631, | |
"grad_norm": 0.006575414910912514, | |
"learning_rate": 6.076759061833688e-06, | |
"loss": 0.0007, | |
"step": 2060 | |
}, | |
{ | |
"epoch": 4.402985074626866, | |
"grad_norm": 0.006808110978454351, | |
"learning_rate": 5.970149253731343e-06, | |
"loss": 0.0008, | |
"step": 2065 | |
}, | |
{ | |
"epoch": 4.4136460554371, | |
"grad_norm": 0.00644000293686986, | |
"learning_rate": 5.863539445628998e-06, | |
"loss": 0.0007, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 4.424307036247335, | |
"grad_norm": 0.015466631390154362, | |
"learning_rate": 5.756929637526653e-06, | |
"loss": 0.0009, | |
"step": 2075 | |
}, | |
{ | |
"epoch": 4.434968017057569, | |
"grad_norm": 0.006661558523774147, | |
"learning_rate": 5.650319829424307e-06, | |
"loss": 0.0008, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 4.445628997867804, | |
"grad_norm": 0.006613501347601414, | |
"learning_rate": 5.543710021321962e-06, | |
"loss": 0.0008, | |
"step": 2085 | |
}, | |
{ | |
"epoch": 4.456289978678038, | |
"grad_norm": 0.007009352091699839, | |
"learning_rate": 5.437100213219617e-06, | |
"loss": 0.0008, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 4.466950959488273, | |
"grad_norm": 0.006467157509177923, | |
"learning_rate": 5.330490405117271e-06, | |
"loss": 0.0007, | |
"step": 2095 | |
}, | |
{ | |
"epoch": 4.477611940298507, | |
"grad_norm": 0.006350195035338402, | |
"learning_rate": 5.2238805970149255e-06, | |
"loss": 0.0007, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 4.4882729211087415, | |
"grad_norm": 0.006541652604937553, | |
"learning_rate": 5.11727078891258e-06, | |
"loss": 0.0007, | |
"step": 2105 | |
}, | |
{ | |
"epoch": 4.498933901918977, | |
"grad_norm": 0.006451810710132122, | |
"learning_rate": 5.010660980810235e-06, | |
"loss": 0.0009, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 4.509594882729211, | |
"grad_norm": 0.006328529678285122, | |
"learning_rate": 4.90405117270789e-06, | |
"loss": 0.0008, | |
"step": 2115 | |
}, | |
{ | |
"epoch": 4.520255863539446, | |
"grad_norm": 0.006482933647930622, | |
"learning_rate": 4.797441364605544e-06, | |
"loss": 0.0008, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 4.53091684434968, | |
"grad_norm": 0.006646956317126751, | |
"learning_rate": 4.690831556503199e-06, | |
"loss": 0.0007, | |
"step": 2125 | |
}, | |
{ | |
"epoch": 4.541577825159915, | |
"grad_norm": 0.006530864164233208, | |
"learning_rate": 4.584221748400853e-06, | |
"loss": 0.0007, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 4.552238805970149, | |
"grad_norm": 0.006505405064672232, | |
"learning_rate": 4.477611940298508e-06, | |
"loss": 0.0007, | |
"step": 2135 | |
}, | |
{ | |
"epoch": 4.562899786780384, | |
"grad_norm": 0.006481026764959097, | |
"learning_rate": 4.371002132196162e-06, | |
"loss": 0.0007, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 4.573560767590618, | |
"grad_norm": 0.00649348646402359, | |
"learning_rate": 4.264392324093816e-06, | |
"loss": 0.0007, | |
"step": 2145 | |
}, | |
{ | |
"epoch": 4.584221748400853, | |
"grad_norm": 0.006477253511548042, | |
"learning_rate": 4.157782515991471e-06, | |
"loss": 0.0007, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 4.594882729211087, | |
"grad_norm": 0.006447369698435068, | |
"learning_rate": 4.051172707889126e-06, | |
"loss": 0.0007, | |
"step": 2155 | |
}, | |
{ | |
"epoch": 4.605543710021322, | |
"grad_norm": 0.006516118999570608, | |
"learning_rate": 3.944562899786781e-06, | |
"loss": 0.0007, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 4.616204690831556, | |
"grad_norm": 0.006422132253646851, | |
"learning_rate": 3.837953091684435e-06, | |
"loss": 0.0007, | |
"step": 2165 | |
}, | |
{ | |
"epoch": 4.6268656716417915, | |
"grad_norm": 0.006467184517532587, | |
"learning_rate": 3.7313432835820893e-06, | |
"loss": 0.0007, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 4.637526652452026, | |
"grad_norm": 0.0063330139964818954, | |
"learning_rate": 3.624733475479744e-06, | |
"loss": 0.0008, | |
"step": 2175 | |
}, | |
{ | |
"epoch": 4.6481876332622605, | |
"grad_norm": 0.006526220589876175, | |
"learning_rate": 3.5181236673773987e-06, | |
"loss": 0.0007, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 4.658848614072495, | |
"grad_norm": 0.006420309655368328, | |
"learning_rate": 3.4115138592750535e-06, | |
"loss": 0.0007, | |
"step": 2185 | |
}, | |
{ | |
"epoch": 4.669509594882729, | |
"grad_norm": 0.006409101188182831, | |
"learning_rate": 3.304904051172708e-06, | |
"loss": 0.0007, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 4.680170575692964, | |
"grad_norm": 0.006357685197144747, | |
"learning_rate": 3.198294243070363e-06, | |
"loss": 0.0007, | |
"step": 2195 | |
}, | |
{ | |
"epoch": 4.690831556503198, | |
"grad_norm": 0.006533981300890446, | |
"learning_rate": 3.0916844349680173e-06, | |
"loss": 0.0007, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 4.701492537313433, | |
"grad_norm": 0.006736787501722574, | |
"learning_rate": 2.9850746268656716e-06, | |
"loss": 0.0007, | |
"step": 2205 | |
}, | |
{ | |
"epoch": 4.712153518123667, | |
"grad_norm": 0.006252239923924208, | |
"learning_rate": 2.8784648187633263e-06, | |
"loss": 0.0007, | |
"step": 2210 | |
}, | |
{ | |
"epoch": 4.722814498933902, | |
"grad_norm": 0.19979675114154816, | |
"learning_rate": 2.771855010660981e-06, | |
"loss": 0.0075, | |
"step": 2215 | |
}, | |
{ | |
"epoch": 4.733475479744136, | |
"grad_norm": 0.006329044234007597, | |
"learning_rate": 2.6652452025586354e-06, | |
"loss": 0.0007, | |
"step": 2220 | |
}, | |
{ | |
"epoch": 4.744136460554371, | |
"grad_norm": 0.006287151016294956, | |
"learning_rate": 2.55863539445629e-06, | |
"loss": 0.0007, | |
"step": 2225 | |
}, | |
{ | |
"epoch": 4.754797441364605, | |
"grad_norm": 0.006514046806842089, | |
"learning_rate": 2.452025586353945e-06, | |
"loss": 0.0007, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 4.76545842217484, | |
"grad_norm": 0.006423839833587408, | |
"learning_rate": 2.3454157782515996e-06, | |
"loss": 0.0009, | |
"step": 2235 | |
}, | |
{ | |
"epoch": 4.776119402985074, | |
"grad_norm": 0.006428393069654703, | |
"learning_rate": 2.238805970149254e-06, | |
"loss": 0.0007, | |
"step": 2240 | |
}, | |
{ | |
"epoch": 4.786780383795309, | |
"grad_norm": 0.00650773337110877, | |
"learning_rate": 2.132196162046908e-06, | |
"loss": 0.0007, | |
"step": 2245 | |
}, | |
{ | |
"epoch": 4.797441364605544, | |
"grad_norm": 0.0062748147174716, | |
"learning_rate": 2.025586353944563e-06, | |
"loss": 0.0007, | |
"step": 2250 | |
}, | |
{ | |
"epoch": 4.8081023454157785, | |
"grad_norm": 0.006249801255762577, | |
"learning_rate": 1.9189765458422177e-06, | |
"loss": 0.0007, | |
"step": 2255 | |
}, | |
{ | |
"epoch": 4.818763326226013, | |
"grad_norm": 0.006277838721871376, | |
"learning_rate": 1.812366737739872e-06, | |
"loss": 0.0007, | |
"step": 2260 | |
}, | |
{ | |
"epoch": 4.8294243070362475, | |
"grad_norm": 0.006403633393347263, | |
"learning_rate": 1.7057569296375267e-06, | |
"loss": 0.0007, | |
"step": 2265 | |
}, | |
{ | |
"epoch": 4.840085287846482, | |
"grad_norm": 0.0062882257625460625, | |
"learning_rate": 1.5991471215351815e-06, | |
"loss": 0.0007, | |
"step": 2270 | |
}, | |
{ | |
"epoch": 4.850746268656716, | |
"grad_norm": 0.006579338572919369, | |
"learning_rate": 1.4925373134328358e-06, | |
"loss": 0.0007, | |
"step": 2275 | |
}, | |
{ | |
"epoch": 4.861407249466951, | |
"grad_norm": 0.006250455509871244, | |
"learning_rate": 1.3859275053304905e-06, | |
"loss": 0.0442, | |
"step": 2280 | |
}, | |
{ | |
"epoch": 4.872068230277185, | |
"grad_norm": 0.006211218424141407, | |
"learning_rate": 1.279317697228145e-06, | |
"loss": 0.0007, | |
"step": 2285 | |
}, | |
{ | |
"epoch": 4.88272921108742, | |
"grad_norm": 0.006650151219218969, | |
"learning_rate": 1.1727078891257998e-06, | |
"loss": 0.0007, | |
"step": 2290 | |
}, | |
{ | |
"epoch": 4.893390191897654, | |
"grad_norm": 0.006827252451330423, | |
"learning_rate": 1.066098081023454e-06, | |
"loss": 0.0029, | |
"step": 2295 | |
}, | |
{ | |
"epoch": 4.904051172707889, | |
"grad_norm": 0.006380705162882805, | |
"learning_rate": 9.594882729211088e-07, | |
"loss": 0.0007, | |
"step": 2300 | |
}, | |
{ | |
"epoch": 4.914712153518123, | |
"grad_norm": 0.007317108102142811, | |
"learning_rate": 8.528784648187634e-07, | |
"loss": 0.0007, | |
"step": 2305 | |
}, | |
{ | |
"epoch": 4.925373134328359, | |
"grad_norm": 0.006272484548389912, | |
"learning_rate": 7.462686567164179e-07, | |
"loss": 0.0007, | |
"step": 2310 | |
}, | |
{ | |
"epoch": 4.936034115138593, | |
"grad_norm": 0.008044550195336342, | |
"learning_rate": 6.396588486140725e-07, | |
"loss": 0.0008, | |
"step": 2315 | |
}, | |
{ | |
"epoch": 4.946695095948828, | |
"grad_norm": 0.006260822061449289, | |
"learning_rate": 5.33049040511727e-07, | |
"loss": 0.0007, | |
"step": 2320 | |
}, | |
{ | |
"epoch": 4.957356076759062, | |
"grad_norm": 0.0062827724032104015, | |
"learning_rate": 4.264392324093817e-07, | |
"loss": 0.0007, | |
"step": 2325 | |
}, | |
{ | |
"epoch": 4.968017057569297, | |
"grad_norm": 0.006444897036999464, | |
"learning_rate": 3.1982942430703626e-07, | |
"loss": 0.0007, | |
"step": 2330 | |
}, | |
{ | |
"epoch": 4.978678038379531, | |
"grad_norm": 0.015175371430814266, | |
"learning_rate": 2.1321961620469084e-07, | |
"loss": 0.0008, | |
"step": 2335 | |
}, | |
{ | |
"epoch": 4.9893390191897655, | |
"grad_norm": 0.006180144846439362, | |
"learning_rate": 1.0660980810234542e-07, | |
"loss": 0.0007, | |
"step": 2340 | |
}, | |
{ | |
"epoch": 5.0, | |
"grad_norm": 0.006668759509921074, | |
"learning_rate": 0.0, | |
"loss": 0.0007, | |
"step": 2345 | |
}, | |
{ | |
"epoch": 5.0, | |
"eval_accuracy": 0.9949333333333333, | |
"eval_loss": 0.02496175467967987, | |
"eval_runtime": 16.5113, | |
"eval_samples_per_second": 227.117, | |
"eval_steps_per_second": 7.147, | |
"step": 2345 | |
}, | |
{ | |
"epoch": 5.0, | |
"step": 2345, | |
"total_flos": 5.8118992210944e+18, | |
"train_loss": 0.014759776034136253, | |
"train_runtime": 832.1114, | |
"train_samples_per_second": 90.132, | |
"train_steps_per_second": 2.818 | |
} | |
], | |
"logging_steps": 5, | |
"max_steps": 2345, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 5, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": true | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 5.8118992210944e+18, | |
"train_batch_size": 32, | |
"trial_name": null, | |
"trial_params": null | |
} | |