|
{
  "best_metric": 0.97,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-1053",
  "epoch": 2.9936034115138592,
  "eval_steps": 500,
  "global_step": 1053,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.028429282160625444,
      "grad_norm": 5.58591365814209,
      "learning_rate": 4.716981132075472e-06,
      "loss": 2.3278,
      "step": 10
    },
    {
      "epoch": 0.05685856432125089,
      "grad_norm": 4.536377429962158,
      "learning_rate": 9.433962264150944e-06,
      "loss": 2.2763,
      "step": 20
    },
    {
      "epoch": 0.08528784648187633,
      "grad_norm": 4.974976539611816,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 2.1632,
      "step": 30
    },
    {
      "epoch": 0.11371712864250177,
      "grad_norm": 5.416134834289551,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 1.9768,
      "step": 40
    },
    {
      "epoch": 0.14214641080312723,
      "grad_norm": 8.11315631866455,
      "learning_rate": 2.358490566037736e-05,
      "loss": 1.7307,
      "step": 50
    },
    {
      "epoch": 0.17057569296375266,
      "grad_norm": 11.47766399383545,
      "learning_rate": 2.830188679245283e-05,
      "loss": 1.4492,
      "step": 60
    },
    {
      "epoch": 0.19900497512437812,
      "grad_norm": 15.754049301147461,
      "learning_rate": 3.30188679245283e-05,
      "loss": 1.2237,
      "step": 70
    },
    {
      "epoch": 0.22743425728500355,
      "grad_norm": 14.765463829040527,
      "learning_rate": 3.7735849056603776e-05,
      "loss": 1.0719,
      "step": 80
    },
    {
      "epoch": 0.255863539445629,
      "grad_norm": 9.880644798278809,
      "learning_rate": 4.245283018867925e-05,
      "loss": 0.9904,
      "step": 90
    },
    {
      "epoch": 0.28429282160625446,
      "grad_norm": 16.637893676757812,
      "learning_rate": 4.716981132075472e-05,
      "loss": 0.902,
      "step": 100
    },
    {
      "epoch": 0.31272210376687987,
      "grad_norm": 14.136957168579102,
      "learning_rate": 4.978880675818374e-05,
      "loss": 0.8026,
      "step": 110
    },
    {
      "epoch": 0.3411513859275053,
      "grad_norm": 11.47761344909668,
      "learning_rate": 4.9260823653643085e-05,
      "loss": 0.7073,
      "step": 120
    },
    {
      "epoch": 0.3695806680881308,
      "grad_norm": 19.195457458496094,
      "learning_rate": 4.8732840549102435e-05,
      "loss": 0.7635,
      "step": 130
    },
    {
      "epoch": 0.39800995024875624,
      "grad_norm": 13.571340560913086,
      "learning_rate": 4.820485744456177e-05,
      "loss": 0.7359,
      "step": 140
    },
    {
      "epoch": 0.42643923240938164,
      "grad_norm": 9.327037811279297,
      "learning_rate": 4.767687434002112e-05,
      "loss": 0.6394,
      "step": 150
    },
    {
      "epoch": 0.4548685145700071,
      "grad_norm": 11.49142837524414,
      "learning_rate": 4.7148891235480466e-05,
      "loss": 0.6305,
      "step": 160
    },
    {
      "epoch": 0.48329779673063256,
      "grad_norm": 9.735462188720703,
      "learning_rate": 4.662090813093981e-05,
      "loss": 0.6641,
      "step": 170
    },
    {
      "epoch": 0.511727078891258,
      "grad_norm": 14.966803550720215,
      "learning_rate": 4.609292502639916e-05,
      "loss": 0.6013,
      "step": 180
    },
    {
      "epoch": 0.5401563610518835,
      "grad_norm": 10.155216217041016,
      "learning_rate": 4.55649419218585e-05,
      "loss": 0.6402,
      "step": 190
    },
    {
      "epoch": 0.5685856432125089,
      "grad_norm": 9.162310600280762,
      "learning_rate": 4.503695881731785e-05,
      "loss": 0.593,
      "step": 200
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 10.704963684082031,
      "learning_rate": 4.45089757127772e-05,
      "loss": 0.5944,
      "step": 210
    },
    {
      "epoch": 0.6254442075337597,
      "grad_norm": 9.836397171020508,
      "learning_rate": 4.398099260823654e-05,
      "loss": 0.5976,
      "step": 220
    },
    {
      "epoch": 0.6538734896943852,
      "grad_norm": 9.258991241455078,
      "learning_rate": 4.3453009503695884e-05,
      "loss": 0.5715,
      "step": 230
    },
    {
      "epoch": 0.6823027718550106,
      "grad_norm": 10.040568351745605,
      "learning_rate": 4.292502639915523e-05,
      "loss": 0.6008,
      "step": 240
    },
    {
      "epoch": 0.7107320540156361,
      "grad_norm": 11.320852279663086,
      "learning_rate": 4.239704329461457e-05,
      "loss": 0.5517,
      "step": 250
    },
    {
      "epoch": 0.7391613361762616,
      "grad_norm": 14.075531005859375,
      "learning_rate": 4.186906019007392e-05,
      "loss": 0.5207,
      "step": 260
    },
    {
      "epoch": 0.767590618336887,
      "grad_norm": 8.05193042755127,
      "learning_rate": 4.1341077085533265e-05,
      "loss": 0.5601,
      "step": 270
    },
    {
      "epoch": 0.7960199004975125,
      "grad_norm": 9.463001251220703,
      "learning_rate": 4.081309398099261e-05,
      "loss": 0.5246,
      "step": 280
    },
    {
      "epoch": 0.8244491826581379,
      "grad_norm": 8.247632026672363,
      "learning_rate": 4.028511087645195e-05,
      "loss": 0.5047,
      "step": 290
    },
    {
      "epoch": 0.8528784648187633,
      "grad_norm": 13.2966890335083,
      "learning_rate": 3.97571277719113e-05,
      "loss": 0.4996,
      "step": 300
    },
    {
      "epoch": 0.8813077469793887,
      "grad_norm": 9.029179573059082,
      "learning_rate": 3.9229144667370646e-05,
      "loss": 0.4836,
      "step": 310
    },
    {
      "epoch": 0.9097370291400142,
      "grad_norm": 8.053582191467285,
      "learning_rate": 3.870116156282999e-05,
      "loss": 0.4933,
      "step": 320
    },
    {
      "epoch": 0.9381663113006397,
      "grad_norm": 9.279789924621582,
      "learning_rate": 3.817317845828934e-05,
      "loss": 0.4748,
      "step": 330
    },
    {
      "epoch": 0.9665955934612651,
      "grad_norm": 16.859342575073242,
      "learning_rate": 3.764519535374868e-05,
      "loss": 0.5047,
      "step": 340
    },
    {
      "epoch": 0.9950248756218906,
      "grad_norm": 8.529033660888672,
      "learning_rate": 3.711721224920803e-05,
      "loss": 0.4857,
      "step": 350
    },
    {
      "epoch": 0.997867803837953,
      "eval_accuracy": 0.947,
      "eval_loss": 0.158402681350708,
      "eval_runtime": 30.7764,
      "eval_samples_per_second": 162.462,
      "eval_steps_per_second": 5.101,
      "step": 351
    },
    {
      "epoch": 1.023454157782516,
      "grad_norm": 11.528229713439941,
      "learning_rate": 3.658922914466738e-05,
      "loss": 0.4896,
      "step": 360
    },
    {
      "epoch": 1.0518834399431414,
      "grad_norm": 10.65190315246582,
      "learning_rate": 3.6061246040126714e-05,
      "loss": 0.4846,
      "step": 370
    },
    {
      "epoch": 1.080312722103767,
      "grad_norm": 9.108841896057129,
      "learning_rate": 3.5533262935586064e-05,
      "loss": 0.4785,
      "step": 380
    },
    {
      "epoch": 1.1087420042643923,
      "grad_norm": 13.28585147857666,
      "learning_rate": 3.500527983104541e-05,
      "loss": 0.478,
      "step": 390
    },
    {
      "epoch": 1.1371712864250179,
      "grad_norm": 12.707968711853027,
      "learning_rate": 3.447729672650475e-05,
      "loss": 0.4623,
      "step": 400
    },
    {
      "epoch": 1.1656005685856432,
      "grad_norm": 10.01926040649414,
      "learning_rate": 3.3949313621964095e-05,
      "loss": 0.4082,
      "step": 410
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 13.867918968200684,
      "learning_rate": 3.3421330517423445e-05,
      "loss": 0.4163,
      "step": 420
    },
    {
      "epoch": 1.2224591329068941,
      "grad_norm": 8.311562538146973,
      "learning_rate": 3.289334741288279e-05,
      "loss": 0.4527,
      "step": 430
    },
    {
      "epoch": 1.2508884150675195,
      "grad_norm": 8.391691207885742,
      "learning_rate": 3.236536430834213e-05,
      "loss": 0.4782,
      "step": 440
    },
    {
      "epoch": 1.279317697228145,
      "grad_norm": 17.120820999145508,
      "learning_rate": 3.183738120380148e-05,
      "loss": 0.4233,
      "step": 450
    },
    {
      "epoch": 1.3077469793887704,
      "grad_norm": 10.727948188781738,
      "learning_rate": 3.130939809926082e-05,
      "loss": 0.48,
      "step": 460
    },
    {
      "epoch": 1.336176261549396,
      "grad_norm": 10.990976333618164,
      "learning_rate": 3.078141499472017e-05,
      "loss": 0.4643,
      "step": 470
    },
    {
      "epoch": 1.3646055437100213,
      "grad_norm": 10.494908332824707,
      "learning_rate": 3.0253431890179517e-05,
      "loss": 0.4159,
      "step": 480
    },
    {
      "epoch": 1.3930348258706466,
      "grad_norm": 12.02963638305664,
      "learning_rate": 2.972544878563886e-05,
      "loss": 0.4374,
      "step": 490
    },
    {
      "epoch": 1.4214641080312722,
      "grad_norm": 9.365103721618652,
      "learning_rate": 2.9197465681098207e-05,
      "loss": 0.4491,
      "step": 500
    },
    {
      "epoch": 1.4498933901918978,
      "grad_norm": 9.779677391052246,
      "learning_rate": 2.8669482576557548e-05,
      "loss": 0.4478,
      "step": 510
    },
    {
      "epoch": 1.4783226723525231,
      "grad_norm": 10.054044723510742,
      "learning_rate": 2.8141499472016898e-05,
      "loss": 0.4301,
      "step": 520
    },
    {
      "epoch": 1.5067519545131485,
      "grad_norm": 11.813431739807129,
      "learning_rate": 2.7613516367476245e-05,
      "loss": 0.4675,
      "step": 530
    },
    {
      "epoch": 1.535181236673774,
      "grad_norm": 11.177428245544434,
      "learning_rate": 2.7085533262935585e-05,
      "loss": 0.4683,
      "step": 540
    },
    {
      "epoch": 1.5636105188343994,
      "grad_norm": 8.778236389160156,
      "learning_rate": 2.6557550158394935e-05,
      "loss": 0.434,
      "step": 550
    },
    {
      "epoch": 1.5920398009950247,
      "grad_norm": 9.267476081848145,
      "learning_rate": 2.6029567053854276e-05,
      "loss": 0.4076,
      "step": 560
    },
    {
      "epoch": 1.6204690831556503,
      "grad_norm": 8.8627347946167,
      "learning_rate": 2.5501583949313622e-05,
      "loss": 0.441,
      "step": 570
    },
    {
      "epoch": 1.6488983653162759,
      "grad_norm": 8.466072082519531,
      "learning_rate": 2.497360084477297e-05,
      "loss": 0.4463,
      "step": 580
    },
    {
      "epoch": 1.6773276474769012,
      "grad_norm": 9.252815246582031,
      "learning_rate": 2.4445617740232313e-05,
      "loss": 0.4143,
      "step": 590
    },
    {
      "epoch": 1.7057569296375266,
      "grad_norm": 11.29322624206543,
      "learning_rate": 2.391763463569166e-05,
      "loss": 0.4185,
      "step": 600
    },
    {
      "epoch": 1.7341862117981521,
      "grad_norm": 8.104042053222656,
      "learning_rate": 2.3389651531151003e-05,
      "loss": 0.4053,
      "step": 610
    },
    {
      "epoch": 1.7626154939587777,
      "grad_norm": 8.014995574951172,
      "learning_rate": 2.286166842661035e-05,
      "loss": 0.4331,
      "step": 620
    },
    {
      "epoch": 1.7910447761194028,
      "grad_norm": 5.712684631347656,
      "learning_rate": 2.2333685322069694e-05,
      "loss": 0.366,
      "step": 630
    },
    {
      "epoch": 1.8194740582800284,
      "grad_norm": 10.232897758483887,
      "learning_rate": 2.180570221752904e-05,
      "loss": 0.4036,
      "step": 640
    },
    {
      "epoch": 1.847903340440654,
      "grad_norm": 7.411330699920654,
      "learning_rate": 2.1277719112988384e-05,
      "loss": 0.3692,
      "step": 650
    },
    {
      "epoch": 1.8763326226012793,
      "grad_norm": 11.07670783996582,
      "learning_rate": 2.074973600844773e-05,
      "loss": 0.4312,
      "step": 660
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 13.74502182006836,
      "learning_rate": 2.0221752903907075e-05,
      "loss": 0.3928,
      "step": 670
    },
    {
      "epoch": 1.9331911869225302,
      "grad_norm": 8.972488403320312,
      "learning_rate": 1.9693769799366422e-05,
      "loss": 0.3961,
      "step": 680
    },
    {
      "epoch": 1.9616204690831558,
      "grad_norm": 7.764671802520752,
      "learning_rate": 1.9165786694825765e-05,
      "loss": 0.3436,
      "step": 690
    },
    {
      "epoch": 1.9900497512437811,
      "grad_norm": 14.391060829162598,
      "learning_rate": 1.863780359028511e-05,
      "loss": 0.3775,
      "step": 700
    },
    {
      "epoch": 1.9985785358919688,
      "eval_accuracy": 0.9658,
      "eval_loss": 0.10344034433364868,
      "eval_runtime": 30.5926,
      "eval_samples_per_second": 163.438,
      "eval_steps_per_second": 5.132,
      "step": 703
    },
    {
      "epoch": 2.0184790334044065,
      "grad_norm": 10.261659622192383,
      "learning_rate": 1.810982048574446e-05,
      "loss": 0.38,
      "step": 710
    },
    {
      "epoch": 2.046908315565032,
      "grad_norm": 8.334637641906738,
      "learning_rate": 1.7581837381203803e-05,
      "loss": 0.3874,
      "step": 720
    },
    {
      "epoch": 2.0753375977256576,
      "grad_norm": 10.484701156616211,
      "learning_rate": 1.7053854276663146e-05,
      "loss": 0.382,
      "step": 730
    },
    {
      "epoch": 2.1037668798862827,
      "grad_norm": 7.803796291351318,
      "learning_rate": 1.6525871172122493e-05,
      "loss": 0.3841,
      "step": 740
    },
    {
      "epoch": 2.1321961620469083,
      "grad_norm": 10.345030784606934,
      "learning_rate": 1.5997888067581837e-05,
      "loss": 0.3578,
      "step": 750
    },
    {
      "epoch": 2.160625444207534,
      "grad_norm": 9.595276832580566,
      "learning_rate": 1.5469904963041184e-05,
      "loss": 0.3473,
      "step": 760
    },
    {
      "epoch": 2.189054726368159,
      "grad_norm": 11.557988166809082,
      "learning_rate": 1.4941921858500529e-05,
      "loss": 0.3799,
      "step": 770
    },
    {
      "epoch": 2.2174840085287846,
      "grad_norm": 7.893617153167725,
      "learning_rate": 1.4413938753959874e-05,
      "loss": 0.3934,
      "step": 780
    },
    {
      "epoch": 2.24591329068941,
      "grad_norm": 10.705870628356934,
      "learning_rate": 1.388595564941922e-05,
      "loss": 0.38,
      "step": 790
    },
    {
      "epoch": 2.2743425728500357,
      "grad_norm": 9.459859848022461,
      "learning_rate": 1.3357972544878563e-05,
      "loss": 0.3926,
      "step": 800
    },
    {
      "epoch": 2.302771855010661,
      "grad_norm": 11.68864917755127,
      "learning_rate": 1.2829989440337912e-05,
      "loss": 0.3547,
      "step": 810
    },
    {
      "epoch": 2.3312011371712864,
      "grad_norm": 10.39009952545166,
      "learning_rate": 1.2302006335797255e-05,
      "loss": 0.3512,
      "step": 820
    },
    {
      "epoch": 2.359630419331912,
      "grad_norm": 8.366880416870117,
      "learning_rate": 1.17740232312566e-05,
      "loss": 0.4027,
      "step": 830
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 9.72803020477295,
      "learning_rate": 1.1246040126715946e-05,
      "loss": 0.3649,
      "step": 840
    },
    {
      "epoch": 2.4164889836531627,
      "grad_norm": 11.347773551940918,
      "learning_rate": 1.0718057022175291e-05,
      "loss": 0.3598,
      "step": 850
    },
    {
      "epoch": 2.4449182658137882,
      "grad_norm": 8.40634536743164,
      "learning_rate": 1.0190073917634636e-05,
      "loss": 0.3775,
      "step": 860
    },
    {
      "epoch": 2.473347547974414,
      "grad_norm": 8.892584800720215,
      "learning_rate": 9.662090813093982e-06,
      "loss": 0.3607,
      "step": 870
    },
    {
      "epoch": 2.501776830135039,
      "grad_norm": 14.059830665588379,
      "learning_rate": 9.134107708553327e-06,
      "loss": 0.3789,
      "step": 880
    },
    {
      "epoch": 2.5302061122956645,
      "grad_norm": 9.594950675964355,
      "learning_rate": 8.606124604012672e-06,
      "loss": 0.3328,
      "step": 890
    },
    {
      "epoch": 2.55863539445629,
      "grad_norm": 12.827249526977539,
      "learning_rate": 8.078141499472017e-06,
      "loss": 0.3698,
      "step": 900
    },
    {
      "epoch": 2.587064676616915,
      "grad_norm": 12.365326881408691,
      "learning_rate": 7.5501583949313625e-06,
      "loss": 0.3644,
      "step": 910
    },
    {
      "epoch": 2.6154939587775408,
      "grad_norm": 5.1415534019470215,
      "learning_rate": 7.022175290390708e-06,
      "loss": 0.3417,
      "step": 920
    },
    {
      "epoch": 2.6439232409381663,
      "grad_norm": 7.832413673400879,
      "learning_rate": 6.494192185850054e-06,
      "loss": 0.3696,
      "step": 930
    },
    {
      "epoch": 2.672352523098792,
      "grad_norm": 8.78602123260498,
      "learning_rate": 5.966209081309398e-06,
      "loss": 0.3428,
      "step": 940
    },
    {
      "epoch": 2.7007818052594175,
      "grad_norm": 9.317280769348145,
      "learning_rate": 5.438225976768744e-06,
      "loss": 0.3492,
      "step": 950
    },
    {
      "epoch": 2.7292110874200426,
      "grad_norm": 7.861667633056641,
      "learning_rate": 4.910242872228089e-06,
      "loss": 0.3556,
      "step": 960
    },
    {
      "epoch": 2.757640369580668,
      "grad_norm": 8.467522621154785,
      "learning_rate": 4.382259767687434e-06,
      "loss": 0.3456,
      "step": 970
    },
    {
      "epoch": 2.7860696517412933,
      "grad_norm": 10.145249366760254,
      "learning_rate": 3.854276663146779e-06,
      "loss": 0.3221,
      "step": 980
    },
    {
      "epoch": 2.814498933901919,
      "grad_norm": 11.336479187011719,
      "learning_rate": 3.326293558606125e-06,
      "loss": 0.4209,
      "step": 990
    },
    {
      "epoch": 2.8429282160625444,
      "grad_norm": 9.06911849975586,
      "learning_rate": 2.79831045406547e-06,
      "loss": 0.3384,
      "step": 1000
    },
    {
      "epoch": 2.87135749822317,
      "grad_norm": 8.496155738830566,
      "learning_rate": 2.2703273495248154e-06,
      "loss": 0.3254,
      "step": 1010
    },
    {
      "epoch": 2.8997867803837956,
      "grad_norm": 7.5788960456848145,
      "learning_rate": 1.7423442449841606e-06,
      "loss": 0.3787,
      "step": 1020
    },
    {
      "epoch": 2.9282160625444207,
      "grad_norm": 11.145151138305664,
      "learning_rate": 1.2143611404435059e-06,
      "loss": 0.3311,
      "step": 1030
    },
    {
      "epoch": 2.9566453447050463,
      "grad_norm": 6.017477512359619,
      "learning_rate": 6.863780359028511e-07,
      "loss": 0.3586,
      "step": 1040
    },
    {
      "epoch": 2.9850746268656714,
      "grad_norm": 12.3096342086792,
      "learning_rate": 1.5839493136219642e-07,
      "loss": 0.3216,
      "step": 1050
    },
    {
      "epoch": 2.9936034115138592,
      "eval_accuracy": 0.97,
      "eval_loss": 0.0925411656498909,
      "eval_runtime": 30.7915,
      "eval_samples_per_second": 162.382,
      "eval_steps_per_second": 5.099,
      "step": 1053
    },
    {
      "epoch": 2.9936034115138592,
      "step": 1053,
      "total_flos": 3.3497451642252165e+18,
      "train_loss": 0.5581566964798843,
      "train_runtime": 2011.3946,
      "train_samples_per_second": 67.118,
      "train_steps_per_second": 0.524
    }
  ],
  "logging_steps": 10,
  "max_steps": 1053,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 3.3497451642252165e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|