{
  "best_metric": 0.969,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-1053",
  "epoch": 2.9936034115138592,
  "eval_steps": 500,
  "global_step": 1053,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 5.63645601272583,
      "learning_rate": 4.716981132075472e-06,
      "loss": 2.3306,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.468899726867676,
      "learning_rate": 9.433962264150944e-06,
      "loss": 2.2678,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.23107385635376,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 2.1477,
      "step": 30
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.676746368408203,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 1.9591,
      "step": 40
    },
    {
      "epoch": 0.14,
      "grad_norm": 10.20413875579834,
      "learning_rate": 2.358490566037736e-05,
      "loss": 1.6696,
      "step": 50
    },
    {
      "epoch": 0.17,
      "grad_norm": 12.014700889587402,
      "learning_rate": 2.830188679245283e-05,
      "loss": 1.3329,
      "step": 60
    },
    {
      "epoch": 0.2,
      "grad_norm": 13.840166091918945,
      "learning_rate": 3.30188679245283e-05,
      "loss": 1.1415,
      "step": 70
    },
    {
      "epoch": 0.23,
      "grad_norm": 14.116910934448242,
      "learning_rate": 3.7735849056603776e-05,
      "loss": 1.024,
      "step": 80
    },
    {
      "epoch": 0.26,
      "grad_norm": 10.790963172912598,
      "learning_rate": 4.245283018867925e-05,
      "loss": 0.9195,
      "step": 90
    },
    {
      "epoch": 0.28,
      "grad_norm": 14.884149551391602,
      "learning_rate": 4.716981132075472e-05,
      "loss": 0.8209,
      "step": 100
    },
    {
      "epoch": 0.31,
      "grad_norm": 14.443344116210938,
      "learning_rate": 4.978880675818374e-05,
      "loss": 0.7393,
      "step": 110
    },
    {
      "epoch": 0.34,
      "grad_norm": 15.056041717529297,
      "learning_rate": 4.9260823653643085e-05,
      "loss": 0.7175,
      "step": 120
    },
    {
      "epoch": 0.37,
      "grad_norm": 10.33287525177002,
      "learning_rate": 4.8732840549102435e-05,
      "loss": 0.7329,
      "step": 130
    },
    {
      "epoch": 0.4,
      "grad_norm": 12.791647911071777,
      "learning_rate": 4.820485744456177e-05,
      "loss": 0.7075,
      "step": 140
    },
    {
      "epoch": 0.43,
      "grad_norm": 12.029749870300293,
      "learning_rate": 4.767687434002112e-05,
      "loss": 0.6584,
      "step": 150
    },
    {
      "epoch": 0.45,
      "grad_norm": 11.643570899963379,
      "learning_rate": 4.7148891235480466e-05,
      "loss": 0.6047,
      "step": 160
    },
    {
      "epoch": 0.48,
      "grad_norm": 19.02515983581543,
      "learning_rate": 4.662090813093981e-05,
      "loss": 0.6052,
      "step": 170
    },
    {
      "epoch": 0.51,
      "grad_norm": 10.113385200500488,
      "learning_rate": 4.609292502639916e-05,
      "loss": 0.6043,
      "step": 180
    },
    {
      "epoch": 0.54,
      "grad_norm": 12.620966911315918,
      "learning_rate": 4.55649419218585e-05,
      "loss": 0.5803,
      "step": 190
    },
    {
      "epoch": 0.57,
      "grad_norm": 8.98147201538086,
      "learning_rate": 4.503695881731785e-05,
      "loss": 0.5365,
      "step": 200
    },
    {
      "epoch": 0.6,
      "grad_norm": 11.942453384399414,
      "learning_rate": 4.45089757127772e-05,
      "loss": 0.6061,
      "step": 210
    },
    {
      "epoch": 0.63,
      "grad_norm": 8.406760215759277,
      "learning_rate": 4.398099260823654e-05,
      "loss": 0.4963,
      "step": 220
    },
    {
      "epoch": 0.65,
      "grad_norm": 8.798361778259277,
      "learning_rate": 4.3453009503695884e-05,
      "loss": 0.552,
      "step": 230
    },
    {
      "epoch": 0.68,
      "grad_norm": 8.614947319030762,
      "learning_rate": 4.292502639915523e-05,
      "loss": 0.5015,
      "step": 240
    },
    {
      "epoch": 0.71,
      "grad_norm": 9.218803405761719,
      "learning_rate": 4.239704329461457e-05,
      "loss": 0.5246,
      "step": 250
    },
    {
      "epoch": 0.74,
      "grad_norm": 9.763008117675781,
      "learning_rate": 4.186906019007392e-05,
      "loss": 0.5188,
      "step": 260
    },
    {
      "epoch": 0.77,
      "grad_norm": 10.057807922363281,
      "learning_rate": 4.1341077085533265e-05,
      "loss": 0.4884,
      "step": 270
    },
    {
      "epoch": 0.8,
      "grad_norm": 10.842576026916504,
      "learning_rate": 4.081309398099261e-05,
      "loss": 0.5345,
      "step": 280
    },
    {
      "epoch": 0.82,
      "grad_norm": 8.137351036071777,
      "learning_rate": 4.028511087645195e-05,
      "loss": 0.5157,
      "step": 290
    },
    {
      "epoch": 0.85,
      "grad_norm": 14.897061347961426,
      "learning_rate": 3.97571277719113e-05,
      "loss": 0.5212,
      "step": 300
    },
    {
      "epoch": 0.88,
      "grad_norm": 11.816178321838379,
      "learning_rate": 3.9229144667370646e-05,
      "loss": 0.5337,
      "step": 310
    },
    {
      "epoch": 0.91,
      "grad_norm": 10.712966918945312,
      "learning_rate": 3.870116156282999e-05,
      "loss": 0.5154,
      "step": 320
    },
    {
      "epoch": 0.94,
      "grad_norm": 7.876273155212402,
      "learning_rate": 3.817317845828934e-05,
      "loss": 0.4833,
      "step": 330
    },
    {
      "epoch": 0.97,
      "grad_norm": 14.542892456054688,
      "learning_rate": 3.764519535374868e-05,
      "loss": 0.5037,
      "step": 340
    },
    {
      "epoch": 1.0,
      "grad_norm": 8.809218406677246,
      "learning_rate": 3.711721224920803e-05,
      "loss": 0.5592,
      "step": 350
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9538,
      "eval_loss": 0.14359258115291595,
      "eval_runtime": 31.1413,
      "eval_samples_per_second": 160.558,
      "eval_steps_per_second": 5.042,
      "step": 351
    },
    {
      "epoch": 1.02,
      "grad_norm": 8.733403205871582,
      "learning_rate": 3.658922914466738e-05,
      "loss": 0.4327,
      "step": 360
    },
    {
      "epoch": 1.05,
      "grad_norm": 9.024731636047363,
      "learning_rate": 3.6061246040126714e-05,
      "loss": 0.4674,
      "step": 370
    },
    {
      "epoch": 1.08,
      "grad_norm": 8.668274879455566,
      "learning_rate": 3.5533262935586064e-05,
      "loss": 0.4807,
      "step": 380
    },
    {
      "epoch": 1.11,
      "grad_norm": 11.616405487060547,
      "learning_rate": 3.500527983104541e-05,
      "loss": 0.51,
      "step": 390
    },
    {
      "epoch": 1.14,
      "grad_norm": 8.094477653503418,
      "learning_rate": 3.447729672650475e-05,
      "loss": 0.4959,
      "step": 400
    },
    {
      "epoch": 1.17,
      "grad_norm": 11.38330364227295,
      "learning_rate": 3.3949313621964095e-05,
      "loss": 0.4142,
      "step": 410
    },
    {
      "epoch": 1.19,
      "grad_norm": 13.148619651794434,
      "learning_rate": 3.3421330517423445e-05,
      "loss": 0.4217,
      "step": 420
    },
    {
      "epoch": 1.22,
      "grad_norm": 20.912269592285156,
      "learning_rate": 3.289334741288279e-05,
      "loss": 0.4437,
      "step": 430
    },
    {
      "epoch": 1.25,
      "grad_norm": 12.211991310119629,
      "learning_rate": 3.236536430834213e-05,
      "loss": 0.4687,
      "step": 440
    },
    {
      "epoch": 1.28,
      "grad_norm": 11.126351356506348,
      "learning_rate": 3.183738120380148e-05,
      "loss": 0.4691,
      "step": 450
    },
    {
      "epoch": 1.31,
      "grad_norm": 9.944380760192871,
      "learning_rate": 3.130939809926082e-05,
      "loss": 0.4593,
      "step": 460
    },
    {
      "epoch": 1.34,
      "grad_norm": 10.881024360656738,
      "learning_rate": 3.078141499472017e-05,
      "loss": 0.4594,
      "step": 470
    },
    {
      "epoch": 1.36,
      "grad_norm": 7.276984214782715,
      "learning_rate": 3.0253431890179517e-05,
      "loss": 0.4566,
      "step": 480
    },
    {
      "epoch": 1.39,
      "grad_norm": 11.3895263671875,
      "learning_rate": 2.972544878563886e-05,
      "loss": 0.4061,
      "step": 490
    },
    {
      "epoch": 1.42,
      "grad_norm": 8.26075267791748,
      "learning_rate": 2.9197465681098207e-05,
      "loss": 0.4233,
      "step": 500
    },
    {
      "epoch": 1.45,
      "grad_norm": 9.681741714477539,
      "learning_rate": 2.8669482576557548e-05,
      "loss": 0.4564,
      "step": 510
    },
    {
      "epoch": 1.48,
      "grad_norm": 12.245609283447266,
      "learning_rate": 2.8141499472016898e-05,
      "loss": 0.4561,
      "step": 520
    },
    {
      "epoch": 1.51,
      "grad_norm": 10.530965805053711,
      "learning_rate": 2.7613516367476245e-05,
      "loss": 0.429,
      "step": 530
    },
    {
      "epoch": 1.54,
      "grad_norm": 6.2105889320373535,
      "learning_rate": 2.7085533262935585e-05,
      "loss": 0.3995,
      "step": 540
    },
    {
      "epoch": 1.56,
      "grad_norm": 9.41408920288086,
      "learning_rate": 2.6557550158394935e-05,
      "loss": 0.4159,
      "step": 550
    },
    {
      "epoch": 1.59,
      "grad_norm": 10.506197929382324,
      "learning_rate": 2.6029567053854276e-05,
      "loss": 0.4396,
      "step": 560
    },
    {
      "epoch": 1.62,
      "grad_norm": 9.805855751037598,
      "learning_rate": 2.5501583949313622e-05,
      "loss": 0.4258,
      "step": 570
    },
    {
      "epoch": 1.65,
      "grad_norm": 6.874490261077881,
      "learning_rate": 2.497360084477297e-05,
      "loss": 0.4014,
      "step": 580
    },
    {
      "epoch": 1.68,
      "grad_norm": 9.369732856750488,
      "learning_rate": 2.4445617740232313e-05,
      "loss": 0.4045,
      "step": 590
    },
    {
      "epoch": 1.71,
      "grad_norm": 10.258170127868652,
      "learning_rate": 2.391763463569166e-05,
      "loss": 0.4133,
      "step": 600
    },
    {
      "epoch": 1.73,
      "grad_norm": 9.34408950805664,
      "learning_rate": 2.3389651531151003e-05,
      "loss": 0.4196,
      "step": 610
    },
    {
      "epoch": 1.76,
      "grad_norm": 9.928686141967773,
      "learning_rate": 2.286166842661035e-05,
      "loss": 0.4014,
      "step": 620
    },
    {
      "epoch": 1.79,
      "grad_norm": 10.1293306350708,
      "learning_rate": 2.2333685322069694e-05,
      "loss": 0.427,
      "step": 630
    },
    {
      "epoch": 1.82,
      "grad_norm": 10.649211883544922,
      "learning_rate": 2.180570221752904e-05,
      "loss": 0.3947,
      "step": 640
    },
    {
      "epoch": 1.85,
      "grad_norm": 12.145565032958984,
      "learning_rate": 2.1277719112988384e-05,
      "loss": 0.4131,
      "step": 650
    },
    {
      "epoch": 1.88,
      "grad_norm": 9.348143577575684,
      "learning_rate": 2.074973600844773e-05,
      "loss": 0.459,
      "step": 660
    },
    {
      "epoch": 1.9,
      "grad_norm": 8.802924156188965,
      "learning_rate": 2.0221752903907075e-05,
      "loss": 0.3921,
      "step": 670
    },
    {
      "epoch": 1.93,
      "grad_norm": 18.41295051574707,
      "learning_rate": 1.9693769799366422e-05,
      "loss": 0.3756,
      "step": 680
    },
    {
      "epoch": 1.96,
      "grad_norm": 8.476001739501953,
      "learning_rate": 1.9165786694825765e-05,
      "loss": 0.3941,
      "step": 690
    },
    {
      "epoch": 1.99,
      "grad_norm": 9.943327903747559,
      "learning_rate": 1.863780359028511e-05,
      "loss": 0.3338,
      "step": 700
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9632,
      "eval_loss": 0.11315808445215225,
      "eval_runtime": 31.2355,
      "eval_samples_per_second": 160.074,
      "eval_steps_per_second": 5.026,
      "step": 703
    },
    {
      "epoch": 2.02,
      "grad_norm": 10.112526893615723,
      "learning_rate": 1.810982048574446e-05,
      "loss": 0.3586,
      "step": 710
    },
    {
      "epoch": 2.05,
      "grad_norm": 9.651534080505371,
      "learning_rate": 1.7581837381203803e-05,
      "loss": 0.3957,
      "step": 720
    },
    {
      "epoch": 2.08,
      "grad_norm": 7.620846748352051,
      "learning_rate": 1.7053854276663146e-05,
      "loss": 0.402,
      "step": 730
    },
    {
      "epoch": 2.1,
      "grad_norm": 10.414019584655762,
      "learning_rate": 1.6525871172122493e-05,
      "loss": 0.3809,
      "step": 740
    },
    {
      "epoch": 2.13,
      "grad_norm": 9.351815223693848,
      "learning_rate": 1.5997888067581837e-05,
      "loss": 0.3745,
      "step": 750
    },
    {
      "epoch": 2.16,
      "grad_norm": 10.798675537109375,
      "learning_rate": 1.5469904963041184e-05,
      "loss": 0.3507,
      "step": 760
    },
    {
      "epoch": 2.19,
      "grad_norm": 10.772116661071777,
      "learning_rate": 1.4941921858500529e-05,
      "loss": 0.3589,
      "step": 770
    },
    {
      "epoch": 2.22,
      "grad_norm": 9.006869316101074,
      "learning_rate": 1.4413938753959874e-05,
      "loss": 0.359,
      "step": 780
    },
    {
      "epoch": 2.25,
      "grad_norm": 7.237900257110596,
      "learning_rate": 1.388595564941922e-05,
      "loss": 0.3636,
      "step": 790
    },
    {
      "epoch": 2.27,
      "grad_norm": 8.02499008178711,
      "learning_rate": 1.3357972544878563e-05,
      "loss": 0.3817,
      "step": 800
    },
    {
      "epoch": 2.3,
      "grad_norm": 7.373661041259766,
      "learning_rate": 1.2829989440337912e-05,
      "loss": 0.3812,
      "step": 810
    },
    {
      "epoch": 2.33,
      "grad_norm": 9.66775131225586,
      "learning_rate": 1.2302006335797255e-05,
      "loss": 0.3681,
      "step": 820
    },
    {
      "epoch": 2.36,
      "grad_norm": 9.115776062011719,
      "learning_rate": 1.17740232312566e-05,
      "loss": 0.4156,
      "step": 830
    },
    {
      "epoch": 2.39,
      "grad_norm": 8.92050838470459,
      "learning_rate": 1.1246040126715946e-05,
      "loss": 0.3629,
      "step": 840
    },
    {
      "epoch": 2.42,
      "grad_norm": 7.8812031745910645,
      "learning_rate": 1.0718057022175291e-05,
      "loss": 0.356,
      "step": 850
    },
    {
      "epoch": 2.44,
      "grad_norm": 5.577208518981934,
      "learning_rate": 1.0190073917634636e-05,
      "loss": 0.3653,
      "step": 860
    },
    {
      "epoch": 2.47,
      "grad_norm": 9.963196754455566,
      "learning_rate": 9.662090813093982e-06,
      "loss": 0.3648,
      "step": 870
    },
    {
      "epoch": 2.5,
      "grad_norm": 10.441061973571777,
      "learning_rate": 9.134107708553327e-06,
      "loss": 0.3671,
      "step": 880
    },
    {
      "epoch": 2.53,
      "grad_norm": 11.808416366577148,
      "learning_rate": 8.606124604012672e-06,
      "loss": 0.3012,
      "step": 890
    },
    {
      "epoch": 2.56,
      "grad_norm": 7.2837138175964355,
      "learning_rate": 8.078141499472017e-06,
      "loss": 0.4002,
      "step": 900
    },
    {
      "epoch": 2.59,
      "grad_norm": 7.841160297393799,
      "learning_rate": 7.5501583949313625e-06,
      "loss": 0.424,
      "step": 910
    },
    {
      "epoch": 2.62,
      "grad_norm": 11.717653274536133,
      "learning_rate": 7.022175290390708e-06,
      "loss": 0.3432,
      "step": 920
    },
    {
      "epoch": 2.64,
      "grad_norm": 7.797212600708008,
      "learning_rate": 6.494192185850054e-06,
      "loss": 0.3618,
      "step": 930
    },
    {
      "epoch": 2.67,
      "grad_norm": 9.33782958984375,
      "learning_rate": 5.966209081309398e-06,
      "loss": 0.3559,
      "step": 940
    },
    {
      "epoch": 2.7,
      "grad_norm": 8.307150840759277,
      "learning_rate": 5.438225976768744e-06,
      "loss": 0.3601,
      "step": 950
    },
    {
      "epoch": 2.73,
      "grad_norm": 8.53368854522705,
      "learning_rate": 4.910242872228089e-06,
      "loss": 0.3319,
      "step": 960
    },
    {
      "epoch": 2.76,
      "grad_norm": 8.487812042236328,
      "learning_rate": 4.382259767687434e-06,
      "loss": 0.3331,
      "step": 970
    },
    {
      "epoch": 2.79,
      "grad_norm": 10.636713027954102,
      "learning_rate": 3.854276663146779e-06,
      "loss": 0.359,
      "step": 980
    },
    {
      "epoch": 2.81,
      "grad_norm": 13.220087051391602,
      "learning_rate": 3.326293558606125e-06,
      "loss": 0.3598,
      "step": 990
    },
    {
      "epoch": 2.84,
      "grad_norm": 7.285949230194092,
      "learning_rate": 2.79831045406547e-06,
      "loss": 0.3503,
      "step": 1000
    },
    {
      "epoch": 2.87,
      "grad_norm": 8.310001373291016,
      "learning_rate": 2.2703273495248154e-06,
      "loss": 0.3566,
      "step": 1010
    },
    {
      "epoch": 2.9,
      "grad_norm": 9.105748176574707,
      "learning_rate": 1.7423442449841606e-06,
      "loss": 0.3345,
      "step": 1020
    },
    {
      "epoch": 2.93,
      "grad_norm": 7.4982008934021,
      "learning_rate": 1.2143611404435059e-06,
      "loss": 0.3398,
      "step": 1030
    },
    {
      "epoch": 2.96,
      "grad_norm": 11.533191680908203,
      "learning_rate": 6.863780359028511e-07,
      "loss": 0.3275,
      "step": 1040
    },
    {
      "epoch": 2.99,
      "grad_norm": 7.991899013519287,
      "learning_rate": 1.5839493136219642e-07,
      "loss": 0.3196,
      "step": 1050
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.969,
      "eval_loss": 0.09590400755405426,
      "eval_runtime": 31.5821,
      "eval_samples_per_second": 158.318,
      "eval_steps_per_second": 4.971,
      "step": 1053
    },
    {
      "epoch": 2.99,
      "step": 1053,
      "total_flos": 3.3497451642252165e+18,
      "train_loss": 0.5486761254009924,
      "train_runtime": 2094.2938,
      "train_samples_per_second": 64.461,
      "train_steps_per_second": 0.503
    }
  ],
  "logging_steps": 10,
  "max_steps": 1053,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 3.3497451642252165e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}