{
  "best_metric": 0.714,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-843",
  "epoch": 2.997333333333333,
  "eval_steps": 500,
  "global_step": 843,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 7.188250541687012,
      "learning_rate": 5.882352941176471e-06,
      "loss": 2.7793,
      "step": 10
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 15.252310752868652,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 2.7235,
      "step": 20
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 20.026681900024414,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 2.6203,
      "step": 30
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 12.86706829071045,
      "learning_rate": 2.3529411764705884e-05,
      "loss": 2.4325,
      "step": 40
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 22.171051025390625,
      "learning_rate": 2.9411764705882354e-05,
      "loss": 2.1824,
      "step": 50
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 16.04732894897461,
      "learning_rate": 3.529411764705883e-05,
      "loss": 1.977,
      "step": 60
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 11.828266143798828,
      "learning_rate": 4.11764705882353e-05,
      "loss": 1.8274,
      "step": 70
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 21.65501594543457,
      "learning_rate": 4.705882352941177e-05,
      "loss": 1.7634,
      "step": 80
    },
    {
      "epoch": 0.32,
      "grad_norm": 9.372465133666992,
      "learning_rate": 4.967018469656992e-05,
      "loss": 1.7128,
      "step": 90
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 8.170567512512207,
      "learning_rate": 4.901055408970976e-05,
      "loss": 1.5845,
      "step": 100
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 8.884113311767578,
      "learning_rate": 4.835092348284961e-05,
      "loss": 1.5652,
      "step": 110
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 9.709837913513184,
      "learning_rate": 4.7691292875989446e-05,
      "loss": 1.6439,
      "step": 120
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 12.577466011047363,
      "learning_rate": 4.703166226912929e-05,
      "loss": 1.6118,
      "step": 130
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 8.617718696594238,
      "learning_rate": 4.6372031662269136e-05,
      "loss": 1.4722,
      "step": 140
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 11.635614395141602,
      "learning_rate": 4.5712401055408974e-05,
      "loss": 1.4748,
      "step": 150
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 10.609350204467773,
      "learning_rate": 4.505277044854881e-05,
      "loss": 1.4546,
      "step": 160
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 9.702072143554688,
      "learning_rate": 4.439313984168866e-05,
      "loss": 1.4868,
      "step": 170
    },
    {
      "epoch": 0.64,
      "grad_norm": 8.165207862854004,
      "learning_rate": 4.3733509234828496e-05,
      "loss": 1.4197,
      "step": 180
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 8.345643043518066,
      "learning_rate": 4.307387862796834e-05,
      "loss": 1.4298,
      "step": 190
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 9.1146821975708,
      "learning_rate": 4.2414248021108186e-05,
      "loss": 1.437,
      "step": 200
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 10.016292572021484,
      "learning_rate": 4.1754617414248024e-05,
      "loss": 1.3961,
      "step": 210
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 6.980156421661377,
      "learning_rate": 4.109498680738786e-05,
      "loss": 1.3571,
      "step": 220
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 9.158329010009766,
      "learning_rate": 4.043535620052771e-05,
      "loss": 1.3691,
      "step": 230
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 8.437979698181152,
      "learning_rate": 3.9775725593667545e-05,
      "loss": 1.2722,
      "step": 240
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 7.31761360168457,
      "learning_rate": 3.911609498680739e-05,
      "loss": 1.3382,
      "step": 250
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 10.811269760131836,
      "learning_rate": 3.8456464379947235e-05,
      "loss": 1.3005,
      "step": 260
    },
    {
      "epoch": 0.96,
      "grad_norm": 7.826028823852539,
      "learning_rate": 3.779683377308707e-05,
      "loss": 1.2971,
      "step": 270
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 8.89599609375,
      "learning_rate": 3.713720316622691e-05,
      "loss": 1.3142,
      "step": 280
    },
    {
      "epoch": 0.9991111111111111,
      "eval_accuracy": 0.629,
      "eval_loss": 1.176965594291687,
      "eval_runtime": 61.7496,
      "eval_samples_per_second": 64.778,
      "eval_steps_per_second": 2.024,
      "step": 281
    },
    {
      "epoch": 1.031111111111111,
      "grad_norm": 8.035099029541016,
      "learning_rate": 3.6477572559366756e-05,
      "loss": 1.2879,
      "step": 290
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 10.644525527954102,
      "learning_rate": 3.58179419525066e-05,
      "loss": 1.2803,
      "step": 300
    },
    {
      "epoch": 1.1022222222222222,
      "grad_norm": 12.751725196838379,
      "learning_rate": 3.515831134564644e-05,
      "loss": 1.2765,
      "step": 310
    },
    {
      "epoch": 1.1377777777777778,
      "grad_norm": 9.897214889526367,
      "learning_rate": 3.4498680738786285e-05,
      "loss": 1.234,
      "step": 320
    },
    {
      "epoch": 1.1733333333333333,
      "grad_norm": 6.898780822753906,
      "learning_rate": 3.383905013192612e-05,
      "loss": 1.2247,
      "step": 330
    },
    {
      "epoch": 1.208888888888889,
      "grad_norm": 9.826775550842285,
      "learning_rate": 3.317941952506596e-05,
      "loss": 1.302,
      "step": 340
    },
    {
      "epoch": 1.2444444444444445,
      "grad_norm": 7.03531551361084,
      "learning_rate": 3.2519788918205806e-05,
      "loss": 1.2956,
      "step": 350
    },
    {
      "epoch": 1.28,
      "grad_norm": 7.564468860626221,
      "learning_rate": 3.186015831134565e-05,
      "loss": 1.2087,
      "step": 360
    },
    {
      "epoch": 1.3155555555555556,
      "grad_norm": 9.157870292663574,
      "learning_rate": 3.120052770448549e-05,
      "loss": 1.2794,
      "step": 370
    },
    {
      "epoch": 1.3511111111111112,
      "grad_norm": 6.955115795135498,
      "learning_rate": 3.0540897097625334e-05,
      "loss": 1.2916,
      "step": 380
    },
    {
      "epoch": 1.3866666666666667,
      "grad_norm": 8.055458068847656,
      "learning_rate": 2.9881266490765176e-05,
      "loss": 1.2643,
      "step": 390
    },
    {
      "epoch": 1.4222222222222223,
      "grad_norm": 8.577414512634277,
      "learning_rate": 2.9221635883905014e-05,
      "loss": 1.2435,
      "step": 400
    },
    {
      "epoch": 1.4577777777777778,
      "grad_norm": 8.334266662597656,
      "learning_rate": 2.8562005277044855e-05,
      "loss": 1.1744,
      "step": 410
    },
    {
      "epoch": 1.4933333333333334,
      "grad_norm": 7.725725173950195,
      "learning_rate": 2.79023746701847e-05,
      "loss": 1.1909,
      "step": 420
    },
    {
      "epoch": 1.528888888888889,
      "grad_norm": 10.035604476928711,
      "learning_rate": 2.724274406332454e-05,
      "loss": 1.1839,
      "step": 430
    },
    {
      "epoch": 1.5644444444444443,
      "grad_norm": 8.928838729858398,
      "learning_rate": 2.658311345646438e-05,
      "loss": 1.2524,
      "step": 440
    },
    {
      "epoch": 1.6,
      "grad_norm": 10.327030181884766,
      "learning_rate": 2.5923482849604225e-05,
      "loss": 1.2254,
      "step": 450
    },
    {
      "epoch": 1.6355555555555554,
      "grad_norm": 7.943435192108154,
      "learning_rate": 2.5263852242744063e-05,
      "loss": 1.1067,
      "step": 460
    },
    {
      "epoch": 1.6711111111111112,
      "grad_norm": 9.097575187683105,
      "learning_rate": 2.4604221635883905e-05,
      "loss": 1.1412,
      "step": 470
    },
    {
      "epoch": 1.7066666666666666,
      "grad_norm": 8.329163551330566,
      "learning_rate": 2.3944591029023746e-05,
      "loss": 1.1277,
      "step": 480
    },
    {
      "epoch": 1.7422222222222223,
      "grad_norm": 9.431710243225098,
      "learning_rate": 2.328496042216359e-05,
      "loss": 1.1697,
      "step": 490
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 7.484973907470703,
      "learning_rate": 2.262532981530343e-05,
      "loss": 1.1522,
      "step": 500
    },
    {
      "epoch": 1.8133333333333335,
      "grad_norm": 8.600388526916504,
      "learning_rate": 2.196569920844327e-05,
      "loss": 1.1673,
      "step": 510
    },
    {
      "epoch": 1.8488888888888888,
      "grad_norm": 7.894708156585693,
      "learning_rate": 2.1306068601583116e-05,
      "loss": 1.1206,
      "step": 520
    },
    {
      "epoch": 1.8844444444444446,
      "grad_norm": 9.636763572692871,
      "learning_rate": 2.0646437994722954e-05,
      "loss": 1.2176,
      "step": 530
    },
    {
      "epoch": 1.92,
      "grad_norm": 9.740330696105957,
      "learning_rate": 1.9986807387862796e-05,
      "loss": 1.2078,
      "step": 540
    },
    {
      "epoch": 1.9555555555555557,
      "grad_norm": 7.4491353034973145,
      "learning_rate": 1.932717678100264e-05,
      "loss": 1.1472,
      "step": 550
    },
    {
      "epoch": 1.991111111111111,
      "grad_norm": 10.187119483947754,
      "learning_rate": 1.866754617414248e-05,
      "loss": 1.0761,
      "step": 560
    },
    {
      "epoch": 1.9982222222222221,
      "eval_accuracy": 0.69825,
      "eval_loss": 1.0089702606201172,
      "eval_runtime": 60.6333,
      "eval_samples_per_second": 65.97,
      "eval_steps_per_second": 2.062,
      "step": 562
    },
    {
      "epoch": 2.026666666666667,
      "grad_norm": 10.728060722351074,
      "learning_rate": 1.8007915567282324e-05,
      "loss": 1.1074,
      "step": 570
    },
    {
      "epoch": 2.062222222222222,
      "grad_norm": 6.846221923828125,
      "learning_rate": 1.7348284960422166e-05,
      "loss": 1.1202,
      "step": 580
    },
    {
      "epoch": 2.097777777777778,
      "grad_norm": 7.134582996368408,
      "learning_rate": 1.6688654353562007e-05,
      "loss": 1.1013,
      "step": 590
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 6.4924397468566895,
      "learning_rate": 1.602902374670185e-05,
      "loss": 1.1429,
      "step": 600
    },
    {
      "epoch": 2.168888888888889,
      "grad_norm": 7.079750061035156,
      "learning_rate": 1.536939313984169e-05,
      "loss": 1.1282,
      "step": 610
    },
    {
      "epoch": 2.2044444444444444,
      "grad_norm": 11.857382774353027,
      "learning_rate": 1.470976253298153e-05,
      "loss": 1.0689,
      "step": 620
    },
    {
      "epoch": 2.24,
      "grad_norm": 7.431779861450195,
      "learning_rate": 1.4050131926121373e-05,
      "loss": 1.1116,
      "step": 630
    },
    {
      "epoch": 2.2755555555555556,
      "grad_norm": 8.456690788269043,
      "learning_rate": 1.3390501319261215e-05,
      "loss": 1.0721,
      "step": 640
    },
    {
      "epoch": 2.311111111111111,
      "grad_norm": 10.243916511535645,
      "learning_rate": 1.2730870712401055e-05,
      "loss": 1.1046,
      "step": 650
    },
    {
      "epoch": 2.3466666666666667,
      "grad_norm": 9.12217903137207,
      "learning_rate": 1.2071240105540896e-05,
      "loss": 1.1231,
      "step": 660
    },
    {
      "epoch": 2.3822222222222225,
      "grad_norm": 6.960771560668945,
      "learning_rate": 1.141160949868074e-05,
      "loss": 1.0449,
      "step": 670
    },
    {
      "epoch": 2.417777777777778,
      "grad_norm": 6.853394031524658,
      "learning_rate": 1.0751978891820581e-05,
      "loss": 1.0166,
      "step": 680
    },
    {
      "epoch": 2.453333333333333,
      "grad_norm": 6.347965717315674,
      "learning_rate": 1.0092348284960421e-05,
      "loss": 1.0753,
      "step": 690
    },
    {
      "epoch": 2.488888888888889,
      "grad_norm": 7.328911304473877,
      "learning_rate": 9.432717678100264e-06,
      "loss": 1.0528,
      "step": 700
    },
    {
      "epoch": 2.5244444444444447,
      "grad_norm": 7.785583019256592,
      "learning_rate": 8.773087071240106e-06,
      "loss": 1.1255,
      "step": 710
    },
    {
      "epoch": 2.56,
      "grad_norm": 8.781428337097168,
      "learning_rate": 8.113456464379948e-06,
      "loss": 1.0989,
      "step": 720
    },
    {
      "epoch": 2.5955555555555554,
      "grad_norm": 6.994375228881836,
      "learning_rate": 7.453825857519789e-06,
      "loss": 1.0473,
      "step": 730
    },
    {
      "epoch": 2.631111111111111,
      "grad_norm": 8.248795509338379,
      "learning_rate": 6.794195250659631e-06,
      "loss": 1.0968,
      "step": 740
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 7.137526035308838,
      "learning_rate": 6.134564643799472e-06,
      "loss": 1.0421,
      "step": 750
    },
    {
      "epoch": 2.7022222222222223,
      "grad_norm": 10.059906959533691,
      "learning_rate": 5.474934036939315e-06,
      "loss": 1.0794,
      "step": 760
    },
    {
      "epoch": 2.7377777777777776,
      "grad_norm": 7.585984230041504,
      "learning_rate": 4.8153034300791555e-06,
      "loss": 1.0677,
      "step": 770
    },
    {
      "epoch": 2.7733333333333334,
      "grad_norm": 7.5168137550354,
      "learning_rate": 4.155672823218998e-06,
      "loss": 0.9899,
      "step": 780
    },
    {
      "epoch": 2.8088888888888888,
      "grad_norm": 8.242053985595703,
      "learning_rate": 3.496042216358839e-06,
      "loss": 1.1027,
      "step": 790
    },
    {
      "epoch": 2.8444444444444446,
      "grad_norm": 7.243769645690918,
      "learning_rate": 2.836411609498681e-06,
      "loss": 1.1244,
      "step": 800
    },
    {
      "epoch": 2.88,
      "grad_norm": 9.468960762023926,
      "learning_rate": 2.1767810026385226e-06,
      "loss": 1.076,
      "step": 810
    },
    {
      "epoch": 2.9155555555555557,
      "grad_norm": 9.269329071044922,
      "learning_rate": 1.5171503957783642e-06,
      "loss": 1.0397,
      "step": 820
    },
    {
      "epoch": 2.951111111111111,
      "grad_norm": 8.011164665222168,
      "learning_rate": 8.575197889182058e-07,
      "loss": 1.0792,
      "step": 830
    },
    {
      "epoch": 2.986666666666667,
      "grad_norm": 7.399364471435547,
      "learning_rate": 1.9788918205804752e-07,
      "loss": 1.0439,
      "step": 840
    },
    {
      "epoch": 2.997333333333333,
      "eval_accuracy": 0.714,
      "eval_loss": 0.9282281398773193,
      "eval_runtime": 56.4403,
      "eval_samples_per_second": 70.871,
      "eval_steps_per_second": 2.215,
      "step": 843
    },
    {
      "epoch": 2.997333333333333,
      "step": 843,
      "total_flos": 2.6829109150955274e+18,
      "train_loss": 1.3250810132756352,
      "train_runtime": 2481.2668,
      "train_samples_per_second": 43.521,
      "train_steps_per_second": 0.34
    }
  ],
  "logging_steps": 10,
  "max_steps": 843,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6829109150955274e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}