{
  "best_metric": 0.98,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-570",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 5.6030073165893555,
      "learning_rate": 8.771929824561403e-06,
      "loss": 2.3227,
      "step": 10
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.942467212677002,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 2.114,
      "step": 20
    },
    {
      "epoch": 0.16,
      "grad_norm": 9.006362915039062,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.7361,
      "step": 30
    },
    {
      "epoch": 0.21,
      "grad_norm": 14.675954818725586,
      "learning_rate": 3.508771929824561e-05,
      "loss": 1.1337,
      "step": 40
    },
    {
      "epoch": 0.26,
      "grad_norm": 13.018393516540527,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.6849,
      "step": 50
    },
    {
      "epoch": 0.32,
      "grad_norm": 29.24870491027832,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.4908,
      "step": 60
    },
    {
      "epoch": 0.37,
      "grad_norm": 17.498170852661133,
      "learning_rate": 4.8732943469785574e-05,
      "loss": 0.4472,
      "step": 70
    },
    {
      "epoch": 0.42,
      "grad_norm": 30.422813415527344,
      "learning_rate": 4.7758284600389865e-05,
      "loss": 0.4151,
      "step": 80
    },
    {
      "epoch": 0.47,
      "grad_norm": 15.24944019317627,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.3611,
      "step": 90
    },
    {
      "epoch": 0.53,
      "grad_norm": 18.759361267089844,
      "learning_rate": 4.580896686159844e-05,
      "loss": 0.3719,
      "step": 100
    },
    {
      "epoch": 0.58,
      "grad_norm": 14.161396026611328,
      "learning_rate": 4.483430799220273e-05,
      "loss": 0.3395,
      "step": 110
    },
    {
      "epoch": 0.63,
      "grad_norm": 9.216828346252441,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.3214,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 10.63085651397705,
      "learning_rate": 4.2884990253411305e-05,
      "loss": 0.327,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 14.545869827270508,
      "learning_rate": 4.1910331384015596e-05,
      "loss": 0.3075,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 14.948440551757812,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.2626,
      "step": 150
    },
    {
      "epoch": 0.84,
      "grad_norm": 16.263952255249023,
      "learning_rate": 3.996101364522417e-05,
      "loss": 0.2731,
      "step": 160
    },
    {
      "epoch": 0.89,
      "grad_norm": 8.593974113464355,
      "learning_rate": 3.898635477582846e-05,
      "loss": 0.2553,
      "step": 170
    },
    {
      "epoch": 0.95,
      "grad_norm": 11.096720695495605,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.2696,
      "step": 180
    },
    {
      "epoch": 1.0,
      "grad_norm": 9.849555015563965,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2994,
      "step": 190
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9603703703703703,
      "eval_loss": 0.12342998385429382,
      "eval_runtime": 17.2042,
      "eval_samples_per_second": 156.938,
      "eval_steps_per_second": 4.941,
      "step": 190
    },
    {
      "epoch": 1.05,
      "grad_norm": 11.277251243591309,
      "learning_rate": 3.606237816764133e-05,
      "loss": 0.2387,
      "step": 200
    },
    {
      "epoch": 1.11,
      "grad_norm": 8.723546028137207,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.2595,
      "step": 210
    },
    {
      "epoch": 1.16,
      "grad_norm": 15.544955253601074,
      "learning_rate": 3.41130604288499e-05,
      "loss": 0.224,
      "step": 220
    },
    {
      "epoch": 1.21,
      "grad_norm": 14.086812019348145,
      "learning_rate": 3.313840155945419e-05,
      "loss": 0.235,
      "step": 230
    },
    {
      "epoch": 1.26,
      "grad_norm": 9.04621410369873,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.2281,
      "step": 240
    },
    {
      "epoch": 1.32,
      "grad_norm": 11.80008602142334,
      "learning_rate": 3.118908382066277e-05,
      "loss": 0.2342,
      "step": 250
    },
    {
      "epoch": 1.37,
      "grad_norm": 3.7134058475494385,
      "learning_rate": 3.0214424951267055e-05,
      "loss": 0.222,
      "step": 260
    },
    {
      "epoch": 1.42,
      "grad_norm": 6.554859638214111,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.2025,
      "step": 270
    },
    {
      "epoch": 1.47,
      "grad_norm": 9.202827453613281,
      "learning_rate": 2.8265107212475634e-05,
      "loss": 0.1814,
      "step": 280
    },
    {
      "epoch": 1.53,
      "grad_norm": 8.696596145629883,
      "learning_rate": 2.729044834307992e-05,
      "loss": 0.203,
      "step": 290
    },
    {
      "epoch": 1.58,
      "grad_norm": 10.952228546142578,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.1701,
      "step": 300
    },
    {
      "epoch": 1.63,
      "grad_norm": 10.55245304107666,
      "learning_rate": 2.53411306042885e-05,
      "loss": 0.1732,
      "step": 310
    },
    {
      "epoch": 1.68,
      "grad_norm": 12.811739921569824,
      "learning_rate": 2.4366471734892787e-05,
      "loss": 0.1906,
      "step": 320
    },
    {
      "epoch": 1.74,
      "grad_norm": 9.685540199279785,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.1975,
      "step": 330
    },
    {
      "epoch": 1.79,
      "grad_norm": 8.672029495239258,
      "learning_rate": 2.2417153996101365e-05,
      "loss": 0.1704,
      "step": 340
    },
    {
      "epoch": 1.84,
      "grad_norm": 15.387055397033691,
      "learning_rate": 2.1442495126705653e-05,
      "loss": 0.1845,
      "step": 350
    },
    {
      "epoch": 1.89,
      "grad_norm": 9.411192893981934,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.1906,
      "step": 360
    },
    {
      "epoch": 1.95,
      "grad_norm": 10.436761856079102,
      "learning_rate": 1.949317738791423e-05,
      "loss": 0.1715,
      "step": 370
    },
    {
      "epoch": 2.0,
      "grad_norm": 12.051985740661621,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1853,
      "step": 380
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9740740740740741,
      "eval_loss": 0.07047658413648605,
      "eval_runtime": 16.9051,
      "eval_samples_per_second": 159.715,
      "eval_steps_per_second": 5.028,
      "step": 380
    },
    {
      "epoch": 2.05,
      "grad_norm": 7.126999855041504,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.1961,
      "step": 390
    },
    {
      "epoch": 2.11,
      "grad_norm": 9.795936584472656,
      "learning_rate": 1.6569200779727097e-05,
      "loss": 0.1833,
      "step": 400
    },
    {
      "epoch": 2.16,
      "grad_norm": 8.326896667480469,
      "learning_rate": 1.5594541910331384e-05,
      "loss": 0.1612,
      "step": 410
    },
    {
      "epoch": 2.21,
      "grad_norm": 11.762768745422363,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.1729,
      "step": 420
    },
    {
      "epoch": 2.26,
      "grad_norm": 7.951204299926758,
      "learning_rate": 1.364522417153996e-05,
      "loss": 0.1818,
      "step": 430
    },
    {
      "epoch": 2.32,
      "grad_norm": 8.159992218017578,
      "learning_rate": 1.267056530214425e-05,
      "loss": 0.1607,
      "step": 440
    },
    {
      "epoch": 2.37,
      "grad_norm": 11.383317947387695,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.1588,
      "step": 450
    },
    {
      "epoch": 2.42,
      "grad_norm": 4.525303363800049,
      "learning_rate": 1.0721247563352826e-05,
      "loss": 0.1509,
      "step": 460
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.270012855529785,
      "learning_rate": 9.746588693957115e-06,
      "loss": 0.157,
      "step": 470
    },
    {
      "epoch": 2.53,
      "grad_norm": 6.473080158233643,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.1543,
      "step": 480
    },
    {
      "epoch": 2.58,
      "grad_norm": 15.744648933410645,
      "learning_rate": 7.797270955165692e-06,
      "loss": 0.1291,
      "step": 490
    },
    {
      "epoch": 2.63,
      "grad_norm": 9.5807523727417,
      "learning_rate": 6.82261208576998e-06,
      "loss": 0.1292,
      "step": 500
    },
    {
      "epoch": 2.68,
      "grad_norm": 5.534010887145996,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.1512,
      "step": 510
    },
    {
      "epoch": 2.74,
      "grad_norm": 8.032577514648438,
      "learning_rate": 4.873294346978558e-06,
      "loss": 0.1347,
      "step": 520
    },
    {
      "epoch": 2.79,
      "grad_norm": 24.584503173828125,
      "learning_rate": 3.898635477582846e-06,
      "loss": 0.1569,
      "step": 530
    },
    {
      "epoch": 2.84,
      "grad_norm": 3.999915599822998,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.1664,
      "step": 540
    },
    {
      "epoch": 2.89,
      "grad_norm": 9.104424476623535,
      "learning_rate": 1.949317738791423e-06,
      "loss": 0.1938,
      "step": 550
    },
    {
      "epoch": 2.95,
      "grad_norm": 8.999449729919434,
      "learning_rate": 9.746588693957115e-07,
      "loss": 0.1519,
      "step": 560
    },
    {
      "epoch": 3.0,
      "grad_norm": 11.300539016723633,
      "learning_rate": 0.0,
      "loss": 0.158,
      "step": 570
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.98,
      "eval_loss": 0.06010868772864342,
      "eval_runtime": 16.8705,
      "eval_samples_per_second": 160.043,
      "eval_steps_per_second": 5.038,
      "step": 570
    },
    {
      "epoch": 3.0,
      "step": 570,
      "total_flos": 1.8124066505760768e+18,
      "train_loss": 0.34462205903571946,
      "train_runtime": 1109.7931,
      "train_samples_per_second": 65.688,
      "train_steps_per_second": 0.514
    }
  ],
  "logging_steps": 10,
  "max_steps": 570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.8124066505760768e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|