{
  "best_metric": 0.9814814814814815,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-570",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 5.7690863609313965,
      "learning_rate": 8.771929824561403e-06,
      "loss": 2.2623,
      "step": 10
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.180535316467285,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 2.0467,
      "step": 20
    },
    {
      "epoch": 0.16,
      "grad_norm": 12.502141952514648,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.6134,
      "step": 30
    },
    {
      "epoch": 0.21,
      "grad_norm": 12.182085037231445,
      "learning_rate": 3.508771929824561e-05,
      "loss": 1.0089,
      "step": 40
    },
    {
      "epoch": 0.26,
      "grad_norm": 15.876863479614258,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.6195,
      "step": 50
    },
    {
      "epoch": 0.32,
      "grad_norm": 23.71845054626465,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.5156,
      "step": 60
    },
    {
      "epoch": 0.37,
      "grad_norm": 15.944512367248535,
      "learning_rate": 4.8732943469785574e-05,
      "loss": 0.4736,
      "step": 70
    },
    {
      "epoch": 0.42,
      "grad_norm": 22.624839782714844,
      "learning_rate": 4.7758284600389865e-05,
      "loss": 0.3892,
      "step": 80
    },
    {
      "epoch": 0.47,
      "grad_norm": 13.724577903747559,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.3564,
      "step": 90
    },
    {
      "epoch": 0.53,
      "grad_norm": 9.811408996582031,
      "learning_rate": 4.580896686159844e-05,
      "loss": 0.3488,
      "step": 100
    },
    {
      "epoch": 0.58,
      "grad_norm": 7.73867654800415,
      "learning_rate": 4.483430799220273e-05,
      "loss": 0.3028,
      "step": 110
    },
    {
      "epoch": 0.63,
      "grad_norm": 14.532332420349121,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.2982,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 17.203603744506836,
      "learning_rate": 4.2884990253411305e-05,
      "loss": 0.2659,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 17.189725875854492,
      "learning_rate": 4.1910331384015596e-05,
      "loss": 0.2745,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 12.759488105773926,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.2586,
      "step": 150
    },
    {
      "epoch": 0.84,
      "grad_norm": 15.634223937988281,
      "learning_rate": 3.996101364522417e-05,
      "loss": 0.2208,
      "step": 160
    },
    {
      "epoch": 0.89,
      "grad_norm": 17.382017135620117,
      "learning_rate": 3.898635477582846e-05,
      "loss": 0.2191,
      "step": 170
    },
    {
      "epoch": 0.95,
      "grad_norm": 23.635671615600586,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.2517,
      "step": 180
    },
    {
      "epoch": 1.0,
      "grad_norm": 17.320865631103516,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2747,
      "step": 190
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9562962962962963,
      "eval_loss": 0.13819058239459991,
      "eval_runtime": 17.3473,
      "eval_samples_per_second": 155.644,
      "eval_steps_per_second": 4.9,
      "step": 190
    },
    {
      "epoch": 1.05,
      "grad_norm": 6.783141613006592,
      "learning_rate": 3.606237816764133e-05,
      "loss": 0.1881,
      "step": 200
    },
    {
      "epoch": 1.11,
      "grad_norm": 12.496849060058594,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.2501,
      "step": 210
    },
    {
      "epoch": 1.16,
      "grad_norm": 11.41723346710205,
      "learning_rate": 3.41130604288499e-05,
      "loss": 0.2282,
      "step": 220
    },
    {
      "epoch": 1.21,
      "grad_norm": 12.789288520812988,
      "learning_rate": 3.313840155945419e-05,
      "loss": 0.2056,
      "step": 230
    },
    {
      "epoch": 1.26,
      "grad_norm": 15.091114044189453,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.1959,
      "step": 240
    },
    {
      "epoch": 1.32,
      "grad_norm": 13.9872465133667,
      "learning_rate": 3.118908382066277e-05,
      "loss": 0.192,
      "step": 250
    },
    {
      "epoch": 1.37,
      "grad_norm": 12.632612228393555,
      "learning_rate": 3.0214424951267055e-05,
      "loss": 0.2159,
      "step": 260
    },
    {
      "epoch": 1.42,
      "grad_norm": 5.643543243408203,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.2369,
      "step": 270
    },
    {
      "epoch": 1.47,
      "grad_norm": 12.24279499053955,
      "learning_rate": 2.8265107212475634e-05,
      "loss": 0.2166,
      "step": 280
    },
    {
      "epoch": 1.53,
      "grad_norm": 9.774775505065918,
      "learning_rate": 2.729044834307992e-05,
      "loss": 0.1558,
      "step": 290
    },
    {
      "epoch": 1.58,
      "grad_norm": 11.163515090942383,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.1835,
      "step": 300
    },
    {
      "epoch": 1.63,
      "grad_norm": 15.248787879943848,
      "learning_rate": 2.53411306042885e-05,
      "loss": 0.2193,
      "step": 310
    },
    {
      "epoch": 1.68,
      "grad_norm": 13.8126220703125,
      "learning_rate": 2.4366471734892787e-05,
      "loss": 0.1867,
      "step": 320
    },
    {
      "epoch": 1.74,
      "grad_norm": 5.466386795043945,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.1383,
      "step": 330
    },
    {
      "epoch": 1.79,
      "grad_norm": 11.421280860900879,
      "learning_rate": 2.2417153996101365e-05,
      "loss": 0.1611,
      "step": 340
    },
    {
      "epoch": 1.84,
      "grad_norm": 3.8385016918182373,
      "learning_rate": 2.1442495126705653e-05,
      "loss": 0.1518,
      "step": 350
    },
    {
      "epoch": 1.89,
      "grad_norm": 15.16893196105957,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.198,
      "step": 360
    },
    {
      "epoch": 1.95,
      "grad_norm": 8.695472717285156,
      "learning_rate": 1.949317738791423e-05,
      "loss": 0.1843,
      "step": 370
    },
    {
      "epoch": 2.0,
      "grad_norm": 10.149725914001465,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1928,
      "step": 380
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9714814814814815,
      "eval_loss": 0.09133487939834595,
      "eval_runtime": 17.3865,
      "eval_samples_per_second": 155.293,
      "eval_steps_per_second": 4.889,
      "step": 380
    },
    {
      "epoch": 2.05,
      "grad_norm": 8.00893783569336,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.1681,
      "step": 390
    },
    {
      "epoch": 2.11,
      "grad_norm": 16.080177307128906,
      "learning_rate": 1.6569200779727097e-05,
      "loss": 0.1489,
      "step": 400
    },
    {
      "epoch": 2.16,
      "grad_norm": 13.361900329589844,
      "learning_rate": 1.5594541910331384e-05,
      "loss": 0.1362,
      "step": 410
    },
    {
      "epoch": 2.21,
      "grad_norm": 11.800583839416504,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.1489,
      "step": 420
    },
    {
      "epoch": 2.26,
      "grad_norm": 13.107502937316895,
      "learning_rate": 1.364522417153996e-05,
      "loss": 0.1813,
      "step": 430
    },
    {
      "epoch": 2.32,
      "grad_norm": 8.709113121032715,
      "learning_rate": 1.267056530214425e-05,
      "loss": 0.1874,
      "step": 440
    },
    {
      "epoch": 2.37,
      "grad_norm": 6.434211730957031,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.1782,
      "step": 450
    },
    {
      "epoch": 2.42,
      "grad_norm": 18.821304321289062,
      "learning_rate": 1.0721247563352826e-05,
      "loss": 0.1365,
      "step": 460
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.870109558105469,
      "learning_rate": 9.746588693957115e-06,
      "loss": 0.1518,
      "step": 470
    },
    {
      "epoch": 2.53,
      "grad_norm": 3.519899845123291,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.139,
      "step": 480
    },
    {
      "epoch": 2.58,
      "grad_norm": 23.94843292236328,
      "learning_rate": 7.797270955165692e-06,
      "loss": 0.1486,
      "step": 490
    },
    {
      "epoch": 2.63,
      "grad_norm": 5.265621185302734,
      "learning_rate": 6.82261208576998e-06,
      "loss": 0.1329,
      "step": 500
    },
    {
      "epoch": 2.68,
      "grad_norm": 4.568530559539795,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.1513,
      "step": 510
    },
    {
      "epoch": 2.74,
      "grad_norm": 8.631195068359375,
      "learning_rate": 4.873294346978558e-06,
      "loss": 0.1549,
      "step": 520
    },
    {
      "epoch": 2.79,
      "grad_norm": 6.977248191833496,
      "learning_rate": 3.898635477582846e-06,
      "loss": 0.1465,
      "step": 530
    },
    {
      "epoch": 2.84,
      "grad_norm": 12.340450286865234,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.1662,
      "step": 540
    },
    {
      "epoch": 2.89,
      "grad_norm": 6.968770980834961,
      "learning_rate": 1.949317738791423e-06,
      "loss": 0.1449,
      "step": 550
    },
    {
      "epoch": 2.95,
      "grad_norm": 6.5286688804626465,
      "learning_rate": 9.746588693957115e-07,
      "loss": 0.1513,
      "step": 560
    },
    {
      "epoch": 3.0,
      "grad_norm": 9.231646537780762,
      "learning_rate": 0.0,
      "loss": 0.1317,
      "step": 570
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9814814814814815,
      "eval_loss": 0.06550158560276031,
      "eval_runtime": 17.1307,
      "eval_samples_per_second": 157.612,
      "eval_steps_per_second": 4.962,
      "step": 570
    },
    {
      "epoch": 3.0,
      "step": 570,
      "total_flos": 1.8124066505760768e+18,
      "train_loss": 0.3264210811832495,
      "train_runtime": 1089.539,
      "train_samples_per_second": 66.909,
      "train_steps_per_second": 0.523
    }
  ],
  "logging_steps": 10,
  "max_steps": 570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.8124066505760768e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}