{
  "best_metric": 0.9740740740740741,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-570",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 9.440265655517578,
      "learning_rate": 8.771929824561403e-06,
      "loss": 2.305,
      "step": 10
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.114067077636719,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 2.0991,
      "step": 20
    },
    {
      "epoch": 0.16,
      "grad_norm": 8.871339797973633,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.6922,
      "step": 30
    },
    {
      "epoch": 0.21,
      "grad_norm": 12.588781356811523,
      "learning_rate": 3.508771929824561e-05,
      "loss": 1.0473,
      "step": 40
    },
    {
      "epoch": 0.26,
      "grad_norm": 32.035255432128906,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.6879,
      "step": 50
    },
    {
      "epoch": 0.32,
      "grad_norm": 18.531063079833984,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.5213,
      "step": 60
    },
    {
      "epoch": 0.37,
      "grad_norm": 14.835468292236328,
      "learning_rate": 4.8732943469785574e-05,
      "loss": 0.4985,
      "step": 70
    },
    {
      "epoch": 0.42,
      "grad_norm": 11.247536659240723,
      "learning_rate": 4.7758284600389865e-05,
      "loss": 0.3847,
      "step": 80
    },
    {
      "epoch": 0.47,
      "grad_norm": 13.409842491149902,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.3478,
      "step": 90
    },
    {
      "epoch": 0.53,
      "grad_norm": 12.523353576660156,
      "learning_rate": 4.580896686159844e-05,
      "loss": 0.3296,
      "step": 100
    },
    {
      "epoch": 0.58,
      "grad_norm": 16.897123336791992,
      "learning_rate": 4.483430799220273e-05,
      "loss": 0.3466,
      "step": 110
    },
    {
      "epoch": 0.63,
      "grad_norm": 14.646390914916992,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.3356,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 14.801265716552734,
      "learning_rate": 4.2884990253411305e-05,
      "loss": 0.2372,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 10.752660751342773,
      "learning_rate": 4.1910331384015596e-05,
      "loss": 0.3163,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 8.766670227050781,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.2725,
      "step": 150
    },
    {
      "epoch": 0.84,
      "grad_norm": 19.720884323120117,
      "learning_rate": 3.996101364522417e-05,
      "loss": 0.2708,
      "step": 160
    },
    {
      "epoch": 0.89,
      "grad_norm": 15.061501502990723,
      "learning_rate": 3.898635477582846e-05,
      "loss": 0.2514,
      "step": 170
    },
    {
      "epoch": 0.95,
      "grad_norm": 11.353556632995605,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.2595,
      "step": 180
    },
    {
      "epoch": 1.0,
      "grad_norm": 12.882011413574219,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2723,
      "step": 190
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9633333333333334,
      "eval_loss": 0.11899825185537338,
      "eval_runtime": 31.0072,
      "eval_samples_per_second": 87.076,
      "eval_steps_per_second": 2.741,
      "step": 190
    },
    {
      "epoch": 1.05,
      "grad_norm": 10.369582176208496,
      "learning_rate": 3.606237816764133e-05,
      "loss": 0.2288,
      "step": 200
    },
    {
      "epoch": 1.11,
      "grad_norm": 11.584061622619629,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.252,
      "step": 210
    },
    {
      "epoch": 1.16,
      "grad_norm": 9.439119338989258,
      "learning_rate": 3.41130604288499e-05,
      "loss": 0.2043,
      "step": 220
    },
    {
      "epoch": 1.21,
      "grad_norm": 10.231059074401855,
      "learning_rate": 3.313840155945419e-05,
      "loss": 0.2835,
      "step": 230
    },
    {
      "epoch": 1.26,
      "grad_norm": 10.476188659667969,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.2311,
      "step": 240
    },
    {
      "epoch": 1.32,
      "grad_norm": 10.889532089233398,
      "learning_rate": 3.118908382066277e-05,
      "loss": 0.236,
      "step": 250
    },
    {
      "epoch": 1.37,
      "grad_norm": 8.168774604797363,
      "learning_rate": 3.0214424951267055e-05,
      "loss": 0.2129,
      "step": 260
    },
    {
      "epoch": 1.42,
      "grad_norm": 11.831040382385254,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.2351,
      "step": 270
    },
    {
      "epoch": 1.47,
      "grad_norm": 9.056485176086426,
      "learning_rate": 2.8265107212475634e-05,
      "loss": 0.2328,
      "step": 280
    },
    {
      "epoch": 1.53,
      "grad_norm": 6.355861663818359,
      "learning_rate": 2.729044834307992e-05,
      "loss": 0.2351,
      "step": 290
    },
    {
      "epoch": 1.58,
      "grad_norm": 7.177761077880859,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.2055,
      "step": 300
    },
    {
      "epoch": 1.63,
      "grad_norm": 9.945026397705078,
      "learning_rate": 2.53411306042885e-05,
      "loss": 0.1531,
      "step": 310
    },
    {
      "epoch": 1.68,
      "grad_norm": 6.8051557540893555,
      "learning_rate": 2.4366471734892787e-05,
      "loss": 0.1811,
      "step": 320
    },
    {
      "epoch": 1.74,
      "grad_norm": 10.681591033935547,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.1561,
      "step": 330
    },
    {
      "epoch": 1.79,
      "grad_norm": 12.849047660827637,
      "learning_rate": 2.2417153996101365e-05,
      "loss": 0.1875,
      "step": 340
    },
    {
      "epoch": 1.84,
      "grad_norm": 7.480422496795654,
      "learning_rate": 2.1442495126705653e-05,
      "loss": 0.1912,
      "step": 350
    },
    {
      "epoch": 1.89,
      "grad_norm": 10.37962818145752,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.179,
      "step": 360
    },
    {
      "epoch": 1.95,
      "grad_norm": 5.792560577392578,
      "learning_rate": 1.949317738791423e-05,
      "loss": 0.1684,
      "step": 370
    },
    {
      "epoch": 2.0,
      "grad_norm": 13.828116416931152,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1886,
      "step": 380
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9718518518518519,
      "eval_loss": 0.07636851072311401,
      "eval_runtime": 31.1849,
      "eval_samples_per_second": 86.58,
      "eval_steps_per_second": 2.726,
      "step": 380
    },
    {
      "epoch": 2.05,
      "grad_norm": 9.011467933654785,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.2016,
      "step": 390
    },
    {
      "epoch": 2.11,
      "grad_norm": 8.530644416809082,
      "learning_rate": 1.6569200779727097e-05,
      "loss": 0.1711,
      "step": 400
    },
    {
      "epoch": 2.16,
      "grad_norm": 10.602960586547852,
      "learning_rate": 1.5594541910331384e-05,
      "loss": 0.1852,
      "step": 410
    },
    {
      "epoch": 2.21,
      "grad_norm": 6.965076923370361,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.1609,
      "step": 420
    },
    {
      "epoch": 2.26,
      "grad_norm": 12.922185897827148,
      "learning_rate": 1.364522417153996e-05,
      "loss": 0.153,
      "step": 430
    },
    {
      "epoch": 2.32,
      "grad_norm": 11.024358749389648,
      "learning_rate": 1.267056530214425e-05,
      "loss": 0.1704,
      "step": 440
    },
    {
      "epoch": 2.37,
      "grad_norm": 9.979962348937988,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.1789,
      "step": 450
    },
    {
      "epoch": 2.42,
      "grad_norm": 12.759318351745605,
      "learning_rate": 1.0721247563352826e-05,
      "loss": 0.1593,
      "step": 460
    },
    {
      "epoch": 2.47,
      "grad_norm": 10.141521453857422,
      "learning_rate": 9.746588693957115e-06,
      "loss": 0.1692,
      "step": 470
    },
    {
      "epoch": 2.53,
      "grad_norm": 8.66246223449707,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.1508,
      "step": 480
    },
    {
      "epoch": 2.58,
      "grad_norm": 16.992340087890625,
      "learning_rate": 7.797270955165692e-06,
      "loss": 0.1788,
      "step": 490
    },
    {
      "epoch": 2.63,
      "grad_norm": 5.300870418548584,
      "learning_rate": 6.82261208576998e-06,
      "loss": 0.1596,
      "step": 500
    },
    {
      "epoch": 2.68,
      "grad_norm": 6.27362060546875,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.1566,
      "step": 510
    },
    {
      "epoch": 2.74,
      "grad_norm": 7.903752326965332,
      "learning_rate": 4.873294346978558e-06,
      "loss": 0.1487,
      "step": 520
    },
    {
      "epoch": 2.79,
      "grad_norm": 11.882786750793457,
      "learning_rate": 3.898635477582846e-06,
      "loss": 0.1951,
      "step": 530
    },
    {
      "epoch": 2.84,
      "grad_norm": 9.480567932128906,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.152,
      "step": 540
    },
    {
      "epoch": 2.89,
      "grad_norm": 8.694555282592773,
      "learning_rate": 1.949317738791423e-06,
      "loss": 0.1469,
      "step": 550
    },
    {
      "epoch": 2.95,
      "grad_norm": 4.170577526092529,
      "learning_rate": 9.746588693957115e-07,
      "loss": 0.1475,
      "step": 560
    },
    {
      "epoch": 3.0,
      "grad_norm": 10.84870433807373,
      "learning_rate": 0.0,
      "loss": 0.1336,
      "step": 570
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9740740740740741,
      "eval_loss": 0.06846870481967926,
      "eval_runtime": 31.5637,
      "eval_samples_per_second": 85.541,
      "eval_steps_per_second": 2.693,
      "step": 570
    },
    {
      "epoch": 3.0,
      "step": 570,
      "total_flos": 1.8124066505760768e+18,
      "train_loss": 0.3430963271542599,
      "train_runtime": 1158.7002,
      "train_samples_per_second": 62.915,
      "train_steps_per_second": 0.492
    }
  ],
  "logging_steps": 10,
  "max_steps": 570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.8124066505760768e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}