{
  "best_metric": 0.951678171863842,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-266",
  "epoch": 0.9971883786316776,
  "global_step": 266,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 2.3306,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 2.259,
      "step": 20
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.93723849372385e-05,
      "loss": 2.0681,
      "step": 30
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.728033472803347e-05,
      "loss": 1.6807,
      "step": 40
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.5188284518828455e-05,
      "loss": 1.2692,
      "step": 50
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.3096234309623436e-05,
      "loss": 0.945,
      "step": 60
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.100418410041841e-05,
      "loss": 0.8267,
      "step": 70
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.891213389121339e-05,
      "loss": 0.6621,
      "step": 80
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.6820083682008375e-05,
      "loss": 0.6335,
      "step": 90
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.472803347280335e-05,
      "loss": 0.5838,
      "step": 100
    },
    {
      "epoch": 0.41,
      "learning_rate": 3.263598326359833e-05,
      "loss": 0.5066,
      "step": 110
    },
    {
      "epoch": 0.45,
      "learning_rate": 3.0543933054393306e-05,
      "loss": 0.5479,
      "step": 120
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.8451882845188288e-05,
      "loss": 0.5054,
      "step": 130
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.6359832635983266e-05,
      "loss": 0.4832,
      "step": 140
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.4267782426778244e-05,
      "loss": 0.4885,
      "step": 150
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.2175732217573222e-05,
      "loss": 0.4741,
      "step": 160
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.00836820083682e-05,
      "loss": 0.4313,
      "step": 170
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.799163179916318e-05,
      "loss": 0.4333,
      "step": 180
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.589958158995816e-05,
      "loss": 0.4025,
      "step": 190
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3807531380753139e-05,
      "loss": 0.393,
      "step": 200
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1715481171548117e-05,
      "loss": 0.4238,
      "step": 210
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.623430962343097e-06,
      "loss": 0.3701,
      "step": 220
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.531380753138076e-06,
      "loss": 0.3893,
      "step": 230
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.439330543933055e-06,
      "loss": 0.3764,
      "step": 240
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.3472803347280334e-06,
      "loss": 0.393,
      "step": 250
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2552301255230125e-06,
      "loss": 0.3704,
      "step": 260
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.951678171863842,
      "eval_loss": 0.16736215353012085,
      "eval_runtime": 180.4734,
      "eval_samples_per_second": 23.278,
      "eval_steps_per_second": 0.731,
      "step": 266
    },
    {
      "epoch": 1.0,
      "step": 266,
      "total_flos": 8.464858935365468e+17,
      "train_loss": 0.7695897181231276,
      "train_runtime": 1381.8449,
      "train_samples_per_second": 24.697,
      "train_steps_per_second": 0.192
    }
  ],
  "max_steps": 266,
  "num_train_epochs": 1,
  "total_flos": 8.464858935365468e+17,
  "trial_name": null,
  "trial_params": null
}