{
  "best_metric": 0.6868686868686869,
  "best_model_checkpoint": "beit-base-patch16-224-pt22k-ft22k-finetuned-FER2013-9e-05/checkpoint-606",
  "epoch": 3.0,
  "global_step": 606,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 1.4754098360655737e-05,
      "loss": 1.9486,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.9508196721311474e-05,
      "loss": 1.8026,
      "step": 20
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.426229508196721e-05,
      "loss": 1.6145,
      "step": 30
    },
    {
      "epoch": 0.2,
      "learning_rate": 5.901639344262295e-05,
      "loss": 1.4351,
      "step": 40
    },
    {
      "epoch": 0.25,
      "learning_rate": 7.377049180327869e-05,
      "loss": 1.405,
      "step": 50
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.852459016393443e-05,
      "loss": 1.4148,
      "step": 60
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.851376146788992e-05,
      "loss": 1.3827,
      "step": 70
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.686238532110092e-05,
      "loss": 1.3682,
      "step": 80
    },
    {
      "epoch": 0.45,
      "learning_rate": 8.521100917431194e-05,
      "loss": 1.2927,
      "step": 90
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.355963302752294e-05,
      "loss": 1.3435,
      "step": 100
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.190825688073396e-05,
      "loss": 1.2354,
      "step": 110
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.025688073394496e-05,
      "loss": 1.2516,
      "step": 120
    },
    {
      "epoch": 0.64,
      "learning_rate": 7.860550458715598e-05,
      "loss": 1.3105,
      "step": 130
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.695412844036698e-05,
      "loss": 1.2845,
      "step": 140
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.5302752293578e-05,
      "loss": 1.2567,
      "step": 150
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.3651376146789e-05,
      "loss": 1.2076,
      "step": 160
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.2e-05,
      "loss": 1.2077,
      "step": 170
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.034862385321102e-05,
      "loss": 1.2667,
      "step": 180
    },
    {
      "epoch": 0.94,
      "learning_rate": 6.869724770642202e-05,
      "loss": 1.2037,
      "step": 190
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.704587155963304e-05,
      "loss": 1.1659,
      "step": 200
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6245210727969349,
      "eval_loss": 0.977580726146698,
      "eval_runtime": 24.6556,
      "eval_samples_per_second": 116.444,
      "eval_steps_per_second": 3.65,
      "step": 202
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.539449541284404e-05,
      "loss": 1.1509,
      "step": 210
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.374311926605506e-05,
      "loss": 1.1316,
      "step": 220
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.209174311926606e-05,
      "loss": 1.1899,
      "step": 230
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.044036697247707e-05,
      "loss": 1.1514,
      "step": 240
    },
    {
      "epoch": 1.24,
      "learning_rate": 5.878899082568808e-05,
      "loss": 1.1313,
      "step": 250
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.713761467889909e-05,
      "loss": 1.1409,
      "step": 260
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.54862385321101e-05,
      "loss": 1.1199,
      "step": 270
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.3834862385321106e-05,
      "loss": 1.0977,
      "step": 280
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.2183486238532116e-05,
      "loss": 1.0992,
      "step": 290
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.0532110091743125e-05,
      "loss": 1.188,
      "step": 300
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.8880733944954135e-05,
      "loss": 1.1191,
      "step": 310
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.7229357798165144e-05,
      "loss": 1.0916,
      "step": 320
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.5577981651376154e-05,
      "loss": 1.0855,
      "step": 330
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.3926605504587163e-05,
      "loss": 1.078,
      "step": 340
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.227522935779817e-05,
      "loss": 1.055,
      "step": 350
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.062385321100918e-05,
      "loss": 1.0762,
      "step": 360
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.8972477064220185e-05,
      "loss": 1.078,
      "step": 370
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.7321100917431195e-05,
      "loss": 1.0907,
      "step": 380
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.5669724770642204e-05,
      "loss": 1.0911,
      "step": 390
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.4018348623853214e-05,
      "loss": 1.0531,
      "step": 400
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.6732845698362939,
      "eval_loss": 0.891925036907196,
      "eval_runtime": 24.8114,
      "eval_samples_per_second": 115.713,
      "eval_steps_per_second": 3.627,
      "step": 404
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.236697247706422e-05,
      "loss": 1.0555,
      "step": 410
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.071559633027523e-05,
      "loss": 1.0806,
      "step": 420
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.9064220183486242e-05,
      "loss": 1.0574,
      "step": 430
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.7412844036697252e-05,
      "loss": 0.9992,
      "step": 440
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.576146788990826e-05,
      "loss": 1.0317,
      "step": 450
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.4110091743119267e-05,
      "loss": 1.0397,
      "step": 460
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.2458715596330277e-05,
      "loss": 1.0628,
      "step": 470
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.0807339449541286e-05,
      "loss": 1.0128,
      "step": 480
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.9155963302752296e-05,
      "loss": 1.0218,
      "step": 490
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.7504587155963305e-05,
      "loss": 1.0342,
      "step": 500
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.5853211009174315e-05,
      "loss": 1.0246,
      "step": 510
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.4201834862385323e-05,
      "loss": 0.9893,
      "step": 520
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.2550458715596332e-05,
      "loss": 1.0331,
      "step": 530
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.089908256880734e-05,
      "loss": 1.0342,
      "step": 540
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.24770642201835e-06,
      "loss": 0.9767,
      "step": 550
    },
    {
      "epoch": 2.77,
      "learning_rate": 7.596330275229359e-06,
      "loss": 0.983,
      "step": 560
    },
    {
      "epoch": 2.82,
      "learning_rate": 5.944954128440368e-06,
      "loss": 0.991,
      "step": 570
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.293577981651376e-06,
      "loss": 0.9894,
      "step": 580
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.642201834862386e-06,
      "loss": 0.9968,
      "step": 590
    },
    {
      "epoch": 2.97,
      "learning_rate": 9.908256880733947e-07,
      "loss": 1.001,
      "step": 600
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.6868686868686869,
      "eval_loss": 0.841149091720581,
      "eval_runtime": 24.894,
      "eval_samples_per_second": 115.329,
      "eval_steps_per_second": 3.615,
      "step": 606
    },
    {
      "epoch": 3.0,
      "step": 606,
      "total_flos": 6.004415924974301e+18,
      "train_loss": 1.1650643325088048,
      "train_runtime": 1992.3663,
      "train_samples_per_second": 38.905,
      "train_steps_per_second": 0.304
    }
  ],
  "max_steps": 606,
  "num_train_epochs": 3,
  "total_flos": 6.004415924974301e+18,
  "trial_name": null,
  "trial_params": null
}