{
  "best_metric": 1.0,
  "best_model_checkpoint": "videomae-base-finetuned-basketball-subset-v2/checkpoint-404",
  "epoch": 4.009803921568627,
  "global_step": 816,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 6.0975609756097564e-06,
      "loss": 0.6571,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 0.6573,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.8292682926829268e-05,
      "loss": 0.7806,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.8591,
      "step": 40
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.048780487804878e-05,
      "loss": 0.6489,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.6585365853658535e-05,
      "loss": 0.7533,
      "step": 60
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.26829268292683e-05,
      "loss": 0.576,
      "step": 70
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.878048780487805e-05,
      "loss": 1.0713,
      "step": 80
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.945504087193461e-05,
      "loss": 0.4372,
      "step": 90
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.877384196185286e-05,
      "loss": 0.5813,
      "step": 100
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.809264305177112e-05,
      "loss": 1.1109,
      "step": 110
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.741144414168938e-05,
      "loss": 1.4496,
      "step": 120
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.673024523160763e-05,
      "loss": 0.6032,
      "step": 130
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.604904632152589e-05,
      "loss": 2.0923,
      "step": 140
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.5367847411444145e-05,
      "loss": 1.4158,
      "step": 150
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.46866485013624e-05,
      "loss": 1.5632,
      "step": 160
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.400544959128066e-05,
      "loss": 1.1707,
      "step": 170
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.332425068119891e-05,
      "loss": 0.4736,
      "step": 180
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.264305177111717e-05,
      "loss": 2.0593,
      "step": 190
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.196185286103542e-05,
      "loss": 1.7747,
      "step": 200
    },
    {
      "epoch": 0.25,
      "eval_accuracy": 0.5,
      "eval_loss": 2.5059080123901367,
      "eval_runtime": 0.4908,
      "eval_samples_per_second": 4.075,
      "eval_steps_per_second": 4.075,
      "step": 202
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.128065395095368e-05,
      "loss": 0.706,
      "step": 210
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.0599455040871935e-05,
      "loss": 1.2504,
      "step": 220
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.9918256130790195e-05,
      "loss": 1.8594,
      "step": 230
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.923705722070845e-05,
      "loss": 0.8968,
      "step": 240
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.85558583106267e-05,
      "loss": 0.739,
      "step": 250
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.787465940054496e-05,
      "loss": 1.2789,
      "step": 260
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.719346049046322e-05,
      "loss": 0.8226,
      "step": 270
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.651226158038147e-05,
      "loss": 0.9322,
      "step": 280
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.583106267029973e-05,
      "loss": 0.4803,
      "step": 290
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.5149863760217985e-05,
      "loss": 0.4243,
      "step": 300
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.446866485013624e-05,
      "loss": 0.0077,
      "step": 310
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.37874659400545e-05,
      "loss": 0.5999,
      "step": 320
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.310626702997276e-05,
      "loss": 1.2867,
      "step": 330
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.242506811989101e-05,
      "loss": 2.3124,
      "step": 340
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.174386920980927e-05,
      "loss": 0.9884,
      "step": 350
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.106267029972752e-05,
      "loss": 0.2988,
      "step": 360
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.038147138964578e-05,
      "loss": 0.9146,
      "step": 370
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9700272479564035e-05,
      "loss": 1.9739,
      "step": 380
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.901907356948229e-05,
      "loss": 0.7482,
      "step": 390
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.8337874659400547e-05,
      "loss": 0.0199,
      "step": 400
    },
    {
      "epoch": 1.25,
      "eval_accuracy": 1.0,
      "eval_loss": 0.008221009746193886,
      "eval_runtime": 0.4787,
      "eval_samples_per_second": 4.178,
      "eval_steps_per_second": 4.178,
      "step": 404
    },
    {
      "epoch": 2.01,
      "learning_rate": 2.76566757493188e-05,
      "loss": 1.4992,
      "step": 410
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.6975476839237056e-05,
      "loss": 0.085,
      "step": 420
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.6294277929155313e-05,
      "loss": 1.2553,
      "step": 430
    },
    {
      "epoch": 2.04,
      "learning_rate": 2.5613079019073572e-05,
      "loss": 0.5826,
      "step": 440
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.4931880108991825e-05,
      "loss": 1.4869,
      "step": 450
    },
    {
      "epoch": 2.07,
      "learning_rate": 2.425068119891008e-05,
      "loss": 0.5711,
      "step": 460
    },
    {
      "epoch": 2.08,
      "learning_rate": 2.356948228882834e-05,
      "loss": 0.0089,
      "step": 470
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.2888283378746594e-05,
      "loss": 0.5027,
      "step": 480
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.220708446866485e-05,
      "loss": 0.4668,
      "step": 490
    },
    {
      "epoch": 2.12,
      "learning_rate": 2.1525885558583106e-05,
      "loss": 0.4391,
      "step": 500
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.0844686648501363e-05,
      "loss": 0.8851,
      "step": 510
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.016348773841962e-05,
      "loss": 0.3284,
      "step": 520
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.9482288828337875e-05,
      "loss": 0.5255,
      "step": 530
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.880108991825613e-05,
      "loss": 1.0862,
      "step": 540
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.8119891008174387e-05,
      "loss": 1.5676,
      "step": 550
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.7438692098092644e-05,
      "loss": 1.2709,
      "step": 560
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.67574931880109e-05,
      "loss": 0.4326,
      "step": 570
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.6076294277929156e-05,
      "loss": 0.0092,
      "step": 580
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.5395095367847412e-05,
      "loss": 0.7223,
      "step": 590
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.4713896457765669e-05,
      "loss": 0.4094,
      "step": 600
    },
    {
      "epoch": 2.25,
      "eval_accuracy": 1.0,
      "eval_loss": 0.03134175017476082,
      "eval_runtime": 0.4687,
      "eval_samples_per_second": 4.267,
      "eval_steps_per_second": 4.267,
      "step": 606
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.4032697547683923e-05,
      "loss": 0.2763,
      "step": 610
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.3351498637602181e-05,
      "loss": 0.8591,
      "step": 620
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.2670299727520437e-05,
      "loss": 0.0039,
      "step": 630
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.1989100817438692e-05,
      "loss": 0.0026,
      "step": 640
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.130790190735695e-05,
      "loss": 0.0036,
      "step": 650
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.0626702997275204e-05,
      "loss": 0.0025,
      "step": 660
    },
    {
      "epoch": 3.08,
      "learning_rate": 9.94550408719346e-06,
      "loss": 1.1266,
      "step": 670
    },
    {
      "epoch": 3.09,
      "learning_rate": 9.264305177111717e-06,
      "loss": 0.497,
      "step": 680
    },
    {
      "epoch": 3.1,
      "learning_rate": 8.583106267029973e-06,
      "loss": 0.0028,
      "step": 690
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.90190735694823e-06,
      "loss": 0.901,
      "step": 700
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.220708446866485e-06,
      "loss": 0.2089,
      "step": 710
    },
    {
      "epoch": 3.14,
      "learning_rate": 6.539509536784741e-06,
      "loss": 1.0508,
      "step": 720
    },
    {
      "epoch": 3.15,
      "learning_rate": 5.858310626702997e-06,
      "loss": 0.004,
      "step": 730
    },
    {
      "epoch": 3.16,
      "learning_rate": 5.1771117166212534e-06,
      "loss": 0.4922,
      "step": 740
    },
    {
      "epoch": 3.18,
      "learning_rate": 4.49591280653951e-06,
      "loss": 0.3897,
      "step": 750
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.814713896457766e-06,
      "loss": 0.003,
      "step": 760
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.1335149863760217e-06,
      "loss": 0.0027,
      "step": 770
    },
    {
      "epoch": 3.21,
      "learning_rate": 2.452316076294278e-06,
      "loss": 0.0159,
      "step": 780
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.7711171662125342e-06,
      "loss": 0.429,
      "step": 790
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.0899182561307902e-06,
      "loss": 0.2331,
      "step": 800
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.5,
      "eval_loss": 2.790487051010132,
      "eval_runtime": 0.4807,
      "eval_samples_per_second": 4.161,
      "eval_steps_per_second": 4.161,
      "step": 808
    },
    {
      "epoch": 4.0,
      "learning_rate": 4.087193460490463e-07,
      "loss": 0.0021,
      "step": 810
    },
    {
      "epoch": 4.01,
      "eval_accuracy": 0.5,
      "eval_loss": 2.7916269302368164,
      "eval_runtime": 0.9748,
      "eval_samples_per_second": 2.052,
      "eval_steps_per_second": 2.052,
      "step": 816
    },
    {
      "epoch": 4.01,
      "step": 816,
      "total_flos": 1.0167887050089431e+18,
      "train_loss": 0.734293045413306,
      "train_runtime": 501.5751,
      "train_samples_per_second": 1.627,
      "train_steps_per_second": 1.627
    },
    {
      "epoch": 4.01,
      "eval_accuracy": 1.0,
      "eval_loss": 0.008221009746193886,
      "eval_runtime": 0.5089,
      "eval_samples_per_second": 3.93,
      "eval_steps_per_second": 3.93,
      "step": 816
    },
    {
      "epoch": 4.01,
      "eval_accuracy": 1.0,
      "eval_loss": 0.008221009746193886,
      "eval_runtime": 0.5785,
      "eval_samples_per_second": 3.457,
      "eval_steps_per_second": 3.457,
      "step": 816
    }
  ],
  "max_steps": 816,
  "num_train_epochs": 9223372036854775807,
  "total_flos": 1.0167887050089431e+18,
  "trial_name": null,
  "trial_params": null
}