{
  "best_metric": 0.8969686033922771,
  "best_model_checkpoint": "violation-classification-bantai-vit-v100ep/checkpoint-808",
  "epoch": 8.99753086419753,
  "global_step": 909,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1,
      "learning_rate": 4.950495049504951e-07,
      "loss": 1.3728,
      "step": 10
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.900990099009902e-07,
      "loss": 1.37,
      "step": 20
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.4851485148514852e-06,
      "loss": 1.3698,
      "step": 30
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9801980198019803e-06,
      "loss": 1.3582,
      "step": 40
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.4752475247524753e-06,
      "loss": 1.348,
      "step": 50
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.9702970297029703e-06,
      "loss": 1.3355,
      "step": 60
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.4653465346534657e-06,
      "loss": 1.3186,
      "step": 70
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.960396039603961e-06,
      "loss": 1.3057,
      "step": 80
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.455445544554456e-06,
      "loss": 1.2756,
      "step": 90
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.950495049504951e-06,
      "loss": 1.2596,
      "step": 100
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.5615301335258029,
      "eval_loss": 1.222981333732605,
      "eval_runtime": 59.3749,
      "eval_samples_per_second": 93.339,
      "eval_steps_per_second": 2.931,
      "step": 101
    },
    {
      "epoch": 1.09,
      "learning_rate": 5.445544554455446e-06,
      "loss": 1.2563,
      "step": 110
    },
    {
      "epoch": 1.19,
      "learning_rate": 5.940594059405941e-06,
      "loss": 1.1843,
      "step": 120
    },
    {
      "epoch": 1.29,
      "learning_rate": 6.4356435643564364e-06,
      "loss": 1.1572,
      "step": 130
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.9306930693069314e-06,
      "loss": 1.1153,
      "step": 140
    },
    {
      "epoch": 1.48,
      "learning_rate": 7.4257425742574256e-06,
      "loss": 1.0653,
      "step": 150
    },
    {
      "epoch": 1.58,
      "learning_rate": 7.920792079207921e-06,
      "loss": 1.0119,
      "step": 160
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.415841584158417e-06,
      "loss": 0.9734,
      "step": 170
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.910891089108911e-06,
      "loss": 0.9144,
      "step": 180
    },
    {
      "epoch": 1.88,
      "learning_rate": 9.405940594059407e-06,
      "loss": 0.9037,
      "step": 190
    },
    {
      "epoch": 1.98,
      "learning_rate": 9.900990099009901e-06,
      "loss": 0.8527,
      "step": 200
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.6840490797546013,
      "eval_loss": 0.8233989477157593,
      "eval_runtime": 59.669,
      "eval_samples_per_second": 92.879,
      "eval_steps_per_second": 2.916,
      "step": 202
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.0396039603960395e-05,
      "loss": 0.8402,
      "step": 210
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.0891089108910891e-05,
      "loss": 0.7847,
      "step": 220
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1386138613861387e-05,
      "loss": 0.7738,
      "step": 230
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.1881188118811881e-05,
      "loss": 0.7346,
      "step": 240
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.2376237623762377e-05,
      "loss": 0.6992,
      "step": 250
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.2871287128712873e-05,
      "loss": 0.695,
      "step": 260
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.3366336633663367e-05,
      "loss": 0.6715,
      "step": 270
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.3861386138613863e-05,
      "loss": 0.6683,
      "step": 280
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.4356435643564355e-05,
      "loss": 0.6617,
      "step": 290
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.4851485148514851e-05,
      "loss": 0.6375,
      "step": 300
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.784554312522555,
      "eval_loss": 0.6000781655311584,
      "eval_runtime": 59.2996,
      "eval_samples_per_second": 93.458,
      "eval_steps_per_second": 2.934,
      "step": 303
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.534653465346535e-05,
      "loss": 0.6349,
      "step": 310
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.5841584158415843e-05,
      "loss": 0.5924,
      "step": 320
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.6336633663366337e-05,
      "loss": 0.5905,
      "step": 330
    },
    {
      "epoch": 3.37,
      "learning_rate": 1.6831683168316834e-05,
      "loss": 0.5591,
      "step": 340
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.7326732673267325e-05,
      "loss": 0.582,
      "step": 350
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.7821782178217823e-05,
      "loss": 0.553,
      "step": 360
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.8316831683168317e-05,
      "loss": 0.5489,
      "step": 370
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.8811881188118814e-05,
      "loss": 0.5711,
      "step": 380
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.930693069306931e-05,
      "loss": 0.5161,
      "step": 390
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.9801980198019803e-05,
      "loss": 0.555,
      "step": 400
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.817755322988091,
      "eval_loss": 0.5038326978683472,
      "eval_runtime": 59.2865,
      "eval_samples_per_second": 93.478,
      "eval_steps_per_second": 2.935,
      "step": 404
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.02970297029703e-05,
      "loss": 0.5553,
      "step": 410
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.079207920792079e-05,
      "loss": 0.5075,
      "step": 420
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.128712871287129e-05,
      "loss": 0.4948,
      "step": 430
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.1782178217821783e-05,
      "loss": 0.4892,
      "step": 440
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.227722772277228e-05,
      "loss": 0.4861,
      "step": 450
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.2772277227722774e-05,
      "loss": 0.4922,
      "step": 460
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.326732673267327e-05,
      "loss": 0.498,
      "step": 470
    },
    {
      "epoch": 4.75,
      "learning_rate": 2.3762376237623762e-05,
      "loss": 0.4722,
      "step": 480
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.4257425742574257e-05,
      "loss": 0.4619,
      "step": 490
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.4752475247524754e-05,
      "loss": 0.4433,
      "step": 500
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.843558282208589,
      "eval_loss": 0.4338299632072449,
      "eval_runtime": 59.1883,
      "eval_samples_per_second": 93.633,
      "eval_steps_per_second": 2.94,
      "step": 505
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.5247524752475248e-05,
      "loss": 0.4457,
      "step": 510
    },
    {
      "epoch": 5.15,
      "learning_rate": 2.5742574257425746e-05,
      "loss": 0.4402,
      "step": 520
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.623762376237624e-05,
      "loss": 0.4118,
      "step": 530
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.6732673267326734e-05,
      "loss": 0.4431,
      "step": 540
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.722772277227723e-05,
      "loss": 0.4308,
      "step": 550
    },
    {
      "epoch": 5.54,
      "learning_rate": 2.7722772277227726e-05,
      "loss": 0.4262,
      "step": 560
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.8217821782178216e-05,
      "loss": 0.4131,
      "step": 570
    },
    {
      "epoch": 5.74,
      "learning_rate": 2.871287128712871e-05,
      "loss": 0.3946,
      "step": 580
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.9207920792079208e-05,
      "loss": 0.4015,
      "step": 590
    },
    {
      "epoch": 5.94,
      "learning_rate": 2.9702970297029702e-05,
      "loss": 0.406,
      "step": 600
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.8661133164922411,
      "eval_loss": 0.3765123784542084,
      "eval_runtime": 59.3663,
      "eval_samples_per_second": 93.353,
      "eval_steps_per_second": 2.931,
      "step": 606
    },
    {
      "epoch": 6.04,
      "learning_rate": 3.01980198019802e-05,
      "loss": 0.4215,
      "step": 610
    },
    {
      "epoch": 6.14,
      "learning_rate": 3.06930693069307e-05,
      "loss": 0.3587,
      "step": 620
    },
    {
      "epoch": 6.24,
      "learning_rate": 3.118811881188119e-05,
      "loss": 0.3565,
      "step": 630
    },
    {
      "epoch": 6.34,
      "learning_rate": 3.1683168316831686e-05,
      "loss": 0.3848,
      "step": 640
    },
    {
      "epoch": 6.43,
      "learning_rate": 3.217821782178218e-05,
      "loss": 0.3508,
      "step": 650
    },
    {
      "epoch": 6.53,
      "learning_rate": 3.2673267326732674e-05,
      "loss": 0.3723,
      "step": 660
    },
    {
      "epoch": 6.63,
      "learning_rate": 3.3168316831683175e-05,
      "loss": 0.3643,
      "step": 670
    },
    {
      "epoch": 6.73,
      "learning_rate": 3.366336633663367e-05,
      "loss": 0.4062,
      "step": 680
    },
    {
      "epoch": 6.83,
      "learning_rate": 3.415841584158416e-05,
      "loss": 0.3767,
      "step": 690
    },
    {
      "epoch": 6.93,
      "learning_rate": 3.465346534653465e-05,
      "loss": 0.3517,
      "step": 700
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.8792854565138939,
      "eval_loss": 0.34659793972969055,
      "eval_runtime": 59.2647,
      "eval_samples_per_second": 93.513,
      "eval_steps_per_second": 2.936,
      "step": 707
    },
    {
      "epoch": 7.03,
      "learning_rate": 3.514851485148515e-05,
      "loss": 0.3314,
      "step": 710
    },
    {
      "epoch": 7.13,
      "learning_rate": 3.5643564356435645e-05,
      "loss": 0.3319,
      "step": 720
    },
    {
      "epoch": 7.23,
      "learning_rate": 3.613861386138614e-05,
      "loss": 0.32,
      "step": 730
    },
    {
      "epoch": 7.33,
      "learning_rate": 3.6633663366336634e-05,
      "loss": 0.3448,
      "step": 740
    },
    {
      "epoch": 7.42,
      "learning_rate": 3.712871287128713e-05,
      "loss": 0.3242,
      "step": 750
    },
    {
      "epoch": 7.52,
      "learning_rate": 3.762376237623763e-05,
      "loss": 0.3241,
      "step": 760
    },
    {
      "epoch": 7.62,
      "learning_rate": 3.811881188118812e-05,
      "loss": 0.3379,
      "step": 770
    },
    {
      "epoch": 7.72,
      "learning_rate": 3.861386138613862e-05,
      "loss": 0.3392,
      "step": 780
    },
    {
      "epoch": 7.82,
      "learning_rate": 3.910891089108911e-05,
      "loss": 0.2918,
      "step": 790
    },
    {
      "epoch": 7.92,
      "learning_rate": 3.9603960396039605e-05,
      "loss": 0.312,
      "step": 800
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.8969686033922771,
      "eval_loss": 0.3010924756526947,
      "eval_runtime": 59.78,
      "eval_samples_per_second": 92.707,
      "eval_steps_per_second": 2.911,
      "step": 808
    },
    {
      "epoch": 8.02,
      "learning_rate": 4.0099009900990106e-05,
      "loss": 0.2918,
      "step": 810
    },
    {
      "epoch": 8.12,
      "learning_rate": 4.05940594059406e-05,
      "loss": 0.308,
      "step": 820
    },
    {
      "epoch": 8.22,
      "learning_rate": 4.108910891089109e-05,
      "loss": 0.2897,
      "step": 830
    },
    {
      "epoch": 8.32,
      "learning_rate": 4.158415841584158e-05,
      "loss": 0.2688,
      "step": 840
    },
    {
      "epoch": 8.41,
      "learning_rate": 4.207920792079208e-05,
      "loss": 0.2767,
      "step": 850
    },
    {
      "epoch": 8.51,
      "learning_rate": 4.257425742574258e-05,
      "loss": 0.2737,
      "step": 860
    },
    {
      "epoch": 8.61,
      "learning_rate": 4.306930693069307e-05,
      "loss": 0.2805,
      "step": 870
    },
    {
      "epoch": 8.71,
      "learning_rate": 4.3564356435643565e-05,
      "loss": 0.2799,
      "step": 880
    },
    {
      "epoch": 8.81,
      "learning_rate": 4.405940594059406e-05,
      "loss": 0.2634,
      "step": 890
    },
    {
      "epoch": 8.91,
      "learning_rate": 4.455445544554456e-05,
      "loss": 0.2842,
      "step": 900
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.896066402020931,
      "eval_loss": 0.2943052351474762,
      "eval_runtime": 59.263,
      "eval_samples_per_second": 93.515,
      "eval_steps_per_second": 2.936,
      "step": 909
    },
    {
      "epoch": 9.0,
      "step": 909,
      "total_flos": 9.017904477803397e+18,
      "train_loss": 0.6144383929350196,
      "train_runtime": 3112.0693,
      "train_samples_per_second": 415.479,
      "train_steps_per_second": 3.245
    }
  ],
  "max_steps": 10100,
  "num_train_epochs": 100,
  "total_flos": 9.017904477803397e+18,
  "trial_name": null,
  "trial_params": null
}