{
  "best_metric": 0.9373634377276038,
  "best_model_checkpoint": "./weather-base/checkpoint-342",
  "epoch": 2.995633187772926,
  "global_step": 513,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 9.615384615384616e-06,
      "loss": 2.5162,
      "step": 10
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.923076923076923e-05,
      "loss": 2.2089,
      "step": 20
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 1.744,
      "step": 30
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.846153846153846e-05,
      "loss": 1.1145,
      "step": 40
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8076923076923084e-05,
      "loss": 0.7667,
      "step": 50
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.9132321041214754e-05,
      "loss": 0.6373,
      "step": 60
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.804772234273319e-05,
      "loss": 0.4996,
      "step": 70
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.696312364425163e-05,
      "loss": 0.429,
      "step": 80
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.587852494577007e-05,
      "loss": 0.4243,
      "step": 90
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.47939262472885e-05,
      "loss": 0.3769,
      "step": 100
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.370932754880694e-05,
      "loss": 0.4072,
      "step": 110
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.262472885032538e-05,
      "loss": 0.3406,
      "step": 120
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.154013015184382e-05,
      "loss": 0.3224,
      "step": 130
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.045553145336225e-05,
      "loss": 0.4434,
      "step": 140
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.9370932754880696e-05,
      "loss": 0.3797,
      "step": 150
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.828633405639914e-05,
      "loss": 0.316,
      "step": 160
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.720173535791757e-05,
      "loss": 0.3277,
      "step": 170
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9002184996358339,
      "eval_loss": 0.29298296570777893,
      "eval_runtime": 34.0636,
      "eval_samples_per_second": 40.307,
      "eval_steps_per_second": 5.049,
      "step": 171
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.611713665943601e-05,
      "loss": 0.3201,
      "step": 180
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.503253796095445e-05,
      "loss": 0.3002,
      "step": 190
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.394793926247289e-05,
      "loss": 0.2141,
      "step": 200
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.286334056399132e-05,
      "loss": 0.2718,
      "step": 210
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.1778741865509765e-05,
      "loss": 0.2551,
      "step": 220
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.06941431670282e-05,
      "loss": 0.2694,
      "step": 230
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.960954446854664e-05,
      "loss": 0.3507,
      "step": 240
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.8524945770065074e-05,
      "loss": 0.2558,
      "step": 250
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.7440347071583517e-05,
      "loss": 0.185,
      "step": 260
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.635574837310195e-05,
      "loss": 0.2202,
      "step": 270
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.5271149674620394e-05,
      "loss": 0.2468,
      "step": 280
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.418655097613883e-05,
      "loss": 0.2374,
      "step": 290
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.3101952277657267e-05,
      "loss": 0.2063,
      "step": 300
    },
    {
      "epoch": 1.81,
      "learning_rate": 2.2017353579175707e-05,
      "loss": 0.1711,
      "step": 310
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.0932754880694143e-05,
      "loss": 0.2644,
      "step": 320
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.9848156182212583e-05,
      "loss": 0.2194,
      "step": 330
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.876355748373102e-05,
      "loss": 0.2401,
      "step": 340
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9373634377276038,
      "eval_loss": 0.19143207371234894,
      "eval_runtime": 72.116,
      "eval_samples_per_second": 19.039,
      "eval_steps_per_second": 2.385,
      "step": 342
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.767895878524946e-05,
      "loss": 0.1665,
      "step": 350
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.6594360086767896e-05,
      "loss": 0.1463,
      "step": 360
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.5509761388286336e-05,
      "loss": 0.1939,
      "step": 370
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.4425162689804772e-05,
      "loss": 0.2001,
      "step": 380
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.334056399132321e-05,
      "loss": 0.2046,
      "step": 390
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.225596529284165e-05,
      "loss": 0.172,
      "step": 400
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.1171366594360088e-05,
      "loss": 0.1597,
      "step": 410
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.0086767895878525e-05,
      "loss": 0.1624,
      "step": 420
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.002169197396963e-06,
      "loss": 0.1609,
      "step": 430
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.917570498915401e-06,
      "loss": 0.1972,
      "step": 440
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.832971800433839e-06,
      "loss": 0.1007,
      "step": 450
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.748373101952278e-06,
      "loss": 0.0885,
      "step": 460
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.663774403470715e-06,
      "loss": 0.1321,
      "step": 470
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.5791757049891544e-06,
      "loss": 0.1699,
      "step": 480
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.4945770065075926e-06,
      "loss": 0.1647,
      "step": 490
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.4099783080260303e-06,
      "loss": 0.1175,
      "step": 500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.2537960954446857e-07,
      "loss": 0.1036,
      "step": 510
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9359067734887109,
      "eval_loss": 0.18145906925201416,
      "eval_runtime": 29.9478,
      "eval_samples_per_second": 45.846,
      "eval_steps_per_second": 5.743,
      "step": 513
    },
    {
      "epoch": 3.0,
      "step": 513,
      "total_flos": 1.2743012151733248e+18,
      "train_loss": 0.393069394557332,
      "train_runtime": 1296.2409,
      "train_samples_per_second": 12.704,
      "train_steps_per_second": 0.396
    }
  ],
  "max_steps": 513,
  "num_train_epochs": 3,
  "total_flos": 1.2743012151733248e+18,
  "trial_name": null,
  "trial_params": null
}