|
{
  "best_metric": 1.6441926956176758,
  "best_model_checkpoint": "detr-r101-cd45rb-8ah-6l-256d-4096ffn\\checkpoint-46060",
  "epoch": 10.0,
  "global_step": 46060,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 9.002171081198437e-06,
      "loss": 2.704,
      "step": 4606
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.9783554077148438,
      "eval_runtime": 221.5023,
      "eval_samples_per_second": 8.041,
      "eval_steps_per_second": 1.007,
      "step": 4606
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.002171081198438e-06,
      "loss": 2.4631,
      "step": 9212
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8782960176467896,
      "eval_runtime": 220.8326,
      "eval_samples_per_second": 8.065,
      "eval_steps_per_second": 1.01,
      "step": 9212
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.002605297438124e-06,
      "loss": 2.3881,
      "step": 13818
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.827803134918213,
      "eval_runtime": 221.0372,
      "eval_samples_per_second": 8.057,
      "eval_steps_per_second": 1.009,
      "step": 13818
    },
    {
      "epoch": 4.0,
      "learning_rate": 6.002822405557969e-06,
      "loss": 2.3314,
      "step": 18424
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.8200912475585938,
      "eval_runtime": 221.0219,
      "eval_samples_per_second": 8.058,
      "eval_steps_per_second": 1.009,
      "step": 18424
    },
    {
      "epoch": 5.0,
      "learning_rate": 5.003039513677812e-06,
      "loss": 2.2787,
      "step": 23030
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.7909141778945923,
      "eval_runtime": 220.7952,
      "eval_samples_per_second": 8.066,
      "eval_steps_per_second": 1.01,
      "step": 23030
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.003690838037343e-06,
      "loss": 2.2413,
      "step": 27636
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.7557549476623535,
      "eval_runtime": 222.217,
      "eval_samples_per_second": 8.015,
      "eval_steps_per_second": 1.004,
      "step": 27636
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.0039079461571864e-06,
      "loss": 2.2084,
      "step": 32242
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.7492649555206299,
      "eval_runtime": 221.2301,
      "eval_samples_per_second": 8.05,
      "eval_steps_per_second": 1.008,
      "step": 32242
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.0045592705167173e-06,
      "loss": 2.1547,
      "step": 36848
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.6853615045547485,
      "eval_runtime": 220.6551,
      "eval_samples_per_second": 8.071,
      "eval_steps_per_second": 1.011,
      "step": 36848
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0047763786365611e-06,
      "loss": 2.1162,
      "step": 41454
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.655694603919983,
      "eval_runtime": 220.4726,
      "eval_samples_per_second": 8.078,
      "eval_steps_per_second": 1.011,
      "step": 41454
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.99348675640469e-09,
      "loss": 2.0913,
      "step": 46060
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.6441926956176758,
      "eval_runtime": 220.3436,
      "eval_samples_per_second": 8.083,
      "eval_steps_per_second": 1.012,
      "step": 46060
    },
    {
      "epoch": 10.0,
      "step": 46060,
      "total_flos": 1.5497980548560637e+20,
      "train_loss": 2.297725559223024,
      "train_runtime": 34588.596,
      "train_samples_per_second": 5.326,
      "train_steps_per_second": 1.332
    }
  ],
  "max_steps": 46060,
  "num_train_epochs": 10,
  "total_flos": 1.5497980548560637e+20,
  "trial_name": null,
  "trial_params": null
}
|
|