{
  "best_metric": 0.1813586801290512,
  "best_model_checkpoint": "outputs/bart-base-detox/checkpoint-2430",
  "epoch": 17.995391705069125,
  "global_step": 2430,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 9e-06,
      "loss": 0.5633,
      "step": 135
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.25235414505004883,
      "eval_runtime": 12.2006,
      "eval_samples_per_second": 254.085,
      "eval_steps_per_second": 31.802,
      "step": 135
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.2589,
      "step": 270
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.21927106380462646,
      "eval_runtime": 11.2792,
      "eval_samples_per_second": 274.842,
      "eval_steps_per_second": 34.4,
      "step": 270
    },
    {
      "epoch": 3.0,
      "learning_rate": 7e-06,
      "loss": 0.2307,
      "step": 405
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.19929151237010956,
      "eval_runtime": 11.2313,
      "eval_samples_per_second": 276.016,
      "eval_steps_per_second": 34.546,
      "step": 405
    },
    {
      "epoch": 4.0,
      "learning_rate": 6e-06,
      "loss": 0.2171,
      "step": 540
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.20016591250896454,
      "eval_runtime": 11.0923,
      "eval_samples_per_second": 279.473,
      "eval_steps_per_second": 34.979,
      "step": 540
    },
    {
      "epoch": 5.0,
      "learning_rate": 5e-06,
      "loss": 0.2027,
      "step": 675
    },
    {
      "epoch": 5.0,
      "eval_loss": 0.19366399943828583,
      "eval_runtime": 11.2232,
      "eval_samples_per_second": 276.213,
      "eval_steps_per_second": 34.571,
      "step": 675
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1946,
      "step": 810
    },
    {
      "epoch": 6.0,
      "eval_loss": 0.19715721905231476,
      "eval_runtime": 11.2739,
      "eval_samples_per_second": 274.973,
      "eval_steps_per_second": 34.416,
      "step": 810
    },
    {
      "epoch": 7.0,
      "learning_rate": 3e-06,
      "loss": 0.1874,
      "step": 945
    },
    {
      "epoch": 7.0,
      "eval_loss": 0.19172348082065582,
      "eval_runtime": 11.1804,
      "eval_samples_per_second": 277.27,
      "eval_steps_per_second": 34.704,
      "step": 945
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1853,
      "step": 1080
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.1867838054895401,
      "eval_runtime": 11.2543,
      "eval_samples_per_second": 275.451,
      "eval_steps_per_second": 34.476,
      "step": 1080
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.1811,
      "step": 1215
    },
    {
      "epoch": 9.0,
      "eval_loss": 0.18896569311618805,
      "eval_runtime": 11.6639,
      "eval_samples_per_second": 265.777,
      "eval_steps_per_second": 33.265,
      "step": 1215
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0,
      "loss": 0.1776,
      "step": 1350
    },
    {
      "epoch": 10.0,
      "eval_loss": 0.18712429702281952,
      "eval_runtime": 11.9321,
      "eval_samples_per_second": 259.803,
      "eval_steps_per_second": 32.517,
      "step": 1350
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.5e-06,
      "loss": 0.1798,
      "step": 1485
    },
    {
      "epoch": 11.0,
      "eval_loss": 0.18577729165554047,
      "eval_runtime": 11.5191,
      "eval_samples_per_second": 269.117,
      "eval_steps_per_second": 33.683,
      "step": 1485
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1745,
      "step": 1620
    },
    {
      "epoch": 12.0,
      "eval_loss": 0.18199852108955383,
      "eval_runtime": 11.54,
      "eval_samples_per_second": 268.63,
      "eval_steps_per_second": 33.622,
      "step": 1620
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.5e-06,
      "loss": 0.1689,
      "step": 1755
    },
    {
      "epoch": 13.0,
      "eval_loss": 0.18266141414642334,
      "eval_runtime": 11.4865,
      "eval_samples_per_second": 269.881,
      "eval_steps_per_second": 33.779,
      "step": 1755
    },
    {
      "epoch": 14.0,
      "learning_rate": 3e-06,
      "loss": 0.1707,
      "step": 1890
    },
    {
      "epoch": 14.0,
      "eval_loss": 0.18427973985671997,
      "eval_runtime": 11.7088,
      "eval_samples_per_second": 264.757,
      "eval_steps_per_second": 33.137,
      "step": 1890
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.5e-06,
      "loss": 0.1658,
      "step": 2025
    },
    {
      "epoch": 15.0,
      "eval_loss": 0.18342117965221405,
      "eval_runtime": 11.3728,
      "eval_samples_per_second": 272.58,
      "eval_steps_per_second": 34.116,
      "step": 2025
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1647,
      "step": 2160
    },
    {
      "epoch": 16.0,
      "eval_loss": 0.18202748894691467,
      "eval_runtime": 11.7571,
      "eval_samples_per_second": 263.669,
      "eval_steps_per_second": 33.001,
      "step": 2160
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.5e-06,
      "loss": 0.1645,
      "step": 2295
    },
    {
      "epoch": 17.0,
      "eval_loss": 0.18365143239498138,
      "eval_runtime": 11.3384,
      "eval_samples_per_second": 273.408,
      "eval_steps_per_second": 34.22,
      "step": 2295
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.1633,
      "step": 2430
    },
    {
      "epoch": 18.0,
      "eval_loss": 0.1813586801290512,
      "eval_runtime": 11.7168,
      "eval_samples_per_second": 264.578,
      "eval_steps_per_second": 33.115,
      "step": 2430
    }
  ],
  "max_steps": 2700,
  "num_train_epochs": 20,
  "total_flos": 9680747638947840.0,
  "trial_name": null,
  "trial_params": null
}