|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5642544787699252,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 3.0211480362537768e-05,
      "loss": 1.4914,
      "step": 500
    },
    {
      "epoch": 0.11,
      "learning_rate": 3.985424375207197e-05,
      "loss": 1.103,
      "step": 1000
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.910964926929347e-05,
      "loss": 1.1117,
      "step": 1500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.775641735336502e-05,
      "loss": 1.1636,
      "step": 2000
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.583764809409783e-05,
      "loss": 1.1947,
      "step": 2500
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.34144538007923e-05,
      "loss": 1.1979,
      "step": 3000
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.05640125908444e-05,
      "loss": 1.4767,
      "step": 3500
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.1554332626816203e-05,
      "loss": 0.9765,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9009680903546416e-05,
      "loss": 1.4107,
      "step": 4500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.6260730367115988e-05,
      "loss": 1.5024,
      "step": 5000
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.9542651711125413e-05,
      "loss": 0.975,
      "step": 5500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.7394824453041223e-05,
      "loss": 1.1808,
      "step": 6000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.5133790365330005e-05,
      "loss": 1.1449,
      "step": 6500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.2799977645691646e-05,
      "loss": 1.4763,
      "step": 7000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.0426471341395805e-05,
      "loss": 1.6014,
      "step": 7500
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8046919211584273e-05,
      "loss": 1.6065,
      "step": 8000
    }
  ],
  "max_steps": 14178,
  "num_train_epochs": 1,
  "total_flos": 8919658633003008.0,
  "trial_name": null,
  "trial_params": null
}