{
"best_metric": 1.374252200126648,
"best_model_checkpoint": "outputs/checkpoint-104",
"epoch": 2.982078853046595,
"eval_steps": 500,
"global_step": 104,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.5734767025089605,
"grad_norm": 1.346787452697754,
"learning_rate": 2e-05,
"loss": 2.8792,
"step": 20
},
{
"epoch": 0.974910394265233,
"eval_loss": 2.1643929481506348,
"eval_runtime": 34.9442,
"eval_samples_per_second": 31.937,
"eval_steps_per_second": 4.006,
"step": 34
},
{
"epoch": 1.146953405017921,
"grad_norm": 0.6896965503692627,
"learning_rate": 4e-05,
"loss": 2.3839,
"step": 40
},
{
"epoch": 1.7204301075268817,
"grad_norm": 0.6072823405265808,
"learning_rate": 6e-05,
"loss": 1.8972,
"step": 60
},
{
"epoch": 1.978494623655914,
"eval_loss": 1.7010903358459473,
"eval_runtime": 34.9827,
"eval_samples_per_second": 31.902,
"eval_steps_per_second": 4.002,
"step": 69
},
{
"epoch": 2.293906810035842,
"grad_norm": 0.6996594071388245,
"learning_rate": 8e-05,
"loss": 1.7112,
"step": 80
},
{
"epoch": 2.867383512544803,
"grad_norm": 1.0495513677597046,
"learning_rate": 0.0001,
"loss": 1.5167,
"step": 100
},
{
"epoch": 2.982078853046595,
"eval_loss": 1.374252200126648,
"eval_runtime": 34.9766,
"eval_samples_per_second": 31.907,
"eval_steps_per_second": 4.003,
"step": 104
}
],
"logging_steps": 20,
"max_steps": 170,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 5518564418224128.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}