{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.4506769825918762,
"eval_steps": 500,
"global_step": 1500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.1,
"learning_rate": 4.8388136686009026e-05,
"loss": 0.6868,
"step": 100
},
{
"epoch": 0.19,
"learning_rate": 4.677627337201805e-05,
"loss": 0.5552,
"step": 200
},
{
"epoch": 0.29,
"learning_rate": 4.516441005802708e-05,
"loss": 0.5657,
"step": 300
},
{
"epoch": 0.39,
"learning_rate": 4.355254674403611e-05,
"loss": 0.4919,
"step": 400
},
{
"epoch": 0.48,
"learning_rate": 4.1940683430045136e-05,
"loss": 0.4673,
"step": 500
},
{
"epoch": 0.48,
"eval_loss": 0.4286854565143585,
"eval_runtime": 157.876,
"eval_samples_per_second": 13.093,
"eval_steps_per_second": 1.641,
"step": 500
},
{
"epoch": 0.58,
"learning_rate": 4.032882011605416e-05,
"loss": 0.4705,
"step": 600
},
{
"epoch": 0.68,
"learning_rate": 3.8716956802063184e-05,
"loss": 0.4717,
"step": 700
},
{
"epoch": 0.77,
"learning_rate": 3.7105093488072215e-05,
"loss": 0.4523,
"step": 800
},
{
"epoch": 0.87,
"learning_rate": 3.549323017408124e-05,
"loss": 0.4333,
"step": 900
},
{
"epoch": 0.97,
"learning_rate": 3.388136686009026e-05,
"loss": 0.4765,
"step": 1000
},
{
"epoch": 0.97,
"eval_loss": 0.3951832354068756,
"eval_runtime": 154.675,
"eval_samples_per_second": 13.364,
"eval_steps_per_second": 1.674,
"step": 1000
},
{
"epoch": 1.06,
"learning_rate": 3.226950354609929e-05,
"loss": 0.3246,
"step": 1100
},
{
"epoch": 1.16,
"learning_rate": 3.065764023210832e-05,
"loss": 0.3084,
"step": 1200
},
{
"epoch": 1.26,
"learning_rate": 2.9045776918117345e-05,
"loss": 0.3059,
"step": 1300
},
{
"epoch": 1.35,
"learning_rate": 2.7433913604126372e-05,
"loss": 0.3234,
"step": 1400
},
{
"epoch": 1.45,
"learning_rate": 2.5822050290135396e-05,
"loss": 0.3353,
"step": 1500
},
{
"epoch": 1.45,
"eval_loss": 0.5979052186012268,
"eval_runtime": 152.068,
"eval_samples_per_second": 13.593,
"eval_steps_per_second": 1.703,
"step": 1500
}
],
"logging_steps": 100,
"max_steps": 3102,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 597867456650040.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}