Tags: Text Generation, Transformers, PyTorch, llama, Inference Endpoints, text-generation-inference
flacuna-13b-v1.0 / trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5759470128748155,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 1.999556017108351e-05,
      "loss": 1.1139,
      "step": 500
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9982028384441793e-05,
      "loss": 1.1257,
      "step": 1000
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9959416294461e-05,
      "loss": 1.104,
      "step": 1500
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9927744470571345e-05,
      "loss": 1.1,
      "step": 2000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9887041723527222e-05,
      "loss": 1.1052,
      "step": 2500
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.98373450791991e-05,
      "loss": 1.0969,
      "step": 3000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9778699744892346e-05,
      "loss": 1.092,
      "step": 3500
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9711159068223743e-05,
      "loss": 1.1149,
      "step": 4000
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9634784488593005e-05,
      "loss": 1.1047,
      "step": 4500
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.95496454812935e-05,
      "loss": 1.0977,
      "step": 5000
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9455819494313005e-05,
      "loss": 1.1036,
      "step": 5500
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.9353391877881957e-05,
      "loss": 1.0948,
      "step": 6000
    }
  ],
  "max_steps": 52085,
  "num_train_epochs": 5,
  "total_flos": 7.584298167491887e+19,
  "trial_name": null,
  "trial_params": null
}
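
The JSON above is the training-state file that the Hugging Face transformers Trainer writes alongside its checkpoints: log_history holds one entry per logging interval (every 500 steps here), while global_step and max_steps track overall progress. A minimal sketch for inspecting it offline follows; it assumes the file has been downloaded locally as trainer_state.json and uses only the fields shown above.

import json

# Load the state dump produced by transformers.Trainer
# (assumes the file is in the current directory as "trainer_state.json").
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry records the metrics logged at one step.
for entry in state["log_history"]:
    print(f"step {entry['step']:>6}  epoch {entry['epoch']:.2f}  "
          f"lr {entry['learning_rate']:.3e}  loss {entry['loss']:.4f}")

# Overall progress: 6000 of 52085 planned optimizer steps (~11.5%).
done = state["global_step"] / state["max_steps"]
print(f"{state['global_step']}/{state['max_steps']} steps, "
      f"{done:.1%} of the {state['num_train_epochs']}-epoch run complete")

Read this way, the log shows the learning rate decaying slowly from its initial 2e-5 over the 52085-step schedule, with the loss hovering around 1.09-1.13 through the first 0.58 epochs.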