{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 7.702182284980744,
"global_step": 6000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.64,
"learning_rate": 4.598844672657253e-05,
"loss": 2.1906,
"step": 500
},
{
"epoch": 1.28,
"learning_rate": 4.197689345314506e-05,
"loss": 2.1043,
"step": 1000
},
{
"epoch": 1.93,
"learning_rate": 3.796534017971759e-05,
"loss": 2.0649,
"step": 1500
},
{
"epoch": 2.57,
"learning_rate": 3.395378690629012e-05,
"loss": 2.0252,
"step": 2000
},
{
"epoch": 3.21,
"learning_rate": 2.9942233632862648e-05,
"loss": 2.0077,
"step": 2500
},
{
"epoch": 3.85,
"learning_rate": 2.5930680359435173e-05,
"loss": 1.9865,
"step": 3000
},
{
"epoch": 4.49,
"learning_rate": 2.1919127086007704e-05,
"loss": 1.9667,
"step": 3500
},
{
"epoch": 5.13,
"learning_rate": 1.7907573812580232e-05,
"loss": 1.9602,
"step": 4000
},
{
"epoch": 5.78,
"learning_rate": 1.389602053915276e-05,
"loss": 1.9431,
"step": 4500
},
{
"epoch": 6.42,
"learning_rate": 9.884467265725289e-06,
"loss": 1.9363,
"step": 5000
},
{
"epoch": 7.06,
"learning_rate": 5.8729139922978185e-06,
"loss": 1.9312,
"step": 5500
},
{
"epoch": 7.7,
"learning_rate": 1.8613607188703468e-06,
"loss": 1.9242,
"step": 6000
}
],
"max_steps": 6232,
"num_train_epochs": 8,
"total_flos": 1.2529214226432e+16,
"trial_name": null,
"trial_params": null
}