{
  "best_metric": 0.93,
  "best_model_checkpoint": "mikecho/NTQAI_pedestrian_gender_recognition_v1/checkpoint-25",
  "epoch": 2.88,
  "eval_steps": 500,
  "global_step": 36,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8,
      "grad_norm": 5.530843734741211,
      "learning_rate": 7.3125e-05,
      "loss": 0.4561,
      "step": 10
    },
    {
      "epoch": 0.96,
      "eval_accuracy": 0.92,
      "eval_loss": 0.2296125739812851,
      "eval_runtime": 60.9008,
      "eval_samples_per_second": 1.642,
      "eval_steps_per_second": 0.115,
      "step": 12
    },
    {
      "epoch": 1.6,
      "grad_norm": 4.8732476234436035,
      "learning_rate": 4.5e-05,
      "loss": 0.3486,
      "step": 20
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.93,
      "eval_loss": 0.18732059001922607,
      "eval_runtime": 60.9809,
      "eval_samples_per_second": 1.64,
      "eval_steps_per_second": 0.115,
      "step": 25
    },
    {
      "epoch": 2.4,
      "grad_norm": 2.4024288654327393,
      "learning_rate": 1.6875e-05,
      "loss": 0.2995,
      "step": 30
    },
    {
      "epoch": 2.88,
      "eval_accuracy": 0.92,
      "eval_loss": 0.19184868037700653,
      "eval_runtime": 61.0368,
      "eval_samples_per_second": 1.638,
      "eval_steps_per_second": 0.115,
      "step": 36
    },
    {
      "epoch": 2.88,
      "step": 36,
      "total_flos": 1.784652333412516e+17,
      "train_loss": 0.3550843662685818,
      "train_runtime": 4732.18,
      "train_samples_per_second": 0.507,
      "train_steps_per_second": 0.008
    }
  ],
  "logging_steps": 10,
  "max_steps": 36,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.784652333412516e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}