{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 35.0,
  "global_step": 19705,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.89,
      "learning_rate": 4.8731286475513835e-05,
      "loss": 3.7437,
      "step": 500
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.746257295102766e-05,
      "loss": 3.203,
      "step": 1000
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.619385942654149e-05,
      "loss": 2.9759,
      "step": 1500
    },
    {
      "epoch": 3.55,
      "learning_rate": 4.492514590205532e-05,
      "loss": 2.7917,
      "step": 2000
    },
    {
      "epoch": 4.44,
      "learning_rate": 4.365643237756914e-05,
      "loss": 2.6427,
      "step": 2500
    },
    {
      "epoch": 5.33,
      "learning_rate": 4.2387718853082975e-05,
      "loss": 2.5541,
      "step": 3000
    },
    {
      "epoch": 6.22,
      "learning_rate": 4.111900532859681e-05,
      "loss": 2.4485,
      "step": 3500
    },
    {
      "epoch": 7.1,
      "learning_rate": 3.985029180411063e-05,
      "loss": 2.3436,
      "step": 4000
    },
    {
      "epoch": 7.99,
      "learning_rate": 3.8581578279624465e-05,
      "loss": 2.2573,
      "step": 4500
    },
    {
      "epoch": 8.88,
      "learning_rate": 3.73128647551383e-05,
      "loss": 2.1701,
      "step": 5000
    },
    {
      "epoch": 9.77,
      "learning_rate": 3.6044151230652115e-05,
      "loss": 2.0915,
      "step": 5500
    },
    {
      "epoch": 10.66,
      "learning_rate": 3.477543770616595e-05,
      "loss": 2.0424,
      "step": 6000
    },
    {
      "epoch": 11.55,
      "learning_rate": 3.350672418167978e-05,
      "loss": 1.9703,
      "step": 6500
    },
    {
      "epoch": 12.43,
      "learning_rate": 3.2238010657193605e-05,
      "loss": 1.9065,
      "step": 7000
    },
    {
      "epoch": 13.32,
      "learning_rate": 3.096929713270744e-05,
      "loss": 1.877,
      "step": 7500
    },
    {
      "epoch": 14.21,
      "learning_rate": 2.9700583608221266e-05,
      "loss": 1.8305,
      "step": 8000
    },
    {
      "epoch": 15.1,
      "learning_rate": 2.8431870083735095e-05,
      "loss": 1.7644,
      "step": 8500
    },
    {
      "epoch": 15.99,
      "learning_rate": 2.7163156559248927e-05,
      "loss": 1.7179,
      "step": 9000
    },
    {
      "epoch": 16.87,
      "learning_rate": 2.589444303476275e-05,
      "loss": 1.6708,
      "step": 9500
    },
    {
      "epoch": 17.76,
      "learning_rate": 2.462572951027658e-05,
      "loss": 1.6338,
      "step": 10000
    },
    {
      "epoch": 18.65,
      "learning_rate": 2.335701598579041e-05,
      "loss": 1.5861,
      "step": 10500
    },
    {
      "epoch": 19.54,
      "learning_rate": 2.208830246130424e-05,
      "loss": 1.553,
      "step": 11000
    },
    {
      "epoch": 20.43,
      "learning_rate": 2.0819588936818067e-05,
      "loss": 1.538,
      "step": 11500
    },
    {
      "epoch": 21.31,
      "learning_rate": 1.9550875412331896e-05,
      "loss": 1.5036,
      "step": 12000
    },
    {
      "epoch": 22.2,
      "learning_rate": 1.8282161887845725e-05,
      "loss": 1.4526,
      "step": 12500
    },
    {
      "epoch": 23.09,
      "learning_rate": 1.7013448363359553e-05,
      "loss": 1.4547,
      "step": 13000
    },
    {
      "epoch": 23.98,
      "learning_rate": 1.5744734838873386e-05,
      "loss": 1.4221,
      "step": 13500
    },
    {
      "epoch": 24.87,
      "learning_rate": 1.4476021314387211e-05,
      "loss": 1.3794,
      "step": 14000
    },
    {
      "epoch": 25.75,
      "learning_rate": 1.3207307789901041e-05,
      "loss": 1.3785,
      "step": 14500
    },
    {
      "epoch": 26.64,
      "learning_rate": 1.193859426541487e-05,
      "loss": 1.345,
      "step": 15000
    },
    {
      "epoch": 27.53,
      "learning_rate": 1.0669880740928699e-05,
      "loss": 1.326,
      "step": 15500
    },
    {
      "epoch": 28.42,
      "learning_rate": 9.401167216442528e-06,
      "loss": 1.3241,
      "step": 16000
    },
    {
      "epoch": 29.31,
      "learning_rate": 8.132453691956356e-06,
      "loss": 1.2962,
      "step": 16500
    },
    {
      "epoch": 30.2,
      "learning_rate": 6.863740167470185e-06,
      "loss": 1.2871,
      "step": 17000
    },
    {
      "epoch": 31.08,
      "learning_rate": 5.595026642984015e-06,
      "loss": 1.2761,
      "step": 17500
    },
    {
      "epoch": 31.97,
      "learning_rate": 4.3263131184978435e-06,
      "loss": 1.2549,
      "step": 18000
    },
    {
      "epoch": 32.86,
      "learning_rate": 3.0575995940116723e-06,
      "loss": 1.252,
      "step": 18500
    },
    {
      "epoch": 33.75,
      "learning_rate": 1.7888860695255013e-06,
      "loss": 1.2429,
      "step": 19000
    },
    {
      "epoch": 34.64,
      "learning_rate": 5.201725450393302e-07,
      "loss": 1.2436,
      "step": 19500
    },
    {
      "epoch": 35.0,
      "step": 19705,
      "total_flos": 9895714212962304.0,
      "train_loss": 1.8335362397363058,
      "train_runtime": 2725.9313,
      "train_samples_per_second": 216.554,
      "train_steps_per_second": 7.229
    }
  ],
  "max_steps": 19705,
  "num_train_epochs": 35,
  "total_flos": 9895714212962304.0,
  "trial_name": null,
  "trial_params": null
}