{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9999490445859873,
  "global_step": 29436,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 4.915069982334556e-05,
      "loss": 1.8335,
      "step": 500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.830139964669113e-05,
      "loss": 1.3358,
      "step": 1000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.745209947003669e-05,
      "loss": 1.2093,
      "step": 1500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.660279929338225e-05,
      "loss": 1.1204,
      "step": 2000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5753499116727814e-05,
      "loss": 1.0652,
      "step": 2500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.490419894007338e-05,
      "loss": 0.9992,
      "step": 3000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.405489876341894e-05,
      "loss": 0.9696,
      "step": 3500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.32055985867645e-05,
      "loss": 0.9374,
      "step": 4000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.235629841011007e-05,
      "loss": 0.9147,
      "step": 4500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.150699823345564e-05,
      "loss": 0.8813,
      "step": 5000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.06576980568012e-05,
      "loss": 0.8565,
      "step": 5500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.980839788014676e-05,
      "loss": 0.8328,
      "step": 6000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.895909770349233e-05,
      "loss": 0.8021,
      "step": 6500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.810979752683789e-05,
      "loss": 0.7917,
      "step": 7000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.726049735018345e-05,
      "loss": 0.7935,
      "step": 7500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.641119717352902e-05,
      "loss": 0.7608,
      "step": 8000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.556189699687458e-05,
      "loss": 0.7619,
      "step": 8500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.471259682022014e-05,
      "loss": 0.7359,
      "step": 9000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.386329664356571e-05,
      "loss": 0.7315,
      "step": 9500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.301399646691127e-05,
      "loss": 0.7218,
      "step": 10000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.216469629025683e-05,
      "loss": 0.7217,
      "step": 10500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.131539611360239e-05,
      "loss": 0.7214,
      "step": 11000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0466095936947957e-05,
      "loss": 0.6976,
      "step": 11500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.961679576029352e-05,
      "loss": 0.6782,
      "step": 12000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8767495583639082e-05,
      "loss": 0.6743,
      "step": 12500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.791819540698465e-05,
      "loss": 0.6816,
      "step": 13000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.706889523033021e-05,
      "loss": 0.6558,
      "step": 13500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.621959505367577e-05,
      "loss": 0.6545,
      "step": 14000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5370294877021332e-05,
      "loss": 0.6515,
      "step": 14500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4520994700366897e-05,
      "loss": 0.6412,
      "step": 15000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.367169452371246e-05,
      "loss": 0.6214,
      "step": 15500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2822394347058025e-05,
      "loss": 0.6273,
      "step": 16000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.197309417040359e-05,
      "loss": 0.6249,
      "step": 16500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1123793993749154e-05,
      "loss": 0.6123,
      "step": 17000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0274493817094715e-05,
      "loss": 0.6152,
      "step": 17500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.942519364044028e-05,
      "loss": 0.6166,
      "step": 18000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.857589346378584e-05,
      "loss": 0.6075,
      "step": 18500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7726593287131404e-05,
      "loss": 0.5962,
      "step": 19000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.687729311047697e-05,
      "loss": 0.5921,
      "step": 19500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.602799293382253e-05,
      "loss": 0.5915,
      "step": 20000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5178692757168095e-05,
      "loss": 0.5669,
      "step": 20500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4329392580513656e-05,
      "loss": 0.5725,
      "step": 21000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.348009240385922e-05,
      "loss": 0.5728,
      "step": 21500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2630792227204785e-05,
      "loss": 0.5588,
      "step": 22000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1781492050550347e-05,
      "loss": 0.5648,
      "step": 22500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.093219187389591e-05,
      "loss": 0.5581,
      "step": 23000
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0082891697241474e-05,
      "loss": 0.5553,
      "step": 23500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.233591520587037e-06,
      "loss": 0.5439,
      "step": 24000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.3842913439326e-06,
      "loss": 0.5367,
      "step": 24500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.534991167278163e-06,
      "loss": 0.5401,
      "step": 25000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.685690990623726e-06,
      "loss": 0.5302,
      "step": 25500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.83639081396929e-06,
      "loss": 0.541,
      "step": 26000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.987090637314853e-06,
      "loss": 0.5218,
      "step": 26500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.137790460660416e-06,
      "loss": 0.5149,
      "step": 27000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.2884902840059795e-06,
      "loss": 0.5358,
      "step": 27500
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.4391901073515425e-06,
      "loss": 0.5353,
      "step": 28000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.589889930697106e-06,
      "loss": 0.5171,
      "step": 28500
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.405897540426689e-07,
      "loss": 0.528,
      "step": 29000
    },
    {
      "epoch": 3.0,
      "step": 29436,
      "total_flos": 2.745512583065764e+17,
      "train_loss": 0.7166476119453937,
      "train_runtime": 30706.9194,
      "train_samples_per_second": 9.586,
      "train_steps_per_second": 0.959
    }
  ],
  "max_steps": 29436,
  "num_train_epochs": 3,
  "total_flos": 2.745512583065764e+17,
  "trial_name": null,
  "trial_params": null
}