{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 31212,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 2.9519415609381006e-05,
      "loss": 2.3707,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.9038831218762015e-05,
      "loss": 1.8657,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.8558246828143025e-05,
      "loss": 1.8001,
      "step": 1500
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.807766243752403e-05,
      "loss": 1.8241,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.759707804690504e-05,
      "loss": 1.8568,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.7116493656286045e-05,
      "loss": 1.7976,
      "step": 3000
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.663590926566705e-05,
      "loss": 1.7969,
      "step": 3500
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.6155324875048056e-05,
      "loss": 1.772,
      "step": 4000
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.567474048442907e-05,
      "loss": 1.7746,
      "step": 4500
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.5194156093810074e-05,
      "loss": 1.7575,
      "step": 5000
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.471357170319108e-05,
      "loss": 1.7777,
      "step": 5500
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.423298731257209e-05,
      "loss": 1.7731,
      "step": 6000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.3752402921953095e-05,
      "loss": 1.7775,
      "step": 6500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.32718185313341e-05,
      "loss": 1.7627,
      "step": 7000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.2791234140715113e-05,
      "loss": 1.7435,
      "step": 7500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.231064975009612e-05,
      "loss": 1.7182,
      "step": 8000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.1830065359477124e-05,
      "loss": 1.7493,
      "step": 8500
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1349480968858133e-05,
      "loss": 1.715,
      "step": 9000
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.086889657823914e-05,
      "loss": 1.7109,
      "step": 9500
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.0388312187620145e-05,
      "loss": 1.651,
      "step": 10000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.9907727797001154e-05,
      "loss": 1.7383,
      "step": 10500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.9427143406382163e-05,
      "loss": 1.6754,
      "step": 11000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.8946559015763168e-05,
      "loss": 1.5948,
      "step": 11500
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.8465974625144174e-05,
      "loss": 1.6431,
      "step": 12000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7985390234525183e-05,
      "loss": 1.6536,
      "step": 12500
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.7504805843906192e-05,
      "loss": 1.5941,
      "step": 13000
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.7024221453287198e-05,
      "loss": 1.6606,
      "step": 13500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6543637062668207e-05,
      "loss": 1.6621,
      "step": 14000
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6063052672049212e-05,
      "loss": 1.622,
      "step": 14500
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5582468281430218e-05,
      "loss": 1.6216,
      "step": 15000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5101883890811225e-05,
      "loss": 1.618,
      "step": 15500
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.4621299500192235e-05,
      "loss": 1.5934,
      "step": 16000
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.414071510957324e-05,
      "loss": 1.602,
      "step": 16500
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.366013071895425e-05,
      "loss": 1.5458,
      "step": 17000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.3179546328335255e-05,
      "loss": 1.5039,
      "step": 17500
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2698961937716264e-05,
      "loss": 1.5694,
      "step": 18000
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.2218377547097271e-05,
      "loss": 1.5937,
      "step": 18500
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.1737793156478277e-05,
      "loss": 1.5364,
      "step": 19000
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.1257208765859286e-05,
      "loss": 1.5487,
      "step": 19500
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.0776624375240293e-05,
      "loss": 1.5565,
      "step": 20000
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.0296039984621299e-05,
      "loss": 1.5503,
      "step": 20500
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.815455594002308e-06,
      "loss": 1.577,
      "step": 21000
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.334871203383314e-06,
      "loss": 1.5346,
      "step": 21500
    },
    {
      "epoch": 1.41,
      "learning_rate": 8.854286812764321e-06,
      "loss": 1.5686,
      "step": 22000
    },
    {
      "epoch": 1.44,
      "learning_rate": 8.37370242214533e-06,
      "loss": 1.5372,
      "step": 22500
    },
    {
      "epoch": 1.47,
      "learning_rate": 7.893118031526336e-06,
      "loss": 1.5534,
      "step": 23000
    },
    {
      "epoch": 1.51,
      "learning_rate": 7.412533640907344e-06,
      "loss": 1.5061,
      "step": 23500
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.9319492502883506e-06,
      "loss": 1.5766,
      "step": 24000
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.451364859669358e-06,
      "loss": 1.5215,
      "step": 24500
    },
    {
      "epoch": 1.6,
      "learning_rate": 5.970780469050365e-06,
      "loss": 1.5602,
      "step": 25000
    },
    {
      "epoch": 1.63,
      "learning_rate": 5.490196078431373e-06,
      "loss": 1.5311,
      "step": 25500
    },
    {
      "epoch": 1.67,
      "learning_rate": 5.00961168781238e-06,
      "loss": 1.5018,
      "step": 26000
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.529027297193387e-06,
      "loss": 1.5673,
      "step": 26500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.048442906574395e-06,
      "loss": 1.5459,
      "step": 27000
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.5678585159554016e-06,
      "loss": 1.5127,
      "step": 27500
    },
    {
      "epoch": 1.79,
      "learning_rate": 3.0872741253364094e-06,
      "loss": 1.4852,
      "step": 28000
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.6066897347174164e-06,
      "loss": 1.5609,
      "step": 28500
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.1261053440984237e-06,
      "loss": 1.4866,
      "step": 29000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.645520953479431e-06,
      "loss": 1.5449,
      "step": 29500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.1649365628604384e-06,
      "loss": 1.4855,
      "step": 30000
    },
    {
      "epoch": 1.95,
      "learning_rate": 6.843521722414456e-07,
      "loss": 1.5881,
      "step": 30500
    },
    {
      "epoch": 1.99,
      "learning_rate": 2.037677816224529e-07,
      "loss": 1.5224,
      "step": 31000
    },
    {
      "epoch": 2.0,
      "step": 31212,
      "total_flos": 3943477040569344.0,
      "train_loss": 1.6433022868307863,
      "train_runtime": 3227.0969,
      "train_samples_per_second": 116.057,
      "train_steps_per_second": 9.672
    }
  ],
  "max_steps": 31212,
  "num_train_epochs": 2,
  "total_flos": 3943477040569344.0,
  "trial_name": null,
  "trial_params": null
}