{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.795748228428511,
  "global_step": 23500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.21,
      "learning_rate": 4.8957899124635265e-05,
      "loss": 4.6229,
      "step": 500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7915798249270535e-05,
      "loss": 3.8572,
      "step": 1000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.68736973739058e-05,
      "loss": 3.574,
      "step": 1500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.583159649854106e-05,
      "loss": 3.2686,
      "step": 2000
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.4789495623176324e-05,
      "loss": 3.0761,
      "step": 2500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.374739474781159e-05,
      "loss": 2.8529,
      "step": 3000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.2705293872446856e-05,
      "loss": 2.8138,
      "step": 3500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.166319299708212e-05,
      "loss": 2.7723,
      "step": 4000
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.062109212171739e-05,
      "loss": 2.7547,
      "step": 4500
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.9578991246352645e-05,
      "loss": 2.6349,
      "step": 5000
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.8536890370987915e-05,
      "loss": 2.5105,
      "step": 5500
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.749478949562318e-05,
      "loss": 2.4989,
      "step": 6000
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.645268862025844e-05,
      "loss": 2.509,
      "step": 6500
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.541058774489371e-05,
      "loss": 2.5043,
      "step": 7000
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.436848686952897e-05,
      "loss": 2.3714,
      "step": 7500
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.3326385994164236e-05,
      "loss": 2.3006,
      "step": 8000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.22842851187995e-05,
      "loss": 2.3216,
      "step": 8500
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.124218424343476e-05,
      "loss": 2.3245,
      "step": 9000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.020008336807003e-05,
      "loss": 2.3094,
      "step": 9500
    },
    {
      "epoch": 4.17,
      "learning_rate": 2.9157982492705294e-05,
      "loss": 2.1743,
      "step": 10000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.811588161734056e-05,
      "loss": 2.1446,
      "step": 10500
    },
    {
      "epoch": 4.59,
      "learning_rate": 2.7073780741975824e-05,
      "loss": 2.1617,
      "step": 11000
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.6031679866611093e-05,
      "loss": 2.1707,
      "step": 11500
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.4989578991246353e-05,
      "loss": 2.1719,
      "step": 12000
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.394747811588162e-05,
      "loss": 2.0137,
      "step": 12500
    },
    {
      "epoch": 5.42,
      "learning_rate": 2.2905377240516885e-05,
      "loss": 2.0293,
      "step": 13000
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.1863276365152145e-05,
      "loss": 2.0414,
      "step": 13500
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.082117548978741e-05,
      "loss": 2.032,
      "step": 14000
    },
    {
      "epoch": 6.04,
      "learning_rate": 1.9779074614422678e-05,
      "loss": 2.009,
      "step": 14500
    },
    {
      "epoch": 6.25,
      "learning_rate": 1.873697373905794e-05,
      "loss": 1.9163,
      "step": 15000
    },
    {
      "epoch": 6.46,
      "learning_rate": 1.7694872863693207e-05,
      "loss": 1.9179,
      "step": 15500
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.6652771988328473e-05,
      "loss": 1.9323,
      "step": 16000
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.5610671112963736e-05,
      "loss": 1.9234,
      "step": 16500
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.4568570237598999e-05,
      "loss": 1.8964,
      "step": 17000
    },
    {
      "epoch": 7.29,
      "learning_rate": 1.3526469362234265e-05,
      "loss": 1.8314,
      "step": 17500
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.248436848686953e-05,
      "loss": 1.8313,
      "step": 18000
    },
    {
      "epoch": 7.71,
      "learning_rate": 1.1442267611504794e-05,
      "loss": 1.8317,
      "step": 18500
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0400166736140059e-05,
      "loss": 1.8431,
      "step": 19000
    },
    {
      "epoch": 8.13,
      "learning_rate": 9.358065860775324e-06,
      "loss": 1.791,
      "step": 19500
    },
    {
      "epoch": 8.34,
      "learning_rate": 8.315964985410588e-06,
      "loss": 1.7665,
      "step": 20000
    },
    {
      "epoch": 8.55,
      "learning_rate": 7.273864110045853e-06,
      "loss": 1.7617,
      "step": 20500
    },
    {
      "epoch": 8.75,
      "learning_rate": 6.2317632346811174e-06,
      "loss": 1.7643,
      "step": 21000
    },
    {
      "epoch": 8.96,
      "learning_rate": 5.189662359316382e-06,
      "loss": 1.7838,
      "step": 21500
    },
    {
      "epoch": 9.17,
      "learning_rate": 4.147561483951647e-06,
      "loss": 1.722,
      "step": 22000
    },
    {
      "epoch": 9.38,
      "learning_rate": 3.1054606085869112e-06,
      "loss": 1.7245,
      "step": 22500
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.0633597332221763e-06,
      "loss": 1.7171,
      "step": 23000
    },
    {
      "epoch": 9.8,
      "learning_rate": 1.0212588578574407e-06,
      "loss": 1.7165,
      "step": 23500
    }
  ],
  "max_steps": 23990,
  "num_train_epochs": 10,
  "total_flos": 1687120822272000.0,
  "trial_name": null,
  "trial_params": null
}