{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 22340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 2.9328558639212178e-05,
      "loss": 2.0835,
      "step": 500
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.865711727842435e-05,
      "loss": 1.2634,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.798567591763653e-05,
      "loss": 1.1116,
      "step": 1500
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.7314234556848706e-05,
      "loss": 1.0549,
      "step": 2000
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.6642793196060876e-05,
      "loss": 1.0491,
      "step": 2500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.5971351835273053e-05,
      "loss": 1.0099,
      "step": 3000
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.529991047448523e-05,
      "loss": 0.9937,
      "step": 3500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.4628469113697404e-05,
      "loss": 0.9877,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.395702775290958e-05,
      "loss": 0.9661,
      "step": 4500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.3285586392121754e-05,
      "loss": 0.9319,
      "step": 5000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.261414503133393e-05,
      "loss": 0.9603,
      "step": 5500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.194270367054611e-05,
      "loss": 0.9003,
      "step": 6000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1271262309758282e-05,
      "loss": 0.9247,
      "step": 6500
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.0599820948970456e-05,
      "loss": 0.8945,
      "step": 7000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.9928379588182633e-05,
      "loss": 0.8796,
      "step": 7500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.9256938227394806e-05,
      "loss": 0.8889,
      "step": 8000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.8585496866606983e-05,
      "loss": 0.874,
      "step": 8500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.791405550581916e-05,
      "loss": 0.8714,
      "step": 9000
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.7242614145031334e-05,
      "loss": 0.8597,
      "step": 9500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.657117278424351e-05,
      "loss": 0.8679,
      "step": 10000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5899731423455688e-05,
      "loss": 0.8457,
      "step": 10500
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.522829006266786e-05,
      "loss": 0.845,
      "step": 11000
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.4556848701880037e-05,
      "loss": 0.6936,
      "step": 11500
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.3885407341092211e-05,
      "loss": 0.5779,
      "step": 12000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.3213965980304386e-05,
      "loss": 0.5945,
      "step": 12500
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.2542524619516563e-05,
      "loss": 0.6197,
      "step": 13000
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.1871083258728739e-05,
      "loss": 0.5957,
      "step": 13500
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.1199641897940914e-05,
      "loss": 0.6061,
      "step": 14000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.0528200537153088e-05,
      "loss": 0.6015,
      "step": 14500
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.856759176365265e-06,
      "loss": 0.581,
      "step": 15000
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.18531781557744e-06,
      "loss": 0.5711,
      "step": 15500
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.513876454789615e-06,
      "loss": 0.6045,
      "step": 16000
    },
    {
      "epoch": 1.48,
      "learning_rate": 7.84243509400179e-06,
      "loss": 0.5616,
      "step": 16500
    },
    {
      "epoch": 1.52,
      "learning_rate": 7.170993733213966e-06,
      "loss": 0.5823,
      "step": 17000
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.4995523724261414e-06,
      "loss": 0.5662,
      "step": 17500
    },
    {
      "epoch": 1.61,
      "learning_rate": 5.828111011638317e-06,
      "loss": 0.557,
      "step": 18000
    },
    {
      "epoch": 1.66,
      "learning_rate": 5.156669650850493e-06,
      "loss": 0.5472,
      "step": 18500
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.4852282900626675e-06,
      "loss": 0.5643,
      "step": 19000
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.8137869292748437e-06,
      "loss": 0.5451,
      "step": 19500
    },
    {
      "epoch": 1.79,
      "learning_rate": 3.142345568487019e-06,
      "loss": 0.5431,
      "step": 20000
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.4709042076991944e-06,
      "loss": 0.568,
      "step": 20500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7994628469113698e-06,
      "loss": 0.5439,
      "step": 21000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.1280214861235453e-06,
      "loss": 0.5323,
      "step": 21500
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.565801253357207e-07,
      "loss": 0.5353,
      "step": 22000
    },
    {
      "epoch": 2.0,
      "step": 22340,
      "total_flos": 1.2447970604337254e+17,
      "train_loss": 0.7862384369251657,
      "train_runtime": 27653.2865,
      "train_samples_per_second": 6.463,
      "train_steps_per_second": 0.808
    }
  ],
  "max_steps": 22340,
  "num_train_epochs": 2,
  "total_flos": 1.2447970604337254e+17,
  "trial_name": null,
  "trial_params": null
}