{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9914638001896936,
  "global_step": 294,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 7.5e-06,
      "loss": 0.481,
      "step": 5
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.5e-05,
      "loss": 0.5091,
      "step": 10
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.25e-05,
      "loss": 0.4303,
      "step": 15
    },
    {
      "epoch": 0.2,
      "learning_rate": 3e-05,
      "loss": 0.4055,
      "step": 20
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.998662940889891e-05,
      "loss": 0.4338,
      "step": 25
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.9946541471956496e-05,
      "loss": 0.4793,
      "step": 30
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.9879807655761145e-05,
      "loss": 0.4291,
      "step": 35
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9786546929722055e-05,
      "loss": 0.4908,
      "step": 40
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.966692555397705e-05,
      "loss": 0.463,
      "step": 45
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.9521156782993066e-05,
      "loss": 0.528,
      "step": 50
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.9349500485387718e-05,
      "loss": 0.5178,
      "step": 55
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.9152262680649704e-05,
      "loss": 0.4602,
      "step": 60
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.8929794993583937e-05,
      "loss": 0.5044,
      "step": 65
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.8682494027454e-05,
      "loss": 0.4217,
      "step": 70
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.8410800656939512e-05,
      "loss": 0.502,
      "step": 75
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.811519924216873e-05,
      "loss": 0.4549,
      "step": 80
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.779621676522777e-05,
      "loss": 0.5692,
      "step": 85
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.7454421890685647e-05,
      "loss": 0.4312,
      "step": 90
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.709042395181008e-05,
      "loss": 0.4938,
      "step": 95
    },
    {
      "epoch": 1.02,
      "learning_rate": 2.6704871864281377e-05,
      "loss": 0.5433,
      "step": 100
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.6298452969340952e-05,
      "loss": 0.3459,
      "step": 105
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.58718918084368e-05,
      "loss": 0.3739,
      "step": 110
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.5425948831550528e-05,
      "loss": 0.375,
      "step": 115
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.496141904150859e-05,
      "loss": 0.3809,
      "step": 120
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.447913057669456e-05,
      "loss": 0.4183,
      "step": 125
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.3979943234689226e-05,
      "loss": 0.4207,
      "step": 130
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.3464746939470288e-05,
      "loss": 0.3767,
      "step": 135
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.2934460154904436e-05,
      "loss": 0.4248,
      "step": 140
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.2390028247360042e-05,
      "loss": 0.3374,
      "step": 145
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.183242180035951e-05,
      "loss": 0.4582,
      "step": 150
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.1262634884275948e-05,
      "loss": 0.4153,
      "step": 155
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.068168328415864e-05,
      "loss": 0.409,
      "step": 160
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.0090602688846884e-05,
      "loss": 0.4023,
      "step": 165
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.9490446844600375e-05,
      "loss": 0.3426,
      "step": 170
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.888228567653781e-05,
      "loss": 0.4059,
      "step": 175
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.8267203381232774e-05,
      "loss": 0.4449,
      "step": 180
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.764629649386713e-05,
      "loss": 0.4362,
      "step": 185
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7020671933387917e-05,
      "loss": 0.4874,
      "step": 190
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.63914450291526e-05,
      "loss": 0.3326,
      "step": 195
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5633197410233404e-05,
      "loss": 0.4035,
      "step": 200
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5e-05,
      "loss": 0.3291,
      "step": 205
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4366802589766598e-05,
      "loss": 0.353,
      "step": 210
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.373473400935433e-05,
      "loss": 0.319,
      "step": 215
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.3104921076168065e-05,
      "loss": 0.341,
      "step": 220
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.247848658636778e-05,
      "loss": 0.3276,
      "step": 225
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.185654731320877e-05,
      "loss": 0.3628,
      "step": 230
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.124021201611919e-05,
      "loss": 0.2727,
      "step": 235
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.0630579464064182e-05,
      "loss": 0.3466,
      "step": 240
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.0028736476720464e-05,
      "loss": 0.3187,
      "step": 245
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.435755986953485e-06,
      "loss": 0.3837,
      "step": 250
    },
    {
      "epoch": 2.6,
      "learning_rate": 8.852695128051192e-06,
      "loss": 0.2955,
      "step": 255
    },
    {
      "epoch": 2.65,
      "learning_rate": 8.280593349124432e-06,
      "loss": 0.3793,
      "step": 260
    },
    {
      "epoch": 2.7,
      "learning_rate": 7.720470562033787e-06,
      "loss": 0.3443,
      "step": 265
    },
    {
      "epoch": 2.75,
      "learning_rate": 7.17332532314626e-06,
      "loss": 0.2915,
      "step": 270
    },
    {
      "epoch": 2.8,
      "learning_rate": 6.640133053163455e-06,
      "loss": 0.3514,
      "step": 275
    },
    {
      "epoch": 2.85,
      "learning_rate": 6.12184429819474e-06,
      "loss": 0.3221,
      "step": 280
    },
    {
      "epoch": 2.9,
      "learning_rate": 5.619383035175448e-06,
      "loss": 0.2903,
      "step": 285
    },
    {
      "epoch": 2.95,
      "learning_rate": 5.133645024651171e-06,
      "loss": 0.3397,
      "step": 290
    }
  ],
  "max_steps": 392,
  "num_train_epochs": 4,
  "total_flos": 1.821325738775675e+17,
  "trial_name": null,
  "trial_params": null
}