{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.96969696969697,
  "eval_steps": 500,
  "global_step": 615,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 1.9674796747967483e-05,
      "loss": 0.6473,
      "step": 10
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.934959349593496e-05,
      "loss": 0.414,
      "step": 20
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.902439024390244e-05,
      "loss": 0.3542,
      "step": 30
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.869918699186992e-05,
      "loss": 0.3396,
      "step": 40
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.83739837398374e-05,
      "loss": 0.3274,
      "step": 50
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.804878048780488e-05,
      "loss": 0.3238,
      "step": 60
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.772357723577236e-05,
      "loss": 0.3495,
      "step": 70
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.739837398373984e-05,
      "loss": 0.3068,
      "step": 80
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7073170731707317e-05,
      "loss": 0.3164,
      "step": 90
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6747967479674798e-05,
      "loss": 0.2989,
      "step": 100
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.642276422764228e-05,
      "loss": 0.287,
      "step": 110
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6097560975609757e-05,
      "loss": 0.2903,
      "step": 120
    },
    {
      "epoch": 0.99,
      "eval_f1": 0.40107752170008987,
      "eval_loss": 0.26861757040023804,
      "eval_runtime": 43.0151,
      "eval_samples_per_second": 22.992,
      "eval_steps_per_second": 1.441,
      "step": 123
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5772357723577235e-05,
      "loss": 0.2869,
      "step": 130
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.5447154471544717e-05,
      "loss": 0.2709,
      "step": 140
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.5121951219512196e-05,
      "loss": 0.2599,
      "step": 150
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.4796747967479676e-05,
      "loss": 0.2565,
      "step": 160
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.4471544715447157e-05,
      "loss": 0.2597,
      "step": 170
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.4146341463414635e-05,
      "loss": 0.2525,
      "step": 180
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.3821138211382115e-05,
      "loss": 0.2519,
      "step": 190
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.3495934959349594e-05,
      "loss": 0.2366,
      "step": 200
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.3170731707317076e-05,
      "loss": 0.2292,
      "step": 210
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.2845528455284555e-05,
      "loss": 0.2214,
      "step": 220
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.2520325203252033e-05,
      "loss": 0.2193,
      "step": 230
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 0.2171,
      "step": 240
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.6493130874909617,
      "eval_loss": 0.2168290913105011,
      "eval_runtime": 43.1471,
      "eval_samples_per_second": 22.922,
      "eval_steps_per_second": 1.437,
      "step": 247
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.1869918699186992e-05,
      "loss": 0.1994,
      "step": 250
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.1544715447154474e-05,
      "loss": 0.204,
      "step": 260
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.1219512195121953e-05,
      "loss": 0.1909,
      "step": 270
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0894308943089431e-05,
      "loss": 0.1906,
      "step": 280
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0569105691056911e-05,
      "loss": 0.1919,
      "step": 290
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.024390243902439e-05,
      "loss": 0.1825,
      "step": 300
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.91869918699187e-06,
      "loss": 0.1839,
      "step": 310
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.59349593495935e-06,
      "loss": 0.189,
      "step": 320
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.268292682926831e-06,
      "loss": 0.178,
      "step": 330
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.94308943089431e-06,
      "loss": 0.1903,
      "step": 340
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.617886178861789e-06,
      "loss": 0.1835,
      "step": 350
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.292682926829268e-06,
      "loss": 0.1722,
      "step": 360
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.967479674796748e-06,
      "loss": 0.1879,
      "step": 370
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.6612224696857214,
      "eval_loss": 0.19899873435497284,
      "eval_runtime": 43.1679,
      "eval_samples_per_second": 22.911,
      "eval_steps_per_second": 1.436,
      "step": 371
    },
    {
      "epoch": 3.07,
      "learning_rate": 7.64227642276423e-06,
      "loss": 0.1593,
      "step": 380
    },
    {
      "epoch": 3.15,
      "learning_rate": 7.317073170731707e-06,
      "loss": 0.1553,
      "step": 390
    },
    {
      "epoch": 3.23,
      "learning_rate": 6.991869918699188e-06,
      "loss": 0.1529,
      "step": 400
    },
    {
      "epoch": 3.31,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.1589,
      "step": 410
    },
    {
      "epoch": 3.39,
      "learning_rate": 6.341463414634147e-06,
      "loss": 0.1514,
      "step": 420
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.016260162601627e-06,
      "loss": 0.1524,
      "step": 430
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.691056910569106e-06,
      "loss": 0.1577,
      "step": 440
    },
    {
      "epoch": 3.64,
      "learning_rate": 5.365853658536586e-06,
      "loss": 0.1504,
      "step": 450
    },
    {
      "epoch": 3.72,
      "learning_rate": 5.040650406504065e-06,
      "loss": 0.151,
      "step": 460
    },
    {
      "epoch": 3.8,
      "learning_rate": 4.715447154471545e-06,
      "loss": 0.1539,
      "step": 470
    },
    {
      "epoch": 3.88,
      "learning_rate": 4.390243902439025e-06,
      "loss": 0.1575,
      "step": 480
    },
    {
      "epoch": 3.96,
      "learning_rate": 4.0650406504065046e-06,
      "loss": 0.1476,
      "step": 490
    },
    {
      "epoch": 4.0,
      "eval_f1": 0.7060445874511607,
      "eval_loss": 0.18790604174137115,
      "eval_runtime": 43.1492,
      "eval_samples_per_second": 22.92,
      "eval_steps_per_second": 1.437,
      "step": 495
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.7398373983739838e-06,
      "loss": 0.1412,
      "step": 500
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.414634146341464e-06,
      "loss": 0.1274,
      "step": 510
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.0894308943089435e-06,
      "loss": 0.1266,
      "step": 520
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.764227642276423e-06,
      "loss": 0.1275,
      "step": 530
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.4390243902439027e-06,
      "loss": 0.1222,
      "step": 540
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.1138211382113824e-06,
      "loss": 0.1275,
      "step": 550
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.788617886178862e-06,
      "loss": 0.1248,
      "step": 560
    },
    {
      "epoch": 4.61,
      "learning_rate": 1.4634146341463414e-06,
      "loss": 0.1308,
      "step": 570
    },
    {
      "epoch": 4.69,
      "learning_rate": 1.1382113821138213e-06,
      "loss": 0.1272,
      "step": 580
    },
    {
      "epoch": 4.77,
      "learning_rate": 8.130081300813009e-07,
      "loss": 0.1261,
      "step": 590
    },
    {
      "epoch": 4.85,
      "learning_rate": 4.878048780487805e-07,
      "loss": 0.1289,
      "step": 600
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.6260162601626018e-07,
      "loss": 0.1279,
      "step": 610
    },
    {
      "epoch": 4.97,
      "eval_f1": 0.7058281501958075,
      "eval_loss": 0.1904972940683365,
      "eval_runtime": 43.1284,
      "eval_samples_per_second": 22.932,
      "eval_steps_per_second": 1.438,
      "step": 615
    },
    {
      "epoch": 4.97,
      "step": 615,
      "total_flos": 2.6435427934519296e+16,
      "train_loss": 0.2132105562745071,
      "train_runtime": 4145.08,
      "train_samples_per_second": 9.546,
      "train_steps_per_second": 0.148
    }
  ],
  "logging_steps": 10,
  "max_steps": 615,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 2.6435427934519296e+16,
  "trial_name": null,
  "trial_params": null
}