{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 30790,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.16, "learning_rate": 4.91880480675544e-05, "loss": 2.9405, "step": 500 },
    { "epoch": 0.32, "learning_rate": 4.8376096135108804e-05, "loss": 2.2896, "step": 1000 },
    { "epoch": 0.49, "learning_rate": 4.7564144202663205e-05, "loss": 2.0941, "step": 1500 },
    { "epoch": 0.65, "learning_rate": 4.6752192270217606e-05, "loss": 2.0077, "step": 2000 },
    { "epoch": 0.81, "learning_rate": 4.5940240337772006e-05, "loss": 1.9077, "step": 2500 },
    { "epoch": 0.97, "learning_rate": 4.512828840532641e-05, "loss": 1.8605, "step": 3000 },
    { "epoch": 1.14, "learning_rate": 4.431633647288081e-05, "loss": 1.4874, "step": 3500 },
    { "epoch": 1.3, "learning_rate": 4.350438454043521e-05, "loss": 1.4309, "step": 4000 },
    { "epoch": 1.46, "learning_rate": 4.269243260798961e-05, "loss": 1.4089, "step": 4500 },
    { "epoch": 1.62, "learning_rate": 4.188048067554401e-05, "loss": 1.4353, "step": 5000 },
    { "epoch": 1.79, "learning_rate": 4.106852874309841e-05, "loss": 1.4294, "step": 5500 },
    { "epoch": 1.95, "learning_rate": 4.025657681065281e-05, "loss": 1.3979, "step": 6000 },
    { "epoch": 2.11, "learning_rate": 3.944462487820721e-05, "loss": 1.1252, "step": 6500 },
    { "epoch": 2.27, "learning_rate": 3.8632672945761614e-05, "loss": 1.0013, "step": 7000 },
    { "epoch": 2.44, "learning_rate": 3.7820721013316015e-05, "loss": 1.0108, "step": 7500 },
    { "epoch": 2.6, "learning_rate": 3.7008769080870415e-05, "loss": 1.0181, "step": 8000 },
    { "epoch": 2.76, "learning_rate": 3.6196817148424816e-05, "loss": 1.0822, "step": 8500 },
    { "epoch": 2.92, "learning_rate": 3.538486521597922e-05, "loss": 1.0647, "step": 9000 },
    { "epoch": 3.09, "learning_rate": 3.457291328353362e-05, "loss": 0.8441, "step": 9500 },
    { "epoch": 3.25, "learning_rate": 3.376096135108802e-05, "loss": 0.6773, "step": 10000 },
    { "epoch": 3.41, "learning_rate": 3.294900941864242e-05, "loss": 0.6928, "step": 10500 },
    { "epoch": 3.57, "learning_rate": 3.213705748619682e-05, "loss": 0.6959, "step": 11000 },
    { "epoch": 3.73, "learning_rate": 3.132510555375122e-05, "loss": 0.7052, "step": 11500 },
    { "epoch": 3.9, "learning_rate": 3.051315362130562e-05, "loss": 0.7032, "step": 12000 },
    { "epoch": 4.06, "learning_rate": 2.970120168886002e-05, "loss": 0.5988, "step": 12500 },
    { "epoch": 4.22, "learning_rate": 2.888924975641442e-05, "loss": 0.4047, "step": 13000 },
    { "epoch": 4.38, "learning_rate": 2.8077297823968824e-05, "loss": 0.4227, "step": 13500 },
    { "epoch": 4.55, "learning_rate": 2.7265345891523225e-05, "loss": 0.4319, "step": 14000 },
    { "epoch": 4.71, "learning_rate": 2.6453393959077623e-05, "loss": 0.4593, "step": 14500 },
    { "epoch": 4.87, "learning_rate": 2.5641442026632023e-05, "loss": 0.4808, "step": 15000 },
    { "epoch": 5.03, "learning_rate": 2.4829490094186424e-05, "loss": 0.4197, "step": 15500 },
    { "epoch": 5.2, "learning_rate": 2.401753816174083e-05, "loss": 0.2503, "step": 16000 },
    { "epoch": 5.36, "learning_rate": 2.3205586229295226e-05, "loss": 0.3904, "step": 16500 },
    { "epoch": 5.52, "learning_rate": 2.2393634296849627e-05, "loss": 0.2675, "step": 17000 },
    { "epoch": 5.68, "learning_rate": 2.158168236440403e-05, "loss": 0.2714, "step": 17500 },
    { "epoch": 5.85, "learning_rate": 2.0769730431958428e-05, "loss": 0.2952, "step": 18000 },
    { "epoch": 6.01, "learning_rate": 1.995777849951283e-05, "loss": 0.3054, "step": 18500 },
    { "epoch": 6.17, "learning_rate": 1.914582656706723e-05, "loss": 0.1653, "step": 19000 },
    { "epoch": 6.33, "learning_rate": 1.833387463462163e-05, "loss": 0.1711, "step": 19500 },
    { "epoch": 6.5, "learning_rate": 1.752192270217603e-05, "loss": 0.1768, "step": 20000 },
    { "epoch": 6.66, "learning_rate": 1.6709970769730432e-05, "loss": 0.1753, "step": 20500 },
    { "epoch": 6.82, "learning_rate": 1.5898018837284833e-05, "loss": 0.165, "step": 21000 },
    { "epoch": 6.98, "learning_rate": 1.5086066904839236e-05, "loss": 0.2948, "step": 21500 },
    { "epoch": 7.15, "learning_rate": 1.4274114972393635e-05, "loss": 0.1176, "step": 22000 },
    { "epoch": 7.31, "learning_rate": 1.3462163039948036e-05, "loss": 0.123, "step": 22500 },
    { "epoch": 7.47, "learning_rate": 1.2650211107502435e-05, "loss": 0.1669, "step": 23000 },
    { "epoch": 7.63, "learning_rate": 1.1838259175056837e-05, "loss": 0.1026, "step": 23500 },
    { "epoch": 7.79, "learning_rate": 1.1026307242611238e-05, "loss": 0.1035, "step": 24000 },
    { "epoch": 7.96, "learning_rate": 1.0214355310165639e-05, "loss": 0.1048, "step": 24500 },
    { "epoch": 8.12, "learning_rate": 9.40240337772004e-06, "loss": 0.0831, "step": 25000 },
    { "epoch": 8.28, "learning_rate": 8.59045144527444e-06, "loss": 0.0641, "step": 25500 },
    { "epoch": 8.44, "learning_rate": 7.778499512828841e-06, "loss": 0.062, "step": 26000 },
    { "epoch": 8.61, "learning_rate": 6.966547580383241e-06, "loss": 0.0621, "step": 26500 },
    { "epoch": 8.77, "learning_rate": 6.154595647937642e-06, "loss": 0.0597, "step": 27000 },
    { "epoch": 8.93, "learning_rate": 5.342643715492044e-06, "loss": 0.0621, "step": 27500 },
    { "epoch": 9.09, "learning_rate": 4.530691783046444e-06, "loss": 0.051, "step": 28000 },
    { "epoch": 9.26, "learning_rate": 3.718739850600845e-06, "loss": 0.043, "step": 28500 },
    { "epoch": 9.42, "learning_rate": 2.9067879181552453e-06, "loss": 0.0455, "step": 29000 },
    { "epoch": 9.58, "learning_rate": 2.094835985709646e-06, "loss": 0.1287, "step": 29500 },
    { "epoch": 9.74, "learning_rate": 1.2828840532640467e-06, "loss": 0.1933, "step": 30000 },
    { "epoch": 9.91, "learning_rate": 4.7093212081844755e-07, "loss": 0.1462, "step": 30500 },
    {
      "epoch": 10.0,
      "step": 30790,
      "total_flos": 8.428451462293094e+16,
      "train_loss": 0.6537484721883469,
      "train_runtime": 11168.7196,
      "train_samples_per_second": 27.561,
      "train_steps_per_second": 2.757
    }
  ],
  "max_steps": 30790,
  "num_train_epochs": 10,
  "total_flos": 8.428451462293094e+16,
  "trial_name": null,
  "trial_params": null
}