|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 10.0, |
|
"global_step": 24100, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 92.256, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 52.0764, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 6e-06, |
|
"loss": 41.3627, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 37.237, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1e-05, |
|
"loss": 34.3031, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.2e-05, |
|
"loss": 31.7147, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 29.5475, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 27.914, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8e-05, |
|
"loss": 26.1499, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2e-05, |
|
"loss": 24.1629, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 22.1829, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4e-05, |
|
"loss": 20.2111, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 18.6642, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 16.7946, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3e-05, |
|
"loss": 15.0783, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 13.366, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 11.706, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.6e-05, |
|
"loss": 10.2113, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.8e-05, |
|
"loss": 8.5921, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4e-05, |
|
"loss": 7.2445, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"eval_bleu": 1.523, |
|
"eval_em": 0.0, |
|
"eval_gen_len": 204.48, |
|
"eval_loss": 7.561939239501953, |
|
        "eval_rm": null,
|
"eval_runtime": 100.5427, |
|
"eval_samples_per_second": 0.497, |
|
"eval_steps_per_second": 0.07, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.2e-05, |
|
"loss": 6.8461, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 6.223, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 6.1167, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8e-05, |
|
"loss": 5.7623, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 5e-05, |
|
"loss": 5.5056, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.976851851851852e-05, |
|
"loss": 5.2979, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.9537037037037035e-05, |
|
"loss": 4.9765, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.930555555555556e-05, |
|
"loss": 4.7721, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.9074074074074075e-05, |
|
"loss": 4.6957, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.8842592592592595e-05, |
|
"loss": 4.7368, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.8611111111111115e-05, |
|
"loss": 4.6074, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.837962962962963e-05, |
|
"loss": 4.3678, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.814814814814815e-05, |
|
"loss": 4.19, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.791666666666667e-05, |
|
"loss": 4.0959, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.768518518518519e-05, |
|
"loss": 4.0323, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.745370370370371e-05, |
|
"loss": 3.7777, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.722222222222222e-05, |
|
"loss": 3.8014, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.699074074074074e-05, |
|
"loss": 3.6346, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.675925925925926e-05, |
|
"loss": 3.4876, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.652777777777778e-05, |
|
"loss": 3.4458, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"eval_bleu": 16.0372, |
|
"eval_em": 0.0, |
|
"eval_gen_len": 76.36, |
|
"eval_loss": 3.4201648235321045, |
|
        "eval_rm": null,
|
"eval_runtime": 91.7678, |
|
"eval_samples_per_second": 0.545, |
|
"eval_steps_per_second": 0.076, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.62962962962963e-05, |
|
"loss": 3.4425, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.6064814814814814e-05, |
|
"loss": 3.3346, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.5833333333333334e-05, |
|
"loss": 3.2582, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.5601851851851854e-05, |
|
"loss": 3.1174, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.5370370370370374e-05, |
|
"loss": 2.9873, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.5138888888888894e-05, |
|
"loss": 3.1041, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.490740740740741e-05, |
|
"loss": 2.9365, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.467592592592593e-05, |
|
"loss": 2.9494, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 2.8309, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.4212962962962966e-05, |
|
"loss": 2.6814, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.3981481481481486e-05, |
|
"loss": 2.6331, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.375e-05, |
|
"loss": 2.599, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.351851851851852e-05, |
|
"loss": 2.5576, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.328703703703704e-05, |
|
"loss": 2.4838, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.305555555555556e-05, |
|
"loss": 2.4532, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.282407407407408e-05, |
|
"loss": 2.3583, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.259259259259259e-05, |
|
"loss": 2.4245, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.236111111111111e-05, |
|
"loss": 2.3121, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.212962962962963e-05, |
|
"loss": 2.3254, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.1898148148148145e-05, |
|
"loss": 2.2571, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"eval_bleu": 34.5826, |
|
"eval_em": 0.0, |
|
"eval_gen_len": 48.98, |
|
"eval_loss": 2.197143316268921, |
|
"eval_rm": 0.8125, |
|
"eval_runtime": 79.1278, |
|
"eval_samples_per_second": 0.632, |
|
"eval_steps_per_second": 0.088, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 2.2655, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.1435185185185185e-05, |
|
"loss": 2.1366, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.1203703703703705e-05, |
|
"loss": 2.1777, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.0972222222222225e-05, |
|
"loss": 2.1166, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.074074074074074e-05, |
|
"loss": 2.0804, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.0509259259259265e-05, |
|
"loss": 2.1024, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.027777777777778e-05, |
|
"loss": 2.0787, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.00462962962963e-05, |
|
"loss": 1.9875, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.981481481481482e-05, |
|
"loss": 1.9653, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.958333333333333e-05, |
|
"loss": 1.9254, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.935185185185186e-05, |
|
"loss": 1.9232, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.912037037037037e-05, |
|
"loss": 1.8927, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 1.7833, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.865740740740741e-05, |
|
"loss": 1.6375, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.8425925925925924e-05, |
|
"loss": 1.7233, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.8194444444444444e-05, |
|
"loss": 1.6997, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.7962962962962964e-05, |
|
"loss": 1.746, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.7731481481481484e-05, |
|
"loss": 1.6235, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 1.564, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.726851851851852e-05, |
|
"loss": 1.5696, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"eval_bleu": 53.8822, |
|
"eval_em": 0.0, |
|
"eval_gen_len": 48.7, |
|
"eval_loss": 1.680190086364746, |
|
"eval_rm": 1.0, |
|
"eval_runtime": 52.3216, |
|
"eval_samples_per_second": 0.956, |
|
"eval_steps_per_second": 0.134, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 1.5516, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.6805555555555556e-05, |
|
"loss": 1.5438, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.6574074074074076e-05, |
|
"loss": 1.5262, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.6342592592592596e-05, |
|
"loss": 1.5738, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.611111111111111e-05, |
|
"loss": 1.5799, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.587962962962963e-05, |
|
"loss": 1.5116, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.564814814814815e-05, |
|
"loss": 1.5699, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.541666666666667e-05, |
|
"loss": 1.4994, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.518518518518519e-05, |
|
"loss": 1.3772, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.49537037037037e-05, |
|
"loss": 1.4322, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.472222222222222e-05, |
|
"loss": 1.4604, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.449074074074074e-05, |
|
"loss": 1.3924, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.425925925925926e-05, |
|
"loss": 1.348, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.402777777777778e-05, |
|
"loss": 1.3014, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.3796296296296295e-05, |
|
"loss": 1.3731, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.3564814814814815e-05, |
|
"loss": 1.39, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.2038, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.3101851851851855e-05, |
|
"loss": 1.1698, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.2870370370370375e-05, |
|
"loss": 1.1514, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.263888888888889e-05, |
|
"loss": 1.1359, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"eval_bleu": 64.5591, |
|
"eval_em": 0.02, |
|
"eval_gen_len": 45.2, |
|
"eval_loss": 1.346365213394165, |
|
"eval_rm": 0.973, |
|
"eval_runtime": 47.5326, |
|
"eval_samples_per_second": 1.052, |
|
"eval_steps_per_second": 0.147, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.240740740740741e-05, |
|
"loss": 1.1396, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.217592592592593e-05, |
|
"loss": 1.1749, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.194444444444444e-05, |
|
"loss": 1.1098, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.171296296296297e-05, |
|
"loss": 1.0741, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.148148148148148e-05, |
|
"loss": 1.1388, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.125e-05, |
|
"loss": 1.0168, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.101851851851852e-05, |
|
"loss": 1.0862, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.0787037037037034e-05, |
|
"loss": 1.0689, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.055555555555556e-05, |
|
"loss": 1.0804, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 3.0324074074074077e-05, |
|
"loss": 1.0312, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.0092592592592593e-05, |
|
"loss": 1.0184, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.9861111111111113e-05, |
|
"loss": 0.992, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.962962962962963e-05, |
|
"loss": 1.0191, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.9398148148148146e-05, |
|
"loss": 1.0842, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.916666666666667e-05, |
|
"loss": 1.0508, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.8935185185185186e-05, |
|
"loss": 0.9815, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.8703703703703706e-05, |
|
"loss": 0.9645, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.8472222222222223e-05, |
|
"loss": 0.9826, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.824074074074074e-05, |
|
"loss": 1.0036, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.8009259259259263e-05, |
|
"loss": 0.9994, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"eval_bleu": 68.0869, |
|
"eval_em": 0.02, |
|
"eval_gen_len": 47.76, |
|
"eval_loss": 1.0576136112213135, |
|
"eval_rm": 0.8889, |
|
"eval_runtime": 48.2259, |
|
"eval_samples_per_second": 1.037, |
|
"eval_steps_per_second": 0.145, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.8445, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.75462962962963e-05, |
|
"loss": 0.833, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 2.7314814814814816e-05, |
|
"loss": 0.8034, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 2.7083333333333332e-05, |
|
"loss": 0.8154, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.6851851851851855e-05, |
|
"loss": 0.8026, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.6620370370370372e-05, |
|
"loss": 0.8147, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 2.6388888888888892e-05, |
|
"loss": 0.8466, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 2.615740740740741e-05, |
|
"loss": 0.7881, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.5925925925925925e-05, |
|
"loss": 0.7856, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.5694444444444445e-05, |
|
"loss": 0.8058, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.5462962962962965e-05, |
|
"loss": 0.7704, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.5231481481481485e-05, |
|
"loss": 0.7572, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.7757, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 2.4768518518518518e-05, |
|
"loss": 0.7598, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.4537037037037038e-05, |
|
"loss": 0.7268, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.4305555555555558e-05, |
|
"loss": 0.7455, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.4074074074074074e-05, |
|
"loss": 0.7525, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.3842592592592594e-05, |
|
"loss": 0.7205, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 2.361111111111111e-05, |
|
"loss": 0.7741, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.337962962962963e-05, |
|
"loss": 0.7275, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"eval_bleu": 74.1032, |
|
"eval_em": 0.02, |
|
"eval_gen_len": 46.52, |
|
"eval_loss": 0.952226996421814, |
|
"eval_rm": 0.9556, |
|
"eval_runtime": 45.6891, |
|
"eval_samples_per_second": 1.094, |
|
"eval_steps_per_second": 0.153, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.314814814814815e-05, |
|
"loss": 0.7429, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 2.2916666666666667e-05, |
|
"loss": 0.7032, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.2685185185185187e-05, |
|
"loss": 0.749, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.2453703703703703e-05, |
|
"loss": 0.7122, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.6559, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 2.1990740740740743e-05, |
|
"loss": 0.5757, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 2.175925925925926e-05, |
|
"loss": 0.5756, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 2.152777777777778e-05, |
|
"loss": 0.5777, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 2.1296296296296296e-05, |
|
"loss": 0.5904, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 2.1064814814814816e-05, |
|
"loss": 0.5798, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 2.0833333333333336e-05, |
|
"loss": 0.5939, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 2.0601851851851853e-05, |
|
"loss": 0.6095, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 2.037037037037037e-05, |
|
"loss": 0.6163, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 2.013888888888889e-05, |
|
"loss": 0.5634, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.990740740740741e-05, |
|
"loss": 0.5909, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.967592592592593e-05, |
|
"loss": 0.5408, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.9444444444444445e-05, |
|
"loss": 0.5435, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.9212962962962962e-05, |
|
"loss": 0.5753, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.8981481481481482e-05, |
|
"loss": 0.5491, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 0.5868, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"eval_bleu": 71.6124, |
|
"eval_em": 0.02, |
|
"eval_gen_len": 47.52, |
|
"eval_loss": 0.9307076930999756, |
|
"eval_rm": 0.9556, |
|
"eval_runtime": 330.299, |
|
"eval_samples_per_second": 0.151, |
|
"eval_steps_per_second": 0.021, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.8518518518518518e-05, |
|
"loss": 0.5088, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.8287037037037038e-05, |
|
"loss": 0.6068, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.8055555555555555e-05, |
|
"loss": 0.5327, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.7824074074074075e-05, |
|
"loss": 0.5759, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.7592592592592595e-05, |
|
"loss": 0.5849, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.736111111111111e-05, |
|
"loss": 0.5648, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.712962962962963e-05, |
|
"loss": 0.5632, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.6898148148148148e-05, |
|
"loss": 0.5093, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.5295, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 1.6435185185185187e-05, |
|
"loss": 0.433, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.6203703703703704e-05, |
|
"loss": 0.4289, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.597222222222222e-05, |
|
"loss": 0.4619, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.574074074074074e-05, |
|
"loss": 0.458, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.550925925925926e-05, |
|
"loss": 0.4155, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.527777777777778e-05, |
|
"loss": 0.4391, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.5046296296296297e-05, |
|
"loss": 0.4264, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 1.4814814814814815e-05, |
|
"loss": 0.418, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.4583333333333335e-05, |
|
"loss": 0.4285, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 1.4351851851851853e-05, |
|
"loss": 0.4466, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.412037037037037e-05, |
|
"loss": 0.4499, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"eval_bleu": 77.237, |
|
"eval_em": 0.06, |
|
"eval_gen_len": 46.0, |
|
"eval_loss": 0.8866045475006104, |
|
"eval_rm": 0.9574, |
|
"eval_runtime": 280.5418, |
|
"eval_samples_per_second": 0.178, |
|
"eval_steps_per_second": 0.025, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.388888888888889e-05, |
|
"loss": 0.4415, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.3657407407407408e-05, |
|
"loss": 0.4209, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 1.3425925925925928e-05, |
|
"loss": 0.4357, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 1.3194444444444446e-05, |
|
"loss": 0.437, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 1.2962962962962962e-05, |
|
"loss": 0.4319, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.2731481481481482e-05, |
|
"loss": 0.4578, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.4244, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 1.2268518518518519e-05, |
|
"loss": 0.4046, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.2037037037037037e-05, |
|
"loss": 0.411, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.1805555555555555e-05, |
|
"loss": 0.4219, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 1.1574074074074075e-05, |
|
"loss": 0.3956, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 1.1342592592592593e-05, |
|
"loss": 0.4333, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.4141, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 1.087962962962963e-05, |
|
"loss": 0.3199, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 1.0648148148148148e-05, |
|
"loss": 0.3236, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 1.0416666666666668e-05, |
|
"loss": 0.3405, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 1.0185185185185185e-05, |
|
"loss": 0.3411, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 9.953703703703704e-06, |
|
"loss": 0.3166, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 9.722222222222223e-06, |
|
"loss": 0.3463, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 9.490740740740741e-06, |
|
"loss": 0.3515, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"eval_bleu": 77.5798, |
|
"eval_em": 0.08, |
|
"eval_gen_len": 47.5, |
|
"eval_loss": 0.9069581627845764, |
|
"eval_rm": 0.9574, |
|
"eval_runtime": 312.8421, |
|
"eval_samples_per_second": 0.16, |
|
"eval_steps_per_second": 0.022, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 9.259259259259259e-06, |
|
"loss": 0.3363, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 9.027777777777777e-06, |
|
"loss": 0.3477, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 8.796296296296297e-06, |
|
"loss": 0.3454, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 8.564814814814816e-06, |
|
"loss": 0.3267, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.3406, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 8.101851851851852e-06, |
|
"loss": 0.3189, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 7.87037037037037e-06, |
|
"loss": 0.3355, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 7.63888888888889e-06, |
|
"loss": 0.3315, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 0.345, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 7.1759259259259266e-06, |
|
"loss": 0.3332, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 6.944444444444445e-06, |
|
"loss": 0.3491, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 6.712962962962964e-06, |
|
"loss": 0.3268, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 6.481481481481481e-06, |
|
"loss": 0.3328, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.3151, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 6.0185185185185185e-06, |
|
"loss": 0.3529, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 5.787037037037038e-06, |
|
"loss": 0.3087, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 0.3228, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 5.324074074074074e-06, |
|
"loss": 0.2613, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 5.092592592592592e-06, |
|
"loss": 0.2721, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 4.861111111111111e-06, |
|
"loss": 0.292, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"eval_bleu": 78.649, |
|
"eval_em": 0.06, |
|
"eval_gen_len": 47.96, |
|
"eval_loss": 0.8904868364334106, |
|
"eval_rm": 0.9574, |
|
"eval_runtime": 309.7604, |
|
"eval_samples_per_second": 0.161, |
|
"eval_steps_per_second": 0.023, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 4.6296296296296296e-06, |
|
"loss": 0.2614, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 4.398148148148149e-06, |
|
"loss": 0.2522, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.289, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 3.935185185185185e-06, |
|
"loss": 0.2875, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 3.7037037037037037e-06, |
|
"loss": 0.2451, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 3.4722222222222224e-06, |
|
"loss": 0.2855, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 3.2407407407407406e-06, |
|
"loss": 0.2632, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 3.0092592592592593e-06, |
|
"loss": 0.2726, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 2.777777777777778e-06, |
|
"loss": 0.2768, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.546296296296296e-06, |
|
"loss": 0.2729, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.3148148148148148e-06, |
|
"loss": 0.2809, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 0.2714, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.8518518518518519e-06, |
|
"loss": 0.2693, |
|
"step": 23300 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 1.6203703703703703e-06, |
|
"loss": 0.2639, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 1.388888888888889e-06, |
|
"loss": 0.2667, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 1.1574074074074074e-06, |
|
"loss": 0.2565, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 9.259259259259259e-07, |
|
"loss": 0.2855, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 6.944444444444445e-07, |
|
"loss": 0.2615, |
|
"step": 23800 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 4.6296296296296297e-07, |
|
"loss": 0.2709, |
|
"step": 23900 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 2.3148148148148148e-07, |
|
"loss": 0.2658, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"eval_bleu": 79.5906, |
|
"eval_em": 0.08, |
|
"eval_gen_len": 48.48, |
|
"eval_loss": 0.9002240896224976, |
|
"eval_rm": 0.9565, |
|
"eval_runtime": 351.0056, |
|
"eval_samples_per_second": 0.142, |
|
"eval_steps_per_second": 0.02, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.2602, |
|
"step": 24100 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 24100, |
|
"total_flos": 7011428612298048.0, |
|
"train_loss": 0.1476078914309933, |
|
"train_runtime": 5095.148, |
|
"train_samples_per_second": 37.832, |
|
"train_steps_per_second": 4.73 |
|
} |
|
], |
|
"max_steps": 24100, |
|
"num_train_epochs": 10, |
|
"total_flos": 7011428612298048.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|