{
  "best_metric": 11.4438961596224,
  "best_model_checkpoint": "./output/small/yt-special-batch8-base/checkpoint-5000",
  "epoch": 7.9239302694136295,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.4e-07,
      "loss": 1.7491,
      "step": 25
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.400000000000001e-07,
      "loss": 1.4857,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.44e-06,
      "loss": 1.1541,
      "step": 75
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.94e-06,
      "loss": 1.0995,
      "step": 100
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 2.2131,
      "step": 125
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 3.4199,
      "step": 150
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 4.7849,
      "step": 175
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.920000000000001e-06,
      "loss": 9.5528,
      "step": 200
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.42e-06,
      "loss": 12.497,
      "step": 225
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 12.4801,
      "step": 250
    },
    {
      "epoch": 0.44,
      "learning_rate": 5.380000000000001e-06,
      "loss": 14.1177,
      "step": 275
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.86e-06,
      "loss": 15.3981,
      "step": 300
    },
    {
      "epoch": 0.52,
      "learning_rate": 6.360000000000001e-06,
      "loss": 18.7705,
      "step": 325
    },
    {
      "epoch": 0.55,
      "learning_rate": 6.860000000000001e-06,
      "loss": 23.5459,
      "step": 350
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.360000000000001e-06,
      "loss": 31.1971,
      "step": 375
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.860000000000001e-06,
      "loss": 33.355,
      "step": 400
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.36e-06,
      "loss": 45.9219,
      "step": 425
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.860000000000002e-06,
      "loss": 46.7898,
      "step": 450
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.360000000000002e-06,
      "loss": 63.6532,
      "step": 475
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.86e-06,
      "loss": 59.3715,
      "step": 500
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.960000000000001e-06,
      "loss": 61.0177,
      "step": 525
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.904444444444445e-06,
      "loss": 67.4881,
      "step": 550
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.851111111111112e-06,
      "loss": 68.3865,
      "step": 575
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.795555555555556e-06,
      "loss": 79.6374,
      "step": 600
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.74e-06,
      "loss": 64.3938,
      "step": 625
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.684444444444446e-06,
      "loss": 70.0958,
      "step": 650
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.62888888888889e-06,
      "loss": 69.7785,
      "step": 675
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.584444444444445e-06,
      "loss": 64.8585,
      "step": 700
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.535555555555557e-06,
      "loss": 61.1897,
      "step": 725
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.482222222222223e-06,
      "loss": 64.2252,
      "step": 750
    },
    {
      "epoch": 1.23,
      "learning_rate": 9.426666666666667e-06,
      "loss": 61.7694,
      "step": 775
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.371111111111111e-06,
      "loss": 60.8694,
      "step": 800
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.315555555555557e-06,
      "loss": 59.7583,
      "step": 825
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.260000000000001e-06,
      "loss": 55.3815,
      "step": 850
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.204444444444445e-06,
      "loss": 51.8172,
      "step": 875
    },
    {
      "epoch": 1.43,
      "learning_rate": 9.14888888888889e-06,
      "loss": 48.4673,
      "step": 900
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.093333333333333e-06,
      "loss": 45.7032,
      "step": 925
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.037777777777779e-06,
      "loss": 54.2089,
      "step": 950
    },
    {
      "epoch": 1.55,
      "learning_rate": 8.982222222222223e-06,
      "loss": 47.5647,
      "step": 975
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.926666666666669e-06,
      "loss": 41.113,
      "step": 1000
    },
    {
      "epoch": 1.58,
      "eval_loss": 42.97588348388672,
      "eval_runtime": 1037.3254,
      "eval_samples_per_second": 4.862,
      "eval_steps_per_second": 1.216,
      "eval_wer": 107.56275477365371,
      "step": 1000
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.871111111111111e-06,
      "loss": 47.9835,
      "step": 1025
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.815555555555557e-06,
      "loss": 42.2165,
      "step": 1050
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.76e-06,
      "loss": 43.1054,
      "step": 1075
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.704444444444445e-06,
      "loss": 40.2444,
      "step": 1100
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.64888888888889e-06,
      "loss": 38.3884,
      "step": 1125
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.593333333333333e-06,
      "loss": 41.0448,
      "step": 1150
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.537777777777779e-06,
      "loss": 37.6895,
      "step": 1175
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.482222222222223e-06,
      "loss": 39.4216,
      "step": 1200
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.426666666666667e-06,
      "loss": 43.0472,
      "step": 1225
    },
    {
      "epoch": 1.98,
      "learning_rate": 8.371111111111112e-06,
      "loss": 37.6302,
      "step": 1250
    },
    {
      "epoch": 2.02,
      "learning_rate": 8.315555555555557e-06,
      "loss": 33.0809,
      "step": 1275
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.26e-06,
      "loss": 38.3013,
      "step": 1300
    },
    {
      "epoch": 2.1,
      "learning_rate": 8.204444444444445e-06,
      "loss": 32.9067,
      "step": 1325
    },
    {
      "epoch": 2.14,
      "learning_rate": 8.14888888888889e-06,
      "loss": 34.4728,
      "step": 1350
    },
    {
      "epoch": 2.18,
      "learning_rate": 8.093333333333334e-06,
      "loss": 34.53,
      "step": 1375
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.037777777777778e-06,
      "loss": 33.0867,
      "step": 1400
    },
    {
      "epoch": 2.26,
      "learning_rate": 7.982222222222224e-06,
      "loss": 30.1566,
      "step": 1425
    },
    {
      "epoch": 2.3,
      "learning_rate": 7.926666666666666e-06,
      "loss": 32.9418,
      "step": 1450
    },
    {
      "epoch": 2.34,
      "learning_rate": 7.871111111111112e-06,
      "loss": 31.0219,
      "step": 1475
    },
    {
      "epoch": 2.38,
      "learning_rate": 7.815555555555556e-06,
      "loss": 29.2539,
      "step": 1500
    },
    {
      "epoch": 2.42,
      "learning_rate": 7.76e-06,
      "loss": 30.4172,
      "step": 1525
    },
    {
      "epoch": 2.46,
      "learning_rate": 7.704444444444446e-06,
      "loss": 30.1107,
      "step": 1550
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.648888888888888e-06,
      "loss": 30.2495,
      "step": 1575
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.593333333333334e-06,
      "loss": 28.5465,
      "step": 1600
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.537777777777778e-06,
      "loss": 29.2619,
      "step": 1625
    },
    {
      "epoch": 2.61,
      "learning_rate": 7.482222222222223e-06,
      "loss": 29.9081,
      "step": 1650
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.426666666666668e-06,
      "loss": 27.4152,
      "step": 1675
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.371111111111112e-06,
      "loss": 31.0304,
      "step": 1700
    },
    {
      "epoch": 2.73,
      "learning_rate": 7.315555555555556e-06,
      "loss": 28.8473,
      "step": 1725
    },
    {
      "epoch": 2.77,
      "learning_rate": 7.260000000000001e-06,
      "loss": 24.3152,
      "step": 1750
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.204444444444445e-06,
      "loss": 30.2247,
      "step": 1775
    },
    {
      "epoch": 2.85,
      "learning_rate": 7.14888888888889e-06,
      "loss": 26.9633,
      "step": 1800
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.093333333333335e-06,
      "loss": 24.6396,
      "step": 1825
    },
    {
      "epoch": 2.93,
      "learning_rate": 7.037777777777778e-06,
      "loss": 30.4137,
      "step": 1850
    },
    {
      "epoch": 2.97,
      "learning_rate": 6.982222222222223e-06,
      "loss": 26.6009,
      "step": 1875
    },
    {
      "epoch": 3.01,
      "learning_rate": 6.926666666666667e-06,
      "loss": 24.5352,
      "step": 1900
    },
    {
      "epoch": 3.05,
      "learning_rate": 6.871111111111112e-06,
      "loss": 23.6272,
      "step": 1925
    },
    {
      "epoch": 3.09,
      "learning_rate": 6.8155555555555565e-06,
      "loss": 21.0893,
      "step": 1950
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.760000000000001e-06,
      "loss": 21.3604,
      "step": 1975
    },
    {
      "epoch": 3.17,
      "learning_rate": 6.7044444444444445e-06,
      "loss": 17.3442,
      "step": 2000
    },
    {
      "epoch": 3.17,
      "eval_loss": 18.70366668701172,
      "eval_runtime": 1280.3674,
      "eval_samples_per_second": 3.939,
      "eval_steps_per_second": 0.985,
      "eval_wer": 144.10641493241795,
      "step": 2000
    },
    {
      "epoch": 3.21,
      "learning_rate": 6.648888888888889e-06,
      "loss": 21.8499,
      "step": 2025
    },
    {
      "epoch": 3.25,
      "learning_rate": 6.5933333333333335e-06,
      "loss": 20.0117,
      "step": 2050
    },
    {
      "epoch": 3.29,
      "learning_rate": 6.537777777777778e-06,
      "loss": 21.6871,
      "step": 2075
    },
    {
      "epoch": 3.33,
      "learning_rate": 6.482222222222223e-06,
      "loss": 18.9966,
      "step": 2100
    },
    {
      "epoch": 3.37,
      "learning_rate": 6.426666666666668e-06,
      "loss": 17.4701,
      "step": 2125
    },
    {
      "epoch": 3.41,
      "learning_rate": 6.371111111111111e-06,
      "loss": 22.0492,
      "step": 2150
    },
    {
      "epoch": 3.45,
      "learning_rate": 6.317777777777778e-06,
      "loss": 16.6105,
      "step": 2175
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.262222222222223e-06,
      "loss": 17.567,
      "step": 2200
    },
    {
      "epoch": 3.53,
      "learning_rate": 6.211111111111111e-06,
      "loss": 17.675,
      "step": 2225
    },
    {
      "epoch": 3.57,
      "learning_rate": 6.155555555555556e-06,
      "loss": 20.1354,
      "step": 2250
    },
    {
      "epoch": 3.61,
      "learning_rate": 6.1e-06,
      "loss": 18.9197,
      "step": 2275
    },
    {
      "epoch": 3.65,
      "learning_rate": 6.044444444444445e-06,
      "loss": 17.748,
      "step": 2300
    },
    {
      "epoch": 3.68,
      "learning_rate": 5.98888888888889e-06,
      "loss": 19.3784,
      "step": 2325
    },
    {
      "epoch": 3.72,
      "learning_rate": 5.933333333333335e-06,
      "loss": 18.7225,
      "step": 2350
    },
    {
      "epoch": 3.76,
      "learning_rate": 5.877777777777778e-06,
      "loss": 18.112,
      "step": 2375
    },
    {
      "epoch": 3.8,
      "learning_rate": 5.822222222222223e-06,
      "loss": 16.86,
      "step": 2400
    },
    {
      "epoch": 3.84,
      "learning_rate": 5.766666666666667e-06,
      "loss": 20.3351,
      "step": 2425
    },
    {
      "epoch": 3.88,
      "learning_rate": 5.711111111111112e-06,
      "loss": 17.9965,
      "step": 2450
    },
    {
      "epoch": 3.92,
      "learning_rate": 5.6555555555555566e-06,
      "loss": 19.2315,
      "step": 2475
    },
    {
      "epoch": 3.96,
      "learning_rate": 5.600000000000001e-06,
      "loss": 16.2915,
      "step": 2500
    },
    {
      "epoch": 4.0,
      "learning_rate": 5.544444444444445e-06,
      "loss": 15.6132,
      "step": 2525
    },
    {
      "epoch": 4.04,
      "learning_rate": 5.4888888888888895e-06,
      "loss": 12.7693,
      "step": 2550
    },
    {
      "epoch": 4.08,
      "learning_rate": 5.4333333333333335e-06,
      "loss": 12.2693,
      "step": 2575
    },
    {
      "epoch": 4.12,
      "learning_rate": 5.3777777777777784e-06,
      "loss": 10.6931,
      "step": 2600
    },
    {
      "epoch": 4.16,
      "learning_rate": 5.322222222222223e-06,
      "loss": 12.7438,
      "step": 2625
    },
    {
      "epoch": 4.2,
      "learning_rate": 5.2666666666666665e-06,
      "loss": 11.1154,
      "step": 2650
    },
    {
      "epoch": 4.24,
      "learning_rate": 5.211111111111111e-06,
      "loss": 12.2588,
      "step": 2675
    },
    {
      "epoch": 4.28,
      "learning_rate": 5.155555555555556e-06,
      "loss": 11.8185,
      "step": 2700
    },
    {
      "epoch": 4.32,
      "learning_rate": 5.1e-06,
      "loss": 11.9872,
      "step": 2725
    },
    {
      "epoch": 4.36,
      "learning_rate": 5.044444444444445e-06,
      "loss": 10.9318,
      "step": 2750
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.988888888888889e-06,
      "loss": 10.5515,
      "step": 2775
    },
    {
      "epoch": 4.44,
      "learning_rate": 4.933333333333334e-06,
      "loss": 10.555,
      "step": 2800
    },
    {
      "epoch": 4.48,
      "learning_rate": 4.880000000000001e-06,
      "loss": 12.2332,
      "step": 2825
    },
    {
      "epoch": 4.52,
      "learning_rate": 4.824444444444445e-06,
      "loss": 11.013,
      "step": 2850
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.768888888888889e-06,
      "loss": 10.5057,
      "step": 2875
    },
    {
      "epoch": 4.6,
      "learning_rate": 4.713333333333334e-06,
      "loss": 11.5147,
      "step": 2900
    },
    {
      "epoch": 4.64,
      "learning_rate": 4.6577777777777785e-06,
      "loss": 10.1689,
      "step": 2925
    },
    {
      "epoch": 4.68,
      "learning_rate": 4.602222222222223e-06,
      "loss": 10.7969,
      "step": 2950
    },
    {
      "epoch": 4.71,
      "learning_rate": 4.546666666666667e-06,
      "loss": 10.6932,
      "step": 2975
    },
    {
      "epoch": 4.75,
      "learning_rate": 4.4911111111111115e-06,
      "loss": 10.8061,
      "step": 3000
    },
    {
      "epoch": 4.75,
      "eval_loss": 7.153076648712158,
      "eval_runtime": 931.4057,
      "eval_samples_per_second": 5.414,
      "eval_steps_per_second": 1.354,
      "eval_wer": 52.55095473074447,
      "step": 3000
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.4355555555555555e-06,
      "loss": 10.9146,
      "step": 3025
    },
    {
      "epoch": 4.83,
      "learning_rate": 4.38e-06,
      "loss": 9.9721,
      "step": 3050
    },
    {
      "epoch": 4.87,
      "learning_rate": 4.324444444444445e-06,
      "loss": 11.8253,
      "step": 3075
    },
    {
      "epoch": 4.91,
      "learning_rate": 4.268888888888889e-06,
      "loss": 9.9704,
      "step": 3100
    },
    {
      "epoch": 4.95,
      "learning_rate": 4.213333333333333e-06,
      "loss": 10.1412,
      "step": 3125
    },
    {
      "epoch": 4.99,
      "learning_rate": 4.157777777777778e-06,
      "loss": 9.0687,
      "step": 3150
    },
    {
      "epoch": 5.03,
      "learning_rate": 4.102222222222222e-06,
      "loss": 6.6716,
      "step": 3175
    },
    {
      "epoch": 5.07,
      "learning_rate": 4.046666666666667e-06,
      "loss": 7.9673,
      "step": 3200
    },
    {
      "epoch": 5.11,
      "learning_rate": 3.991111111111112e-06,
      "loss": 5.9019,
      "step": 3225
    },
    {
      "epoch": 5.15,
      "learning_rate": 3.935555555555556e-06,
      "loss": 6.3131,
      "step": 3250
    },
    {
      "epoch": 5.19,
      "learning_rate": 3.88e-06,
      "loss": 6.243,
      "step": 3275
    },
    {
      "epoch": 5.23,
      "learning_rate": 3.824444444444444e-06,
      "loss": 6.7379,
      "step": 3300
    },
    {
      "epoch": 5.27,
      "learning_rate": 3.768888888888889e-06,
      "loss": 6.1734,
      "step": 3325
    },
    {
      "epoch": 5.31,
      "learning_rate": 3.713333333333334e-06,
      "loss": 5.9483,
      "step": 3350
    },
    {
      "epoch": 5.35,
      "learning_rate": 3.657777777777778e-06,
      "loss": 5.7135,
      "step": 3375
    },
    {
      "epoch": 5.39,
      "learning_rate": 3.6022222222222224e-06,
      "loss": 6.0274,
      "step": 3400
    },
    {
      "epoch": 5.43,
      "learning_rate": 3.5466666666666673e-06,
      "loss": 5.4355,
      "step": 3425
    },
    {
      "epoch": 5.47,
      "learning_rate": 3.4911111111111113e-06,
      "loss": 6.2385,
      "step": 3450
    },
    {
      "epoch": 5.51,
      "learning_rate": 3.435555555555556e-06,
      "loss": 5.4162,
      "step": 3475
    },
    {
      "epoch": 5.55,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 5.6809,
      "step": 3500
    },
    {
      "epoch": 5.59,
      "learning_rate": 3.3244444444444447e-06,
      "loss": 6.5519,
      "step": 3525
    },
    {
      "epoch": 5.63,
      "learning_rate": 3.268888888888889e-06,
      "loss": 6.0984,
      "step": 3550
    },
    {
      "epoch": 5.67,
      "learning_rate": 3.213333333333334e-06,
      "loss": 5.8196,
      "step": 3575
    },
    {
      "epoch": 5.71,
      "learning_rate": 3.157777777777778e-06,
      "loss": 6.6902,
      "step": 3600
    },
    {
      "epoch": 5.74,
      "learning_rate": 3.1022222222222225e-06,
      "loss": 5.5926,
      "step": 3625
    },
    {
      "epoch": 5.78,
      "learning_rate": 3.0466666666666666e-06,
      "loss": 5.16,
      "step": 3650
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.9911111111111115e-06,
      "loss": 5.036,
      "step": 3675
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.935555555555556e-06,
      "loss": 5.5856,
      "step": 3700
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.88e-06,
      "loss": 4.9832,
      "step": 3725
    },
    {
      "epoch": 5.94,
      "learning_rate": 2.824444444444445e-06,
      "loss": 5.1377,
      "step": 3750
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.7688888888888893e-06,
      "loss": 4.9745,
      "step": 3775
    },
    {
      "epoch": 6.02,
      "learning_rate": 2.7133333333333333e-06,
      "loss": 4.0952,
      "step": 3800
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.6577777777777782e-06,
      "loss": 2.9451,
      "step": 3825
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.6022222222222227e-06,
      "loss": 2.7328,
      "step": 3850
    },
    {
      "epoch": 6.14,
      "learning_rate": 2.5466666666666667e-06,
      "loss": 3.0824,
      "step": 3875
    },
    {
      "epoch": 6.18,
      "learning_rate": 2.491111111111111e-06,
      "loss": 2.7726,
      "step": 3900
    },
    {
      "epoch": 6.22,
      "learning_rate": 2.4355555555555556e-06,
      "loss": 3.2206,
      "step": 3925
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.38e-06,
      "loss": 3.4037,
      "step": 3950
    },
    {
      "epoch": 6.3,
      "learning_rate": 2.3244444444444445e-06,
      "loss": 3.4738,
      "step": 3975
    },
    {
      "epoch": 6.34,
      "learning_rate": 2.268888888888889e-06,
      "loss": 3.3269,
      "step": 4000
    },
    {
      "epoch": 6.34,
      "eval_loss": 3.1034955978393555,
      "eval_runtime": 930.155,
      "eval_samples_per_second": 5.422,
      "eval_steps_per_second": 1.356,
      "eval_wer": 47.058571122076806,
      "step": 4000
    },
    {
      "epoch": 6.38,
      "learning_rate": 2.2133333333333335e-06,
      "loss": 3.2691,
      "step": 4025
    },
    {
      "epoch": 6.42,
      "learning_rate": 2.157777777777778e-06,
      "loss": 3.1905,
      "step": 4050
    },
    {
      "epoch": 6.46,
      "learning_rate": 2.1022222222222224e-06,
      "loss": 3.0545,
      "step": 4075
    },
    {
      "epoch": 6.5,
      "learning_rate": 2.046666666666667e-06,
      "loss": 2.8343,
      "step": 4100
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.9911111111111113e-06,
      "loss": 3.0692,
      "step": 4125
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.9355555555555558e-06,
      "loss": 2.4305,
      "step": 4150
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.8800000000000002e-06,
      "loss": 2.9358,
      "step": 4175
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.8244444444444445e-06,
      "loss": 3.1364,
      "step": 4200
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.7688888888888891e-06,
      "loss": 2.2167,
      "step": 4225
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.7133333333333336e-06,
      "loss": 2.2972,
      "step": 4250
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.6577777777777778e-06,
      "loss": 2.4747,
      "step": 4275
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.6022222222222223e-06,
      "loss": 1.9043,
      "step": 4300
    },
    {
      "epoch": 6.85,
      "learning_rate": 1.546666666666667e-06,
      "loss": 2.297,
      "step": 4325
    },
    {
      "epoch": 6.89,
      "learning_rate": 1.4911111111111112e-06,
      "loss": 2.6366,
      "step": 4350
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.4355555555555557e-06,
      "loss": 2.4637,
      "step": 4375
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 3.0088,
      "step": 4400
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.3244444444444446e-06,
      "loss": 1.8821,
      "step": 4425
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.268888888888889e-06,
      "loss": 1.2078,
      "step": 4450
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.2133333333333335e-06,
      "loss": 1.1169,
      "step": 4475
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.1577777777777778e-06,
      "loss": 1.0951,
      "step": 4500
    },
    {
      "epoch": 7.17,
      "learning_rate": 1.1022222222222222e-06,
      "loss": 1.2286,
      "step": 4525
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.0466666666666669e-06,
      "loss": 1.3975,
      "step": 4550
    },
    {
      "epoch": 7.25,
      "learning_rate": 9.911111111111111e-07,
      "loss": 1.5158,
      "step": 4575
    },
    {
      "epoch": 7.29,
      "learning_rate": 9.355555555555557e-07,
      "loss": 1.1447,
      "step": 4600
    },
    {
      "epoch": 7.33,
      "learning_rate": 8.8e-07,
      "loss": 0.9339,
      "step": 4625
    },
    {
      "epoch": 7.37,
      "learning_rate": 8.244444444444445e-07,
      "loss": 0.9653,
      "step": 4650
    },
    {
      "epoch": 7.41,
      "learning_rate": 7.688888888888891e-07,
      "loss": 0.9988,
      "step": 4675
    },
    {
      "epoch": 7.45,
      "learning_rate": 7.133333333333334e-07,
      "loss": 0.935,
      "step": 4700
    },
    {
      "epoch": 7.49,
      "learning_rate": 6.577777777777779e-07,
      "loss": 1.0832,
      "step": 4725
    },
    {
      "epoch": 7.53,
      "learning_rate": 6.022222222222223e-07,
      "loss": 1.079,
      "step": 4750
    },
    {
      "epoch": 7.57,
      "learning_rate": 5.466666666666667e-07,
      "loss": 0.8524,
      "step": 4775
    },
    {
      "epoch": 7.61,
      "learning_rate": 4.911111111111112e-07,
      "loss": 0.8658,
      "step": 4800
    },
    {
      "epoch": 7.65,
      "learning_rate": 4.355555555555556e-07,
      "loss": 0.8488,
      "step": 4825
    },
    {
      "epoch": 7.69,
      "learning_rate": 3.8e-07,
      "loss": 0.8372,
      "step": 4850
    },
    {
      "epoch": 7.73,
      "learning_rate": 3.2444444444444447e-07,
      "loss": 0.7349,
      "step": 4875
    },
    {
      "epoch": 7.77,
      "learning_rate": 2.6888888888888893e-07,
      "loss": 0.9287,
      "step": 4900
    },
    {
      "epoch": 7.81,
      "learning_rate": 2.1333333333333334e-07,
      "loss": 0.6612,
      "step": 4925
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.577777777777778e-07,
      "loss": 0.7809,
      "step": 4950
    },
    {
      "epoch": 7.88,
      "learning_rate": 1.0222222222222224e-07,
      "loss": 0.794,
      "step": 4975
    },
    {
      "epoch": 7.92,
      "learning_rate": 4.6666666666666674e-08,
      "loss": 0.7405,
      "step": 5000
    },
    {
      "epoch": 7.92,
      "eval_loss": 0.41551142930984497,
      "eval_runtime": 890.3201,
      "eval_samples_per_second": 5.664,
      "eval_steps_per_second": 1.416,
      "eval_wer": 11.4438961596224,
      "step": 5000
    },
    {
      "epoch": 7.92,
      "step": 5000,
      "total_flos": 2.5921297465344e+18,
      "train_loss": 18.966414898300172,
      "train_runtime": 10244.3749,
      "train_samples_per_second": 3.905,
      "train_steps_per_second": 0.488
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 8,
  "total_flos": 2.5921297465344e+18,
  "trial_name": null,
  "trial_params": null
}