roberta-large-wechsel-ukrainian / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 18.293569925912376,
"global_step": 250000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.04,
"learning_rate": 2.0000000000000003e-06,
"loss": 8.2868,
"step": 500
},
{
"epoch": 0.07,
"learning_rate": 4.000000000000001e-06,
"loss": 7.0346,
"step": 1000
},
{
"epoch": 0.11,
"learning_rate": 6e-06,
"loss": 6.3237,
"step": 1500
},
{
"epoch": 0.15,
"learning_rate": 8.000000000000001e-06,
"loss": 5.9019,
"step": 2000
},
{
"epoch": 0.18,
"learning_rate": 1e-05,
"loss": 5.5596,
"step": 2500
},
{
"epoch": 0.22,
"learning_rate": 1.2e-05,
"loss": 5.2412,
"step": 3000
},
{
"epoch": 0.26,
"learning_rate": 1.4000000000000001e-05,
"loss": 4.9692,
"step": 3500
},
{
"epoch": 0.29,
"learning_rate": 1.6000000000000003e-05,
"loss": 4.7473,
"step": 4000
},
{
"epoch": 0.33,
"learning_rate": 1.8e-05,
"loss": 4.5555,
"step": 4500
},
{
"epoch": 0.37,
"learning_rate": 2e-05,
"loss": 4.3848,
"step": 5000
},
{
"epoch": 0.4,
"learning_rate": 2.2000000000000003e-05,
"loss": 4.2289,
"step": 5500
},
{
"epoch": 0.44,
"learning_rate": 2.4e-05,
"loss": 4.0925,
"step": 6000
},
{
"epoch": 0.48,
"learning_rate": 2.6000000000000002e-05,
"loss": 3.963,
"step": 6500
},
{
"epoch": 0.51,
"learning_rate": 2.8000000000000003e-05,
"loss": 3.84,
"step": 7000
},
{
"epoch": 0.55,
"learning_rate": 3e-05,
"loss": 3.7383,
"step": 7500
},
{
"epoch": 0.59,
"learning_rate": 3.2000000000000005e-05,
"loss": 3.6421,
"step": 8000
},
{
"epoch": 0.62,
"learning_rate": 3.4000000000000007e-05,
"loss": 3.5555,
"step": 8500
},
{
"epoch": 0.66,
"learning_rate": 3.6e-05,
"loss": 3.4731,
"step": 9000
},
{
"epoch": 0.7,
"learning_rate": 3.8e-05,
"loss": 3.4076,
"step": 9500
},
{
"epoch": 0.73,
"learning_rate": 4e-05,
"loss": 3.3376,
"step": 10000
},
{
"epoch": 0.77,
"learning_rate": 4.2e-05,
"loss": 3.2779,
"step": 10500
},
{
"epoch": 0.8,
"learning_rate": 4.4000000000000006e-05,
"loss": 3.2225,
"step": 11000
},
{
"epoch": 0.84,
"learning_rate": 4.600000000000001e-05,
"loss": 3.1723,
"step": 11500
},
{
"epoch": 0.88,
"learning_rate": 4.8e-05,
"loss": 3.1241,
"step": 12000
},
{
"epoch": 0.91,
"learning_rate": 5e-05,
"loss": 3.0853,
"step": 12500
},
{
"epoch": 0.95,
"learning_rate": 5.2000000000000004e-05,
"loss": 3.0444,
"step": 13000
},
{
"epoch": 0.99,
"learning_rate": 5.4000000000000005e-05,
"loss": 3.0089,
"step": 13500
},
{
"epoch": 1.02,
"learning_rate": 5.6000000000000006e-05,
"loss": 2.9769,
"step": 14000
},
{
"epoch": 1.06,
"learning_rate": 5.8e-05,
"loss": 2.9401,
"step": 14500
},
{
"epoch": 1.1,
"learning_rate": 6e-05,
"loss": 2.9119,
"step": 15000
},
{
"epoch": 1.13,
"learning_rate": 6.2e-05,
"loss": 2.886,
"step": 15500
},
{
"epoch": 1.17,
"learning_rate": 6.400000000000001e-05,
"loss": 2.8613,
"step": 16000
},
{
"epoch": 1.21,
"learning_rate": 6.6e-05,
"loss": 2.8364,
"step": 16500
},
{
"epoch": 1.24,
"learning_rate": 6.800000000000001e-05,
"loss": 2.8127,
"step": 17000
},
{
"epoch": 1.28,
"learning_rate": 7e-05,
"loss": 2.793,
"step": 17500
},
{
"epoch": 1.32,
"learning_rate": 7.2e-05,
"loss": 2.7703,
"step": 18000
},
{
"epoch": 1.35,
"learning_rate": 7.4e-05,
"loss": 2.7524,
"step": 18500
},
{
"epoch": 1.39,
"learning_rate": 7.6e-05,
"loss": 2.7307,
"step": 19000
},
{
"epoch": 1.43,
"learning_rate": 7.800000000000001e-05,
"loss": 2.7182,
"step": 19500
},
{
"epoch": 1.46,
"learning_rate": 8e-05,
"loss": 2.7033,
"step": 20000
},
{
"epoch": 1.5,
"learning_rate": 8.2e-05,
"loss": 2.6856,
"step": 20500
},
{
"epoch": 1.54,
"learning_rate": 8.4e-05,
"loss": 2.6691,
"step": 21000
},
{
"epoch": 1.57,
"learning_rate": 8.6e-05,
"loss": 2.6539,
"step": 21500
},
{
"epoch": 1.61,
"learning_rate": 8.800000000000001e-05,
"loss": 2.6432,
"step": 22000
},
{
"epoch": 1.65,
"learning_rate": 9e-05,
"loss": 2.6266,
"step": 22500
},
{
"epoch": 1.68,
"learning_rate": 9.200000000000001e-05,
"loss": 2.6167,
"step": 23000
},
{
"epoch": 1.72,
"learning_rate": 9.4e-05,
"loss": 2.6054,
"step": 23500
},
{
"epoch": 1.76,
"learning_rate": 9.6e-05,
"loss": 2.5957,
"step": 24000
},
{
"epoch": 1.79,
"learning_rate": 9.8e-05,
"loss": 2.585,
"step": 24500
},
{
"epoch": 1.83,
"learning_rate": 0.0001,
"loss": 2.5727,
"step": 25000
},
{
"epoch": 1.87,
"learning_rate": 9.977777777777779e-05,
"loss": 2.5645,
"step": 25500
},
{
"epoch": 1.9,
"learning_rate": 9.955555555555556e-05,
"loss": 2.5532,
"step": 26000
},
{
"epoch": 1.94,
"learning_rate": 9.933333333333334e-05,
"loss": 2.5404,
"step": 26500
},
{
"epoch": 1.98,
"learning_rate": 9.911111111111112e-05,
"loss": 2.533,
"step": 27000
},
{
"epoch": 2.01,
"learning_rate": 9.888888888888889e-05,
"loss": 2.5224,
"step": 27500
},
{
"epoch": 2.05,
"learning_rate": 9.866666666666668e-05,
"loss": 2.5087,
"step": 28000
},
{
"epoch": 2.09,
"learning_rate": 9.844444444444444e-05,
"loss": 2.5015,
"step": 28500
},
{
"epoch": 2.12,
"learning_rate": 9.822222222222223e-05,
"loss": 2.4935,
"step": 29000
},
{
"epoch": 2.16,
"learning_rate": 9.8e-05,
"loss": 2.483,
"step": 29500
},
{
"epoch": 2.2,
"learning_rate": 9.777777777777778e-05,
"loss": 2.477,
"step": 30000
},
{
"epoch": 2.23,
"learning_rate": 9.755555555555555e-05,
"loss": 2.4694,
"step": 30500
},
{
"epoch": 2.27,
"learning_rate": 9.733333333333335e-05,
"loss": 2.4643,
"step": 31000
},
{
"epoch": 2.3,
"learning_rate": 9.711111111111111e-05,
"loss": 2.4559,
"step": 31500
},
{
"epoch": 2.34,
"learning_rate": 9.68888888888889e-05,
"loss": 2.4481,
"step": 32000
},
{
"epoch": 2.38,
"learning_rate": 9.666666666666667e-05,
"loss": 2.4412,
"step": 32500
},
{
"epoch": 2.41,
"learning_rate": 9.644444444444445e-05,
"loss": 2.4328,
"step": 33000
},
{
"epoch": 2.45,
"learning_rate": 9.622222222222222e-05,
"loss": 2.43,
"step": 33500
},
{
"epoch": 2.49,
"learning_rate": 9.6e-05,
"loss": 2.4229,
"step": 34000
},
{
"epoch": 2.52,
"learning_rate": 9.577777777777777e-05,
"loss": 2.4186,
"step": 34500
},
{
"epoch": 2.56,
"learning_rate": 9.555555555555557e-05,
"loss": 2.408,
"step": 35000
},
{
"epoch": 2.6,
"learning_rate": 9.533333333333334e-05,
"loss": 2.4097,
"step": 35500
},
{
"epoch": 2.63,
"learning_rate": 9.511111111111112e-05,
"loss": 2.4018,
"step": 36000
},
{
"epoch": 2.67,
"learning_rate": 9.488888888888889e-05,
"loss": 2.3952,
"step": 36500
},
{
"epoch": 2.71,
"learning_rate": 9.466666666666667e-05,
"loss": 2.3904,
"step": 37000
},
{
"epoch": 2.74,
"learning_rate": 9.444444444444444e-05,
"loss": 2.389,
"step": 37500
},
{
"epoch": 2.78,
"learning_rate": 9.422222222222223e-05,
"loss": 2.384,
"step": 38000
},
{
"epoch": 2.82,
"learning_rate": 9.4e-05,
"loss": 2.3774,
"step": 38500
},
{
"epoch": 2.85,
"learning_rate": 9.377777777777779e-05,
"loss": 2.3734,
"step": 39000
},
{
"epoch": 2.89,
"learning_rate": 9.355555555555556e-05,
"loss": 2.3707,
"step": 39500
},
{
"epoch": 2.93,
"learning_rate": 9.333333333333334e-05,
"loss": 2.3663,
"step": 40000
},
{
"epoch": 2.96,
"learning_rate": 9.311111111111111e-05,
"loss": 2.3637,
"step": 40500
},
{
"epoch": 3.0,
"learning_rate": 9.28888888888889e-05,
"loss": 2.3629,
"step": 41000
},
{
"epoch": 3.04,
"learning_rate": 9.266666666666666e-05,
"loss": 2.3533,
"step": 41500
},
{
"epoch": 3.07,
"learning_rate": 9.244444444444445e-05,
"loss": 2.3457,
"step": 42000
},
{
"epoch": 3.11,
"learning_rate": 9.222222222222223e-05,
"loss": 2.3458,
"step": 42500
},
{
"epoch": 3.15,
"learning_rate": 9.200000000000001e-05,
"loss": 2.3399,
"step": 43000
},
{
"epoch": 3.18,
"learning_rate": 9.177777777777778e-05,
"loss": 2.338,
"step": 43500
},
{
"epoch": 3.22,
"learning_rate": 9.155555555555557e-05,
"loss": 2.3369,
"step": 44000
},
{
"epoch": 3.26,
"learning_rate": 9.133333333333334e-05,
"loss": 2.3334,
"step": 44500
},
{
"epoch": 3.29,
"learning_rate": 9.111111111111112e-05,
"loss": 2.3283,
"step": 45000
},
{
"epoch": 3.33,
"learning_rate": 9.088888888888889e-05,
"loss": 2.3248,
"step": 45500
},
{
"epoch": 3.37,
"learning_rate": 9.066666666666667e-05,
"loss": 2.3227,
"step": 46000
},
{
"epoch": 3.4,
"learning_rate": 9.044444444444445e-05,
"loss": 2.3173,
"step": 46500
},
{
"epoch": 3.44,
"learning_rate": 9.022222222222224e-05,
"loss": 2.3136,
"step": 47000
},
{
"epoch": 3.48,
"learning_rate": 9e-05,
"loss": 2.3086,
"step": 47500
},
{
"epoch": 3.51,
"learning_rate": 8.977777777777779e-05,
"loss": 2.3104,
"step": 48000
},
{
"epoch": 3.55,
"learning_rate": 8.955555555555556e-05,
"loss": 2.3057,
"step": 48500
},
{
"epoch": 3.59,
"learning_rate": 8.933333333333334e-05,
"loss": 2.3022,
"step": 49000
},
{
"epoch": 3.62,
"learning_rate": 8.911111111111111e-05,
"loss": 2.3034,
"step": 49500
},
{
"epoch": 3.66,
"learning_rate": 8.888888888888889e-05,
"loss": 2.3004,
"step": 50000
},
{
"epoch": 3.7,
"learning_rate": 8.866666666666668e-05,
"loss": 2.2976,
"step": 50500
},
{
"epoch": 3.73,
"learning_rate": 8.844444444444445e-05,
"loss": 2.2931,
"step": 51000
},
{
"epoch": 3.77,
"learning_rate": 8.822222222222223e-05,
"loss": 2.2914,
"step": 51500
},
{
"epoch": 3.81,
"learning_rate": 8.800000000000001e-05,
"loss": 2.2904,
"step": 52000
},
{
"epoch": 3.84,
"learning_rate": 8.777777777777778e-05,
"loss": 2.2915,
"step": 52500
},
{
"epoch": 3.88,
"learning_rate": 8.755555555555556e-05,
"loss": 2.2859,
"step": 53000
},
{
"epoch": 3.91,
"learning_rate": 8.733333333333333e-05,
"loss": 2.283,
"step": 53500
},
{
"epoch": 3.95,
"learning_rate": 8.711111111111112e-05,
"loss": 2.2805,
"step": 54000
},
{
"epoch": 3.99,
"learning_rate": 8.68888888888889e-05,
"loss": 2.2778,
"step": 54500
},
{
"epoch": 4.02,
"learning_rate": 8.666666666666667e-05,
"loss": 2.2739,
"step": 55000
},
{
"epoch": 4.06,
"learning_rate": 8.644444444444445e-05,
"loss": 2.2688,
"step": 55500
},
{
"epoch": 4.1,
"learning_rate": 8.622222222222222e-05,
"loss": 2.2649,
"step": 56000
},
{
"epoch": 4.13,
"learning_rate": 8.6e-05,
"loss": 2.2664,
"step": 56500
},
{
"epoch": 4.17,
"learning_rate": 8.577777777777777e-05,
"loss": 2.2609,
"step": 57000
},
{
"epoch": 4.21,
"learning_rate": 8.555555555555556e-05,
"loss": 2.2622,
"step": 57500
},
{
"epoch": 4.24,
"learning_rate": 8.533333333333334e-05,
"loss": 2.2579,
"step": 58000
},
{
"epoch": 4.28,
"learning_rate": 8.511111111111112e-05,
"loss": 2.2563,
"step": 58500
},
{
"epoch": 4.32,
"learning_rate": 8.488888888888889e-05,
"loss": 2.2559,
"step": 59000
},
{
"epoch": 4.35,
"learning_rate": 8.466666666666667e-05,
"loss": 2.2518,
"step": 59500
},
{
"epoch": 4.39,
"learning_rate": 8.444444444444444e-05,
"loss": 2.2519,
"step": 60000
},
{
"epoch": 4.43,
"learning_rate": 8.422222222222223e-05,
"loss": 2.2515,
"step": 60500
},
{
"epoch": 4.46,
"learning_rate": 8.4e-05,
"loss": 2.2459,
"step": 61000
},
{
"epoch": 4.5,
"learning_rate": 8.377777777777778e-05,
"loss": 2.2462,
"step": 61500
},
{
"epoch": 4.54,
"learning_rate": 8.355555555555556e-05,
"loss": 2.2426,
"step": 62000
},
{
"epoch": 4.57,
"learning_rate": 8.333333333333334e-05,
"loss": 2.2426,
"step": 62500
},
{
"epoch": 4.61,
"learning_rate": 8.311111111111111e-05,
"loss": 2.2418,
"step": 63000
},
{
"epoch": 4.65,
"learning_rate": 8.28888888888889e-05,
"loss": 2.2364,
"step": 63500
},
{
"epoch": 4.68,
"learning_rate": 8.266666666666667e-05,
"loss": 2.2392,
"step": 64000
},
{
"epoch": 4.72,
"learning_rate": 8.244444444444445e-05,
"loss": 2.2345,
"step": 64500
},
{
"epoch": 4.76,
"learning_rate": 8.222222222222222e-05,
"loss": 2.2321,
"step": 65000
},
{
"epoch": 4.79,
"learning_rate": 8.2e-05,
"loss": 2.2322,
"step": 65500
},
{
"epoch": 4.83,
"learning_rate": 8.177777777777778e-05,
"loss": 2.2308,
"step": 66000
},
{
"epoch": 4.87,
"learning_rate": 8.155555555555557e-05,
"loss": 2.2277,
"step": 66500
},
{
"epoch": 4.9,
"learning_rate": 8.133333333333334e-05,
"loss": 2.2274,
"step": 67000
},
{
"epoch": 4.94,
"learning_rate": 8.111111111111112e-05,
"loss": 2.2252,
"step": 67500
},
{
"epoch": 4.98,
"learning_rate": 8.088888888888889e-05,
"loss": 2.2235,
"step": 68000
},
{
"epoch": 5.01,
"learning_rate": 8.066666666666667e-05,
"loss": 2.2193,
"step": 68500
},
{
"epoch": 5.05,
"learning_rate": 8.044444444444444e-05,
"loss": 2.2156,
"step": 69000
},
{
"epoch": 5.09,
"learning_rate": 8.022222222222222e-05,
"loss": 2.2144,
"step": 69500
},
{
"epoch": 5.12,
"learning_rate": 8e-05,
"loss": 2.2107,
"step": 70000
},
{
"epoch": 5.16,
"learning_rate": 7.977777777777779e-05,
"loss": 2.212,
"step": 70500
},
{
"epoch": 5.2,
"learning_rate": 7.955555555555556e-05,
"loss": 2.2068,
"step": 71000
},
{
"epoch": 5.23,
"learning_rate": 7.933333333333334e-05,
"loss": 2.2069,
"step": 71500
},
{
"epoch": 5.27,
"learning_rate": 7.911111111111111e-05,
"loss": 2.2063,
"step": 72000
},
{
"epoch": 5.31,
"learning_rate": 7.88888888888889e-05,
"loss": 2.2058,
"step": 72500
},
{
"epoch": 5.34,
"learning_rate": 7.866666666666666e-05,
"loss": 2.2026,
"step": 73000
},
{
"epoch": 5.38,
"learning_rate": 7.844444444444446e-05,
"loss": 2.2018,
"step": 73500
},
{
"epoch": 5.41,
"learning_rate": 7.822222222222223e-05,
"loss": 2.1993,
"step": 74000
},
{
"epoch": 5.45,
"learning_rate": 7.800000000000001e-05,
"loss": 2.1989,
"step": 74500
},
{
"epoch": 5.49,
"learning_rate": 7.777777777777778e-05,
"loss": 2.1972,
"step": 75000
},
{
"epoch": 5.52,
"learning_rate": 7.755555555555556e-05,
"loss": 2.1949,
"step": 75500
},
{
"epoch": 5.56,
"learning_rate": 7.733333333333333e-05,
"loss": 2.195,
"step": 76000
},
{
"epoch": 5.6,
"learning_rate": 7.711111111111112e-05,
"loss": 2.1895,
"step": 76500
},
{
"epoch": 5.63,
"learning_rate": 7.688888888888889e-05,
"loss": 2.194,
"step": 77000
},
{
"epoch": 5.67,
"learning_rate": 7.666666666666667e-05,
"loss": 2.1911,
"step": 77500
},
{
"epoch": 5.71,
"learning_rate": 7.644444444444445e-05,
"loss": 2.1908,
"step": 78000
},
{
"epoch": 5.74,
"learning_rate": 7.622222222222223e-05,
"loss": 2.1868,
"step": 78500
},
{
"epoch": 5.78,
"learning_rate": 7.6e-05,
"loss": 2.1877,
"step": 79000
},
{
"epoch": 5.82,
"learning_rate": 7.577777777777779e-05,
"loss": 2.1854,
"step": 79500
},
{
"epoch": 5.85,
"learning_rate": 7.555555555555556e-05,
"loss": 2.1851,
"step": 80000
},
{
"epoch": 5.89,
"learning_rate": 7.533333333333334e-05,
"loss": 2.1827,
"step": 80500
},
{
"epoch": 5.93,
"learning_rate": 7.511111111111111e-05,
"loss": 2.1821,
"step": 81000
},
{
"epoch": 5.96,
"learning_rate": 7.488888888888889e-05,
"loss": 2.1799,
"step": 81500
},
{
"epoch": 6.0,
"learning_rate": 7.466666666666667e-05,
"loss": 2.1791,
"step": 82000
},
{
"epoch": 6.04,
"learning_rate": 7.444444444444444e-05,
"loss": 2.1724,
"step": 82500
},
{
"epoch": 6.07,
"learning_rate": 7.422222222222223e-05,
"loss": 2.1735,
"step": 83000
},
{
"epoch": 6.11,
"learning_rate": 7.4e-05,
"loss": 2.1735,
"step": 83500
},
{
"epoch": 6.15,
"learning_rate": 7.377777777777778e-05,
"loss": 2.1702,
"step": 84000
},
{
"epoch": 6.18,
"learning_rate": 7.355555555555556e-05,
"loss": 2.171,
"step": 84500
},
{
"epoch": 6.22,
"learning_rate": 7.333333333333333e-05,
"loss": 2.1689,
"step": 85000
},
{
"epoch": 6.26,
"learning_rate": 7.311111111111111e-05,
"loss": 2.165,
"step": 85500
},
{
"epoch": 6.29,
"learning_rate": 7.28888888888889e-05,
"loss": 2.1686,
"step": 86000
},
{
"epoch": 6.33,
"learning_rate": 7.266666666666667e-05,
"loss": 2.164,
"step": 86500
},
{
"epoch": 6.37,
"learning_rate": 7.244444444444445e-05,
"loss": 2.1638,
"step": 87000
},
{
"epoch": 6.4,
"learning_rate": 7.222222222222222e-05,
"loss": 2.1661,
"step": 87500
},
{
"epoch": 6.44,
"learning_rate": 7.2e-05,
"loss": 2.1642,
"step": 88000
},
{
"epoch": 6.48,
"learning_rate": 7.177777777777777e-05,
"loss": 2.1597,
"step": 88500
},
{
"epoch": 6.51,
"learning_rate": 7.155555555555555e-05,
"loss": 2.1623,
"step": 89000
},
{
"epoch": 6.55,
"learning_rate": 7.133333333333334e-05,
"loss": 2.1554,
"step": 89500
},
{
"epoch": 6.59,
"learning_rate": 7.111111111111112e-05,
"loss": 2.1551,
"step": 90000
},
{
"epoch": 6.62,
"learning_rate": 7.088888888888889e-05,
"loss": 2.1567,
"step": 90500
},
{
"epoch": 6.66,
"learning_rate": 7.066666666666667e-05,
"loss": 2.1576,
"step": 91000
},
{
"epoch": 6.7,
"learning_rate": 7.044444444444444e-05,
"loss": 2.154,
"step": 91500
},
{
"epoch": 6.73,
"learning_rate": 7.022222222222222e-05,
"loss": 2.1533,
"step": 92000
},
{
"epoch": 6.77,
"learning_rate": 7e-05,
"loss": 2.1551,
"step": 92500
},
{
"epoch": 6.81,
"learning_rate": 6.977777777777779e-05,
"loss": 2.1522,
"step": 93000
},
{
"epoch": 6.84,
"learning_rate": 6.955555555555556e-05,
"loss": 2.1516,
"step": 93500
},
{
"epoch": 6.88,
"learning_rate": 6.933333333333334e-05,
"loss": 2.1506,
"step": 94000
},
{
"epoch": 6.91,
"learning_rate": 6.911111111111111e-05,
"loss": 2.1466,
"step": 94500
},
{
"epoch": 6.95,
"learning_rate": 6.88888888888889e-05,
"loss": 2.1487,
"step": 95000
},
{
"epoch": 6.99,
"learning_rate": 6.866666666666666e-05,
"loss": 2.1456,
"step": 95500
},
{
"epoch": 7.02,
"learning_rate": 6.844444444444445e-05,
"loss": 2.1422,
"step": 96000
},
{
"epoch": 7.06,
"learning_rate": 6.822222222222222e-05,
"loss": 2.1407,
"step": 96500
},
{
"epoch": 7.1,
"learning_rate": 6.800000000000001e-05,
"loss": 2.1404,
"step": 97000
},
{
"epoch": 7.13,
"learning_rate": 6.777777777777778e-05,
"loss": 2.14,
"step": 97500
},
{
"epoch": 7.17,
"learning_rate": 6.755555555555557e-05,
"loss": 2.1376,
"step": 98000
},
{
"epoch": 7.21,
"learning_rate": 6.733333333333333e-05,
"loss": 2.135,
"step": 98500
},
{
"epoch": 7.24,
"learning_rate": 6.711111111111112e-05,
"loss": 2.138,
"step": 99000
},
{
"epoch": 7.28,
"learning_rate": 6.688888888888889e-05,
"loss": 2.1374,
"step": 99500
},
{
"epoch": 7.32,
"learning_rate": 6.666666666666667e-05,
"loss": 2.1367,
"step": 100000
},
{
"epoch": 7.35,
"learning_rate": 6.644444444444444e-05,
"loss": 2.1343,
"step": 100500
},
{
"epoch": 7.39,
"learning_rate": 6.622222222222224e-05,
"loss": 2.1339,
"step": 101000
},
{
"epoch": 7.43,
"learning_rate": 6.6e-05,
"loss": 2.1298,
"step": 101500
},
{
"epoch": 7.46,
"learning_rate": 6.577777777777779e-05,
"loss": 2.1321,
"step": 102000
},
{
"epoch": 7.5,
"learning_rate": 6.555555555555556e-05,
"loss": 2.1324,
"step": 102500
},
{
"epoch": 7.54,
"learning_rate": 6.533333333333334e-05,
"loss": 2.1303,
"step": 103000
},
{
"epoch": 7.57,
"learning_rate": 6.511111111111111e-05,
"loss": 2.1304,
"step": 103500
},
{
"epoch": 7.61,
"learning_rate": 6.488888888888889e-05,
"loss": 2.1289,
"step": 104000
},
{
"epoch": 7.65,
"learning_rate": 6.466666666666666e-05,
"loss": 2.1266,
"step": 104500
},
{
"epoch": 7.68,
"learning_rate": 6.444444444444446e-05,
"loss": 2.128,
"step": 105000
},
{
"epoch": 7.72,
"learning_rate": 6.422222222222223e-05,
"loss": 2.1277,
"step": 105500
},
{
"epoch": 7.76,
"learning_rate": 6.400000000000001e-05,
"loss": 2.1228,
"step": 106000
},
{
"epoch": 7.79,
"learning_rate": 6.377777777777778e-05,
"loss": 2.1255,
"step": 106500
},
{
"epoch": 7.83,
"learning_rate": 6.355555555555556e-05,
"loss": 2.1246,
"step": 107000
},
{
"epoch": 7.87,
"learning_rate": 6.333333333333333e-05,
"loss": 2.1265,
"step": 107500
},
{
"epoch": 7.9,
"learning_rate": 6.311111111111112e-05,
"loss": 2.1242,
"step": 108000
},
{
"epoch": 7.94,
"learning_rate": 6.28888888888889e-05,
"loss": 2.1216,
"step": 108500
},
{
"epoch": 7.98,
"learning_rate": 6.266666666666667e-05,
"loss": 2.1206,
"step": 109000
},
{
"epoch": 8.01,
"learning_rate": 6.244444444444445e-05,
"loss": 2.1199,
"step": 109500
},
{
"epoch": 8.05,
"learning_rate": 6.222222222222222e-05,
"loss": 2.114,
"step": 110000
},
{
"epoch": 8.09,
"learning_rate": 6.2e-05,
"loss": 2.1153,
"step": 110500
},
{
"epoch": 8.12,
"learning_rate": 6.177777777777779e-05,
"loss": 2.1138,
"step": 111000
},
{
"epoch": 8.16,
"learning_rate": 6.155555555555555e-05,
"loss": 2.1115,
"step": 111500
},
{
"epoch": 8.2,
"learning_rate": 6.133333333333334e-05,
"loss": 2.1114,
"step": 112000
},
{
"epoch": 8.23,
"learning_rate": 6.111111111111112e-05,
"loss": 2.111,
"step": 112500
},
{
"epoch": 8.27,
"learning_rate": 6.08888888888889e-05,
"loss": 2.1072,
"step": 113000
},
{
"epoch": 8.31,
"learning_rate": 6.066666666666667e-05,
"loss": 2.1112,
"step": 113500
},
{
"epoch": 8.34,
"learning_rate": 6.044444444444445e-05,
"loss": 2.108,
"step": 114000
},
{
"epoch": 8.38,
"learning_rate": 6.0222222222222225e-05,
"loss": 2.1073,
"step": 114500
},
{
"epoch": 8.42,
"learning_rate": 6e-05,
"loss": 2.1066,
"step": 115000
},
{
"epoch": 8.45,
"learning_rate": 5.977777777777778e-05,
"loss": 2.1055,
"step": 115500
},
{
"epoch": 8.49,
"learning_rate": 5.9555555555555554e-05,
"loss": 2.1093,
"step": 116000
},
{
"epoch": 8.52,
"learning_rate": 5.9333333333333343e-05,
"loss": 2.1047,
"step": 116500
},
{
"epoch": 8.56,
"learning_rate": 5.911111111111112e-05,
"loss": 2.1035,
"step": 117000
},
{
"epoch": 8.6,
"learning_rate": 5.8888888888888896e-05,
"loss": 2.1025,
"step": 117500
},
{
"epoch": 8.63,
"learning_rate": 5.866666666666667e-05,
"loss": 2.1056,
"step": 118000
},
{
"epoch": 8.67,
"learning_rate": 5.844444444444445e-05,
"loss": 2.1025,
"step": 118500
},
{
"epoch": 8.71,
"learning_rate": 5.8222222222222224e-05,
"loss": 2.1044,
"step": 119000
},
{
"epoch": 8.74,
"learning_rate": 5.8e-05,
"loss": 2.1005,
"step": 119500
},
{
"epoch": 8.78,
"learning_rate": 5.7777777777777776e-05,
"loss": 2.0996,
"step": 120000
},
{
"epoch": 8.82,
"learning_rate": 5.755555555555556e-05,
"loss": 2.1009,
"step": 120500
},
{
"epoch": 8.85,
"learning_rate": 5.7333333333333336e-05,
"loss": 2.0993,
"step": 121000
},
{
"epoch": 8.89,
"learning_rate": 5.711111111111112e-05,
"loss": 2.0981,
"step": 121500
},
{
"epoch": 8.93,
"learning_rate": 5.6888888888888895e-05,
"loss": 2.0992,
"step": 122000
},
{
"epoch": 8.96,
"learning_rate": 5.666666666666667e-05,
"loss": 2.0964,
"step": 122500
},
{
"epoch": 9.0,
"learning_rate": 5.644444444444445e-05,
"loss": 2.1007,
"step": 123000
},
{
"epoch": 9.04,
"learning_rate": 5.622222222222222e-05,
"loss": 2.0935,
"step": 123500
},
{
"epoch": 9.07,
"learning_rate": 5.6000000000000006e-05,
"loss": 2.0924,
"step": 124000
},
{
"epoch": 9.11,
"learning_rate": 5.577777777777778e-05,
"loss": 2.0905,
"step": 124500
},
{
"epoch": 9.15,
"learning_rate": 5.555555555555556e-05,
"loss": 2.0914,
"step": 125000
},
{
"epoch": 9.18,
"learning_rate": 5.5333333333333334e-05,
"loss": 2.09,
"step": 125500
},
{
"epoch": 9.22,
"learning_rate": 5.511111111111111e-05,
"loss": 2.0902,
"step": 126000
},
{
"epoch": 9.26,
"learning_rate": 5.488888888888889e-05,
"loss": 2.0914,
"step": 126500
},
{
"epoch": 9.29,
"learning_rate": 5.466666666666666e-05,
"loss": 2.088,
"step": 127000
},
{
"epoch": 9.33,
"learning_rate": 5.4444444444444446e-05,
"loss": 2.0877,
"step": 127500
},
{
"epoch": 9.37,
"learning_rate": 5.422222222222223e-05,
"loss": 2.0871,
"step": 128000
},
{
"epoch": 9.4,
"learning_rate": 5.4000000000000005e-05,
"loss": 2.086,
"step": 128500
},
{
"epoch": 9.44,
"learning_rate": 5.377777777777778e-05,
"loss": 2.085,
"step": 129000
},
{
"epoch": 9.48,
"learning_rate": 5.355555555555556e-05,
"loss": 2.0868,
"step": 129500
},
{
"epoch": 9.51,
"learning_rate": 5.333333333333333e-05,
"loss": 2.0851,
"step": 130000
},
{
"epoch": 9.55,
"learning_rate": 5.311111111111111e-05,
"loss": 2.083,
"step": 130500
},
{
"epoch": 9.59,
"learning_rate": 5.2888888888888885e-05,
"loss": 2.0824,
"step": 131000
},
{
"epoch": 9.62,
"learning_rate": 5.266666666666666e-05,
"loss": 2.0834,
"step": 131500
},
{
"epoch": 9.66,
"learning_rate": 5.244444444444445e-05,
"loss": 2.0808,
"step": 132000
},
{
"epoch": 9.7,
"learning_rate": 5.222222222222223e-05,
"loss": 2.0793,
"step": 132500
},
{
"epoch": 9.73,
"learning_rate": 5.2000000000000004e-05,
"loss": 2.0823,
"step": 133000
},
{
"epoch": 9.77,
"learning_rate": 5.177777777777778e-05,
"loss": 2.081,
"step": 133500
},
{
"epoch": 9.81,
"learning_rate": 5.1555555555555556e-05,
"loss": 2.0801,
"step": 134000
},
{
"epoch": 9.84,
"learning_rate": 5.133333333333333e-05,
"loss": 2.0796,
"step": 134500
},
{
"epoch": 9.88,
"learning_rate": 5.111111111111111e-05,
"loss": 2.0782,
"step": 135000
},
{
"epoch": 9.92,
"learning_rate": 5.0888888888888884e-05,
"loss": 2.0749,
"step": 135500
},
{
"epoch": 9.95,
"learning_rate": 5.0666666666666674e-05,
"loss": 2.0785,
"step": 136000
},
{
"epoch": 9.99,
"learning_rate": 5.044444444444445e-05,
"loss": 2.0768,
"step": 136500
},
{
"epoch": 10.02,
"learning_rate": 5.0222222222222226e-05,
"loss": 2.0746,
"step": 137000
},
{
"epoch": 10.06,
"learning_rate": 5e-05,
"loss": 2.0742,
"step": 137500
},
{
"epoch": 10.1,
"learning_rate": 4.977777777777778e-05,
"loss": 2.0716,
"step": 138000
},
{
"epoch": 10.13,
"learning_rate": 4.955555555555556e-05,
"loss": 2.0716,
"step": 138500
},
{
"epoch": 10.17,
"learning_rate": 4.933333333333334e-05,
"loss": 2.0697,
"step": 139000
},
{
"epoch": 10.21,
"learning_rate": 4.9111111111111114e-05,
"loss": 2.0685,
"step": 139500
},
{
"epoch": 10.24,
"learning_rate": 4.888888888888889e-05,
"loss": 2.0704,
"step": 140000
},
{
"epoch": 10.28,
"learning_rate": 4.866666666666667e-05,
"loss": 2.0688,
"step": 140500
},
{
"epoch": 10.32,
"learning_rate": 4.844444444444445e-05,
"loss": 2.0708,
"step": 141000
},
{
"epoch": 10.35,
"learning_rate": 4.8222222222222225e-05,
"loss": 2.0658,
"step": 141500
},
{
"epoch": 10.39,
"learning_rate": 4.8e-05,
"loss": 2.0673,
"step": 142000
},
{
"epoch": 10.43,
"learning_rate": 4.7777777777777784e-05,
"loss": 2.0694,
"step": 142500
},
{
"epoch": 10.46,
"learning_rate": 4.755555555555556e-05,
"loss": 2.0673,
"step": 143000
},
{
"epoch": 10.5,
"learning_rate": 4.7333333333333336e-05,
"loss": 2.0663,
"step": 143500
},
{
"epoch": 10.54,
"learning_rate": 4.711111111111111e-05,
"loss": 2.0659,
"step": 144000
},
{
"epoch": 10.57,
"learning_rate": 4.6888888888888895e-05,
"loss": 2.0657,
"step": 144500
},
{
"epoch": 10.61,
"learning_rate": 4.666666666666667e-05,
"loss": 2.0658,
"step": 145000
},
{
"epoch": 10.65,
"learning_rate": 4.644444444444445e-05,
"loss": 2.0669,
"step": 145500
},
{
"epoch": 10.68,
"learning_rate": 4.6222222222222224e-05,
"loss": 2.0661,
"step": 146000
},
{
"epoch": 10.72,
"learning_rate": 4.600000000000001e-05,
"loss": 2.061,
"step": 146500
},
{
"epoch": 10.76,
"learning_rate": 4.577777777777778e-05,
"loss": 2.0598,
"step": 147000
},
{
"epoch": 10.79,
"learning_rate": 4.555555555555556e-05,
"loss": 2.0606,
"step": 147500
},
{
"epoch": 10.83,
"learning_rate": 4.5333333333333335e-05,
"loss": 2.0592,
"step": 148000
},
{
"epoch": 10.87,
"learning_rate": 4.511111111111112e-05,
"loss": 2.0636,
"step": 148500
},
{
"epoch": 10.9,
"learning_rate": 4.4888888888888894e-05,
"loss": 2.059,
"step": 149000
},
{
"epoch": 10.94,
"learning_rate": 4.466666666666667e-05,
"loss": 2.0592,
"step": 149500
},
{
"epoch": 10.98,
"learning_rate": 4.4444444444444447e-05,
"loss": 2.0595,
"step": 150000
},
{
"epoch": 11.01,
"learning_rate": 4.422222222222222e-05,
"loss": 2.0573,
"step": 150500
},
{
"epoch": 11.05,
"learning_rate": 4.4000000000000006e-05,
"loss": 2.0556,
"step": 151000
},
{
"epoch": 11.09,
"learning_rate": 4.377777777777778e-05,
"loss": 2.0532,
"step": 151500
},
{
"epoch": 11.12,
"learning_rate": 4.355555555555556e-05,
"loss": 2.053,
"step": 152000
},
{
"epoch": 11.16,
"learning_rate": 4.3333333333333334e-05,
"loss": 2.0526,
"step": 152500
},
{
"epoch": 11.2,
"learning_rate": 4.311111111111111e-05,
"loss": 2.0531,
"step": 153000
},
{
"epoch": 11.23,
"learning_rate": 4.2888888888888886e-05,
"loss": 2.0524,
"step": 153500
},
{
"epoch": 11.27,
"learning_rate": 4.266666666666667e-05,
"loss": 2.0519,
"step": 154000
},
{
"epoch": 11.31,
"learning_rate": 4.2444444444444445e-05,
"loss": 2.0536,
"step": 154500
},
{
"epoch": 11.34,
"learning_rate": 4.222222222222222e-05,
"loss": 2.0482,
"step": 155000
},
{
"epoch": 11.38,
"learning_rate": 4.2e-05,
"loss": 2.0505,
"step": 155500
},
{
"epoch": 11.42,
"learning_rate": 4.177777777777778e-05,
"loss": 2.0492,
"step": 156000
},
{
"epoch": 11.45,
"learning_rate": 4.155555555555556e-05,
"loss": 2.0498,
"step": 156500
},
{
"epoch": 11.49,
"learning_rate": 4.133333333333333e-05,
"loss": 2.0478,
"step": 157000
},
{
"epoch": 11.52,
"learning_rate": 4.111111111111111e-05,
"loss": 2.0491,
"step": 157500
},
{
"epoch": 11.56,
"learning_rate": 4.088888888888889e-05,
"loss": 2.0485,
"step": 158000
},
{
"epoch": 11.6,
"learning_rate": 4.066666666666667e-05,
"loss": 2.0501,
"step": 158500
},
{
"epoch": 11.63,
"learning_rate": 4.0444444444444444e-05,
"loss": 2.0477,
"step": 159000
},
{
"epoch": 11.67,
"learning_rate": 4.022222222222222e-05,
"loss": 2.0459,
"step": 159500
},
{
"epoch": 11.71,
"learning_rate": 4e-05,
"loss": 2.0486,
"step": 160000
},
{
"epoch": 11.74,
"learning_rate": 3.977777777777778e-05,
"loss": 2.0478,
"step": 160500
},
{
"epoch": 11.78,
"learning_rate": 3.9555555555555556e-05,
"loss": 2.0433,
"step": 161000
},
{
"epoch": 11.82,
"learning_rate": 3.933333333333333e-05,
"loss": 2.0432,
"step": 161500
},
{
"epoch": 11.85,
"learning_rate": 3.9111111111111115e-05,
"loss": 2.0432,
"step": 162000
},
{
"epoch": 11.89,
"learning_rate": 3.888888888888889e-05,
"loss": 2.0424,
"step": 162500
},
{
"epoch": 11.93,
"learning_rate": 3.866666666666667e-05,
"loss": 2.0424,
"step": 163000
},
{
"epoch": 11.96,
"learning_rate": 3.844444444444444e-05,
"loss": 2.0434,
"step": 163500
},
{
"epoch": 12.0,
"learning_rate": 3.8222222222222226e-05,
"loss": 2.0422,
"step": 164000
},
{
"epoch": 12.04,
"learning_rate": 3.8e-05,
"loss": 2.0393,
"step": 164500
},
{
"epoch": 12.07,
"learning_rate": 3.777777777777778e-05,
"loss": 2.0395,
"step": 165000
},
{
"epoch": 12.11,
"learning_rate": 3.7555555555555554e-05,
"loss": 2.0383,
"step": 165500
},
{
"epoch": 12.15,
"learning_rate": 3.733333333333334e-05,
"loss": 2.0389,
"step": 166000
},
{
"epoch": 12.18,
"learning_rate": 3.7111111111111113e-05,
"loss": 2.0357,
"step": 166500
},
{
"epoch": 12.22,
"learning_rate": 3.688888888888889e-05,
"loss": 2.0368,
"step": 167000
},
{
"epoch": 12.26,
"learning_rate": 3.6666666666666666e-05,
"loss": 2.0353,
"step": 167500
},
{
"epoch": 12.29,
"learning_rate": 3.644444444444445e-05,
"loss": 2.0375,
"step": 168000
},
{
"epoch": 12.33,
"learning_rate": 3.6222222222222225e-05,
"loss": 2.0367,
"step": 168500
},
{
"epoch": 12.37,
"learning_rate": 3.6e-05,
"loss": 2.0327,
"step": 169000
},
{
"epoch": 12.4,
"learning_rate": 3.577777777777778e-05,
"loss": 2.0318,
"step": 169500
},
{
"epoch": 12.44,
"learning_rate": 3.555555555555556e-05,
"loss": 2.0345,
"step": 170000
},
{
"epoch": 12.48,
"learning_rate": 3.5333333333333336e-05,
"loss": 2.0358,
"step": 170500
},
{
"epoch": 12.51,
"learning_rate": 3.511111111111111e-05,
"loss": 2.0338,
"step": 171000
},
{
"epoch": 12.55,
"learning_rate": 3.4888888888888895e-05,
"loss": 2.0355,
"step": 171500
},
{
"epoch": 12.59,
"learning_rate": 3.466666666666667e-05,
"loss": 2.0313,
"step": 172000
},
{
"epoch": 12.62,
"learning_rate": 3.444444444444445e-05,
"loss": 2.0335,
"step": 172500
},
{
"epoch": 12.66,
"learning_rate": 3.4222222222222224e-05,
"loss": 2.031,
"step": 173000
},
{
"epoch": 12.7,
"learning_rate": 3.4000000000000007e-05,
"loss": 2.0319,
"step": 173500
},
{
"epoch": 12.73,
"learning_rate": 3.377777777777778e-05,
"loss": 2.0321,
"step": 174000
},
{
"epoch": 12.77,
"learning_rate": 3.355555555555556e-05,
"loss": 2.0299,
"step": 174500
},
{
"epoch": 12.81,
"learning_rate": 3.3333333333333335e-05,
"loss": 2.0314,
"step": 175000
},
{
"epoch": 12.84,
"learning_rate": 3.311111111111112e-05,
"loss": 2.0313,
"step": 175500
},
{
"epoch": 12.88,
"learning_rate": 3.2888888888888894e-05,
"loss": 2.0291,
"step": 176000
},
{
"epoch": 12.92,
"learning_rate": 3.266666666666667e-05,
"loss": 2.0289,
"step": 176500
},
{
"epoch": 12.95,
"learning_rate": 3.2444444444444446e-05,
"loss": 2.0303,
"step": 177000
},
{
"epoch": 12.99,
"learning_rate": 3.222222222222223e-05,
"loss": 2.0286,
"step": 177500
},
{
"epoch": 13.03,
"learning_rate": 3.2000000000000005e-05,
"loss": 2.0297,
"step": 178000
},
{
"epoch": 13.06,
"learning_rate": 3.177777777777778e-05,
"loss": 2.0255,
"step": 178500
},
{
"epoch": 13.1,
"learning_rate": 3.155555555555556e-05,
"loss": 2.0259,
"step": 179000
},
{
"epoch": 13.13,
"learning_rate": 3.1333333333333334e-05,
"loss": 2.0243,
"step": 179500
},
{
"epoch": 13.17,
"learning_rate": 3.111111111111111e-05,
"loss": 2.0223,
"step": 180000
},
{
"epoch": 13.21,
"learning_rate": 3.088888888888889e-05,
"loss": 2.0214,
"step": 180500
},
{
"epoch": 13.24,
"learning_rate": 3.066666666666667e-05,
"loss": 2.022,
"step": 181000
},
{
"epoch": 13.28,
"learning_rate": 3.044444444444445e-05,
"loss": 2.022,
"step": 181500
},
{
"epoch": 13.32,
"learning_rate": 3.0222222222222225e-05,
"loss": 2.0219,
"step": 182000
},
{
"epoch": 13.35,
"learning_rate": 3e-05,
"loss": 2.0186,
"step": 182500
},
{
"epoch": 13.39,
"learning_rate": 2.9777777777777777e-05,
"loss": 2.0211,
"step": 183000
},
{
"epoch": 13.43,
"learning_rate": 2.955555555555556e-05,
"loss": 2.0194,
"step": 183500
},
{
"epoch": 13.46,
"learning_rate": 2.9333333333333336e-05,
"loss": 2.0231,
"step": 184000
},
{
"epoch": 13.5,
"learning_rate": 2.9111111111111112e-05,
"loss": 2.0216,
"step": 184500
},
{
"epoch": 13.54,
"learning_rate": 2.8888888888888888e-05,
"loss": 2.0195,
"step": 185000
},
{
"epoch": 13.57,
"learning_rate": 2.8666666666666668e-05,
"loss": 2.0188,
"step": 185500
},
{
"epoch": 13.61,
"learning_rate": 2.8444444444444447e-05,
"loss": 2.0181,
"step": 186000
},
{
"epoch": 13.65,
"learning_rate": 2.8222222222222223e-05,
"loss": 2.0171,
"step": 186500
},
{
"epoch": 13.68,
"learning_rate": 2.8000000000000003e-05,
"loss": 2.0182,
"step": 187000
},
{
"epoch": 13.72,
"learning_rate": 2.777777777777778e-05,
"loss": 2.0185,
"step": 187500
},
{
"epoch": 13.76,
"learning_rate": 2.7555555555555555e-05,
"loss": 2.0168,
"step": 188000
},
{
"epoch": 13.79,
"learning_rate": 2.733333333333333e-05,
"loss": 2.0179,
"step": 188500
},
{
"epoch": 13.83,
"learning_rate": 2.7111111111111114e-05,
"loss": 2.0152,
"step": 189000
},
{
"epoch": 13.87,
"learning_rate": 2.688888888888889e-05,
"loss": 2.0151,
"step": 189500
},
{
"epoch": 13.9,
"learning_rate": 2.6666666666666667e-05,
"loss": 2.013,
"step": 190000
},
{
"epoch": 13.94,
"learning_rate": 2.6444444444444443e-05,
"loss": 2.015,
"step": 190500
},
{
"epoch": 13.98,
"learning_rate": 2.6222222222222226e-05,
"loss": 2.0138,
"step": 191000
},
{
"epoch": 14.01,
"learning_rate": 2.6000000000000002e-05,
"loss": 2.0153,
"step": 191500
},
{
"epoch": 14.05,
"learning_rate": 2.5777777777777778e-05,
"loss": 2.0124,
"step": 192000
},
{
"epoch": 14.09,
"learning_rate": 2.5555555555555554e-05,
"loss": 2.0076,
"step": 192500
},
{
"epoch": 14.12,
"learning_rate": 2.5333333333333337e-05,
"loss": 2.0098,
"step": 193000
},
{
"epoch": 14.16,
"learning_rate": 2.5111111111111113e-05,
"loss": 2.0109,
"step": 193500
},
{
"epoch": 14.2,
"learning_rate": 2.488888888888889e-05,
"loss": 2.0088,
"step": 194000
},
{
"epoch": 14.23,
"learning_rate": 2.466666666666667e-05,
"loss": 2.0099,
"step": 194500
},
{
"epoch": 14.27,
"learning_rate": 2.4444444444444445e-05,
"loss": 2.0083,
"step": 195000
},
{
"epoch": 14.31,
"learning_rate": 2.4222222222222224e-05,
"loss": 2.0097,
"step": 195500
},
{
"epoch": 14.34,
"learning_rate": 2.4e-05,
"loss": 2.0082,
"step": 196000
},
{
"epoch": 14.38,
"learning_rate": 2.377777777777778e-05,
"loss": 2.0059,
"step": 196500
},
{
"epoch": 14.42,
"learning_rate": 2.3555555555555556e-05,
"loss": 2.0065,
"step": 197000
},
{
"epoch": 14.45,
"learning_rate": 2.3333333333333336e-05,
"loss": 2.009,
"step": 197500
},
{
"epoch": 14.49,
"learning_rate": 2.3111111111111112e-05,
"loss": 2.0087,
"step": 198000
},
{
"epoch": 14.53,
"learning_rate": 2.288888888888889e-05,
"loss": 2.0065,
"step": 198500
},
{
"epoch": 14.56,
"learning_rate": 2.2666666666666668e-05,
"loss": 2.0052,
"step": 199000
},
{
"epoch": 14.6,
"learning_rate": 2.2444444444444447e-05,
"loss": 2.0091,
"step": 199500
},
{
"epoch": 14.63,
"learning_rate": 2.2222222222222223e-05,
"loss": 2.0043,
"step": 200000
},
{
"epoch": 14.67,
"learning_rate": 2.2000000000000003e-05,
"loss": 2.0043,
"step": 200500
},
{
"epoch": 14.71,
"learning_rate": 2.177777777777778e-05,
"loss": 2.0051,
"step": 201000
},
{
"epoch": 14.74,
"learning_rate": 2.1555555555555555e-05,
"loss": 2.005,
"step": 201500
},
{
"epoch": 14.78,
"learning_rate": 2.1333333333333335e-05,
"loss": 2.0017,
"step": 202000
},
{
"epoch": 14.82,
"learning_rate": 2.111111111111111e-05,
"loss": 2.006,
"step": 202500
},
{
"epoch": 14.85,
"learning_rate": 2.088888888888889e-05,
"loss": 2.004,
"step": 203000
},
{
"epoch": 14.89,
"learning_rate": 2.0666666666666666e-05,
"loss": 2.0018,
"step": 203500
},
{
"epoch": 14.93,
"learning_rate": 2.0444444444444446e-05,
"loss": 2.0047,
"step": 204000
},
{
"epoch": 14.96,
"learning_rate": 2.0222222222222222e-05,
"loss": 2.0015,
"step": 204500
},
{
"epoch": 15.0,
"learning_rate": 2e-05,
"loss": 2.0044,
"step": 205000
},
{
"epoch": 15.04,
"learning_rate": 1.9777777777777778e-05,
"loss": 2.0001,
"step": 205500
},
{
"epoch": 15.07,
"learning_rate": 1.9555555555555557e-05,
"loss": 1.9978,
"step": 206000
},
{
"epoch": 15.11,
"learning_rate": 1.9333333333333333e-05,
"loss": 1.9998,
"step": 206500
},
{
"epoch": 15.15,
"learning_rate": 1.9111111111111113e-05,
"loss": 2.0002,
"step": 207000
},
{
"epoch": 15.18,
"learning_rate": 1.888888888888889e-05,
"loss": 1.9999,
"step": 207500
},
{
"epoch": 15.22,
"learning_rate": 1.866666666666667e-05,
"loss": 1.9971,
"step": 208000
},
{
"epoch": 15.26,
"learning_rate": 1.8444444444444445e-05,
"loss": 1.9978,
"step": 208500
},
{
"epoch": 15.29,
"learning_rate": 1.8222222222222224e-05,
"loss": 1.9973,
"step": 209000
},
{
"epoch": 15.33,
"learning_rate": 1.8e-05,
"loss": 1.9989,
"step": 209500
},
{
"epoch": 15.37,
"learning_rate": 1.777777777777778e-05,
"loss": 1.9968,
"step": 210000
},
{
"epoch": 15.4,
"learning_rate": 1.7555555555555556e-05,
"loss": 1.993,
"step": 210500
},
{
"epoch": 15.44,
"learning_rate": 1.7333333333333336e-05,
"loss": 1.9943,
"step": 211000
},
{
"epoch": 15.48,
"learning_rate": 1.7111111111111112e-05,
"loss": 1.9952,
"step": 211500
},
{
"epoch": 15.51,
"learning_rate": 1.688888888888889e-05,
"loss": 1.9936,
"step": 212000
},
{
"epoch": 15.55,
"learning_rate": 1.6666666666666667e-05,
"loss": 1.9948,
"step": 212500
},
{
"epoch": 15.59,
"learning_rate": 1.6444444444444447e-05,
"loss": 1.9916,
"step": 213000
},
{
"epoch": 15.62,
"learning_rate": 1.6222222222222223e-05,
"loss": 1.994,
"step": 213500
},
{
"epoch": 15.66,
"learning_rate": 1.6000000000000003e-05,
"loss": 1.9945,
"step": 214000
},
{
"epoch": 15.7,
"learning_rate": 1.577777777777778e-05,
"loss": 1.9903,
"step": 214500
},
{
"epoch": 15.73,
"learning_rate": 1.5555555555555555e-05,
"loss": 1.9905,
"step": 215000
},
{
"epoch": 15.77,
"learning_rate": 1.5333333333333334e-05,
"loss": 1.994,
"step": 215500
},
{
"epoch": 15.81,
"learning_rate": 1.5111111111111112e-05,
"loss": 1.992,
"step": 216000
},
{
"epoch": 15.84,
"learning_rate": 1.4888888888888888e-05,
"loss": 1.9918,
"step": 216500
},
{
"epoch": 15.88,
"learning_rate": 1.4666666666666668e-05,
"loss": 1.9905,
"step": 217000
},
{
"epoch": 15.92,
"learning_rate": 1.4444444444444444e-05,
"loss": 1.993,
"step": 217500
},
{
"epoch": 15.95,
"learning_rate": 1.4222222222222224e-05,
"loss": 1.9917,
"step": 218000
},
{
"epoch": 15.99,
"learning_rate": 1.4000000000000001e-05,
"loss": 1.9883,
"step": 218500
},
{
"epoch": 16.03,
"learning_rate": 1.3777777777777778e-05,
"loss": 1.9877,
"step": 219000
},
{
"epoch": 16.06,
"learning_rate": 1.3555555555555557e-05,
"loss": 1.9876,
"step": 219500
},
{
"epoch": 16.1,
"learning_rate": 1.3333333333333333e-05,
"loss": 1.989,
"step": 220000
},
{
"epoch": 16.13,
"learning_rate": 1.3111111111111113e-05,
"loss": 1.9876,
"step": 220500
},
{
"epoch": 16.17,
"learning_rate": 1.2888888888888889e-05,
"loss": 1.9879,
"step": 221000
},
{
"epoch": 16.21,
"learning_rate": 1.2666666666666668e-05,
"loss": 1.9878,
"step": 221500
},
{
"epoch": 16.24,
"learning_rate": 1.2444444444444445e-05,
"loss": 1.9863,
"step": 222000
},
{
"epoch": 16.28,
"learning_rate": 1.2222222222222222e-05,
"loss": 1.9858,
"step": 222500
},
{
"epoch": 16.32,
"learning_rate": 1.2e-05,
"loss": 1.9833,
"step": 223000
},
{
"epoch": 16.35,
"learning_rate": 1.1777777777777778e-05,
"loss": 1.9855,
"step": 223500
},
{
"epoch": 16.39,
"learning_rate": 1.1555555555555556e-05,
"loss": 1.9852,
"step": 224000
},
{
"epoch": 16.43,
"learning_rate": 1.1333333333333334e-05,
"loss": 1.9841,
"step": 224500
},
{
"epoch": 16.46,
"learning_rate": 1.1111111111111112e-05,
"loss": 1.986,
"step": 225000
},
{
"epoch": 16.5,
"learning_rate": 1.088888888888889e-05,
"loss": 1.9844,
"step": 225500
},
{
"epoch": 16.54,
"learning_rate": 1.0666666666666667e-05,
"loss": 1.9806,
"step": 226000
},
{
"epoch": 16.57,
"learning_rate": 1.0444444444444445e-05,
"loss": 1.9826,
"step": 226500
},
{
"epoch": 16.61,
"learning_rate": 1.0222222222222223e-05,
"loss": 1.9831,
"step": 227000
},
{
"epoch": 16.65,
"learning_rate": 1e-05,
"loss": 1.9835,
"step": 227500
},
{
"epoch": 16.68,
"learning_rate": 9.777777777777779e-06,
"loss": 1.9796,
"step": 228000
},
{
"epoch": 16.72,
"learning_rate": 9.555555555555556e-06,
"loss": 1.9865,
"step": 228500
},
{
"epoch": 16.76,
"learning_rate": 9.333333333333334e-06,
"loss": 1.9827,
"step": 229000
},
{
"epoch": 16.79,
"learning_rate": 9.111111111111112e-06,
"loss": 1.981,
"step": 229500
},
{
"epoch": 16.83,
"learning_rate": 8.88888888888889e-06,
"loss": 1.9805,
"step": 230000
},
{
"epoch": 16.87,
"learning_rate": 8.666666666666668e-06,
"loss": 1.9812,
"step": 230500
},
{
"epoch": 16.9,
"learning_rate": 8.444444444444446e-06,
"loss": 1.9815,
"step": 231000
},
{
"epoch": 16.94,
"learning_rate": 8.222222222222223e-06,
"loss": 1.9821,
"step": 231500
},
{
"epoch": 16.98,
"learning_rate": 8.000000000000001e-06,
"loss": 1.9802,
"step": 232000
},
{
"epoch": 17.01,
"learning_rate": 7.777777777777777e-06,
"loss": 1.9789,
"step": 232500
},
{
"epoch": 17.05,
"learning_rate": 7.555555555555556e-06,
"loss": 1.9764,
"step": 233000
},
{
"epoch": 17.09,
"learning_rate": 7.333333333333334e-06,
"loss": 1.9791,
"step": 233500
},
{
"epoch": 17.12,
"learning_rate": 7.111111111111112e-06,
"loss": 1.9787,
"step": 234000
},
{
"epoch": 17.16,
"learning_rate": 6.888888888888889e-06,
"loss": 1.9799,
"step": 234500
},
{
"epoch": 17.2,
"learning_rate": 6.666666666666667e-06,
"loss": 1.9762,
"step": 235000
},
{
"epoch": 17.23,
"learning_rate": 6.4444444444444445e-06,
"loss": 1.9805,
"step": 235500
},
{
"epoch": 17.27,
"learning_rate": 6.222222222222222e-06,
"loss": 1.9773,
"step": 236000
},
{
"epoch": 17.31,
"learning_rate": 6e-06,
"loss": 1.9766,
"step": 236500
},
{
"epoch": 17.34,
"learning_rate": 5.777777777777778e-06,
"loss": 1.9757,
"step": 237000
},
{
"epoch": 17.38,
"learning_rate": 5.555555555555556e-06,
"loss": 1.977,
"step": 237500
},
{
"epoch": 17.42,
"learning_rate": 5.333333333333334e-06,
"loss": 1.9756,
"step": 238000
},
{
"epoch": 17.45,
"learning_rate": 5.1111111111111115e-06,
"loss": 1.9756,
"step": 238500
},
{
"epoch": 17.49,
"learning_rate": 4.888888888888889e-06,
"loss": 1.9756,
"step": 239000
},
{
"epoch": 17.53,
"learning_rate": 4.666666666666667e-06,
"loss": 1.9758,
"step": 239500
},
{
"epoch": 17.56,
"learning_rate": 4.444444444444445e-06,
"loss": 1.9731,
"step": 240000
},
{
"epoch": 17.6,
"learning_rate": 4.222222222222223e-06,
"loss": 1.9742,
"step": 240500
},
{
"epoch": 17.63,
"learning_rate": 4.000000000000001e-06,
"loss": 1.975,
"step": 241000
},
{
"epoch": 17.67,
"learning_rate": 3.777777777777778e-06,
"loss": 1.9736,
"step": 241500
},
{
"epoch": 17.71,
"learning_rate": 3.555555555555556e-06,
"loss": 1.976,
"step": 242000
},
{
"epoch": 17.74,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.9723,
"step": 242500
},
{
"epoch": 17.78,
"learning_rate": 3.111111111111111e-06,
"loss": 1.9755,
"step": 243000
},
{
"epoch": 17.82,
"learning_rate": 2.888888888888889e-06,
"loss": 1.9729,
"step": 243500
},
{
"epoch": 17.85,
"learning_rate": 2.666666666666667e-06,
"loss": 1.9718,
"step": 244000
},
{
"epoch": 17.89,
"learning_rate": 2.4444444444444447e-06,
"loss": 1.9732,
"step": 244500
},
{
"epoch": 17.93,
"learning_rate": 2.2222222222222225e-06,
"loss": 1.971,
"step": 245000
},
{
"epoch": 17.96,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.9731,
"step": 245500
},
{
"epoch": 18.0,
"learning_rate": 1.777777777777778e-06,
"loss": 1.9734,
"step": 246000
},
{
"epoch": 18.04,
"learning_rate": 1.5555555555555556e-06,
"loss": 1.9711,
"step": 246500
},
{
"epoch": 18.07,
"learning_rate": 1.3333333333333334e-06,
"loss": 1.9728,
"step": 247000
},
{
"epoch": 18.11,
"learning_rate": 1.1111111111111112e-06,
"loss": 1.9699,
"step": 247500
},
{
"epoch": 18.15,
"learning_rate": 8.88888888888889e-07,
"loss": 1.9725,
"step": 248000
},
{
"epoch": 18.18,
"learning_rate": 6.666666666666667e-07,
"loss": 1.9697,
"step": 248500
},
{
"epoch": 18.22,
"learning_rate": 4.444444444444445e-07,
"loss": 1.9723,
"step": 249000
},
{
"epoch": 18.26,
"learning_rate": 2.2222222222222224e-07,
"loss": 1.971,
"step": 249500
},
{
"epoch": 18.29,
"learning_rate": 0.0,
"loss": 1.9694,
"step": 250000
},
{
"epoch": 18.29,
"step": 250000,
"total_flos": 1.4913640780926124e+19,
"train_loss": 2.2607063239746092,
"train_runtime": 1000597.3759,
"train_samples_per_second": 127.924,
"train_steps_per_second": 0.25
}
],
"max_steps": 250000,
"num_train_epochs": 19,
"total_flos": 1.4913640780926124e+19,
"trial_name": null,
"trial_params": null
}
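
For convenience, a minimal Python sketch (not part of the original file) showing how the log_history above could be read and plotted; it assumes the file is saved locally as trainer_state.json and that matplotlib is installed.

import json
import matplotlib.pyplot as plt

# Load the trainer state dumped by the Hugging Face Trainer (local path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Entries containing a "loss" key are the periodic training logs (every 500 steps here);
# the final summary entry (train_runtime, total_flos, ...) has no "loss" and is skipped.
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title("roberta-large-wechsel-ukrainian pretraining loss")
plt.tight_layout()
plt.show()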